diff --git a/km-biz/src/main/java/com/xiaojukeji/know/streaming/km/biz/group/impl/GroupManagerImpl.java b/km-biz/src/main/java/com/xiaojukeji/know/streaming/km/biz/group/impl/GroupManagerImpl.java index 7d8c81ff..17216793 100644 --- a/km-biz/src/main/java/com/xiaojukeji/know/streaming/km/biz/group/impl/GroupManagerImpl.java +++ b/km-biz/src/main/java/com/xiaojukeji/know/streaming/km/biz/group/impl/GroupManagerImpl.java @@ -8,9 +8,13 @@ import com.xiaojukeji.know.streaming.km.common.bean.dto.group.GroupOffsetResetDT import com.xiaojukeji.know.streaming.km.common.bean.dto.pagination.PaginationBaseDTO; import com.xiaojukeji.know.streaming.km.common.bean.dto.pagination.PaginationSortDTO; import com.xiaojukeji.know.streaming.km.common.bean.dto.partition.PartitionOffsetDTO; +import com.xiaojukeji.know.streaming.km.common.bean.entity.cluster.ClusterPhy; import com.xiaojukeji.know.streaming.km.common.bean.entity.group.Group; import com.xiaojukeji.know.streaming.km.common.bean.entity.group.GroupTopic; import com.xiaojukeji.know.streaming.km.common.bean.entity.group.GroupTopicMember; +import com.xiaojukeji.know.streaming.km.common.bean.entity.kafka.KSGroupDescription; +import com.xiaojukeji.know.streaming.km.common.bean.entity.kafka.KSMemberConsumerAssignment; +import com.xiaojukeji.know.streaming.km.common.bean.entity.kafka.KSMemberDescription; import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.GroupMetrics; import com.xiaojukeji.know.streaming.km.common.bean.entity.offset.KSOffsetSpec; import com.xiaojukeji.know.streaming.km.common.bean.entity.result.PaginationResult; @@ -35,14 +39,13 @@ import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil; import com.xiaojukeji.know.streaming.km.common.utils.PaginationMetricsUtil; import com.xiaojukeji.know.streaming.km.common.utils.PaginationUtil; import com.xiaojukeji.know.streaming.km.common.utils.ValidateUtils; +import com.xiaojukeji.know.streaming.km.core.service.cluster.ClusterPhyService; import 
com.xiaojukeji.know.streaming.km.core.service.group.GroupMetricService; import com.xiaojukeji.know.streaming.km.core.service.group.GroupService; import com.xiaojukeji.know.streaming.km.core.service.partition.PartitionService; import com.xiaojukeji.know.streaming.km.core.service.topic.TopicService; import com.xiaojukeji.know.streaming.km.core.service.version.metrics.kafka.GroupMetricVersionItems; import com.xiaojukeji.know.streaming.km.persistence.es.dao.GroupMetricESDAO; -import org.apache.kafka.clients.admin.ConsumerGroupDescription; -import org.apache.kafka.clients.admin.MemberDescription; import org.apache.kafka.common.ConsumerGroupState; import org.apache.kafka.common.TopicPartition; import org.springframework.beans.factory.annotation.Autowired; @@ -51,6 +54,8 @@ import org.springframework.stereotype.Component; import java.util.*; import java.util.stream.Collectors; +import static com.xiaojukeji.know.streaming.km.common.enums.group.GroupTypeEnum.CONNECT_CLUSTER_PROTOCOL_TYPE; + @Component public class GroupManagerImpl implements GroupManager { private static final ILog log = LogFactory.getLog(GroupManagerImpl.class); @@ -70,6 +75,9 @@ public class GroupManagerImpl implements GroupManager { @Autowired private GroupMetricESDAO groupMetricESDAO; + @Autowired + private ClusterPhyService clusterPhyService; + @Override public PaginationResult pagingGroupMembers(Long clusterPhyId, String topicName, @@ -140,6 +148,11 @@ public class GroupManagerImpl implements GroupManager { String groupName, List latestMetricNames, PaginationSortDTO dto) throws NotExistException, AdminOperateException { + ClusterPhy clusterPhy = clusterPhyService.getClusterByCluster(clusterPhyId); + if (clusterPhy == null) { + return PaginationResult.buildFailure(MsgConstant.getClusterPhyNotExist(clusterPhyId), dto); + } + // 获取消费组消费的TopicPartition列表 Map consumedOffsetMap = groupService.getGroupOffsetFromKafka(clusterPhyId, groupName); List partitionList = consumedOffsetMap.keySet() @@ -150,13 +163,18 
@@ public class GroupManagerImpl implements GroupManager { Collections.sort(partitionList); // 获取消费组当前运行信息 - ConsumerGroupDescription groupDescription = groupService.getGroupDescriptionFromKafka(clusterPhyId, groupName); + KSGroupDescription groupDescription = groupService.getGroupDescriptionFromKafka(clusterPhy, groupName); // 转换存储格式 - Map tpMemberMap = new HashMap<>(); - for (MemberDescription description: groupDescription.members()) { - for (TopicPartition tp: description.assignment().topicPartitions()) { - tpMemberMap.put(tp, description); + Map tpMemberMap = new HashMap<>(); + + //如果不是connect集群 + if (!groupDescription.protocolType().equals(CONNECT_CLUSTER_PROTOCOL_TYPE)) { + for (KSMemberDescription description : groupDescription.members()) { + KSMemberConsumerAssignment assignment = (KSMemberConsumerAssignment) description.assignment(); + for (TopicPartition tp : assignment.topicPartitions()) { + tpMemberMap.put(tp, description); + } } } @@ -173,11 +191,11 @@ public class GroupManagerImpl implements GroupManager { vo.setTopicName(topicName); vo.setPartitionId(groupMetrics.getPartitionId()); - MemberDescription memberDescription = tpMemberMap.get(new TopicPartition(topicName, groupMetrics.getPartitionId())); - if (memberDescription != null) { - vo.setMemberId(memberDescription.consumerId()); - vo.setHost(memberDescription.host()); - vo.setClientId(memberDescription.clientId()); + KSMemberDescription ksMemberDescription = tpMemberMap.get(new TopicPartition(topicName, groupMetrics.getPartitionId())); + if (ksMemberDescription != null) { + vo.setMemberId(ksMemberDescription.consumerId()); + vo.setHost(ksMemberDescription.host()); + vo.setClientId(ksMemberDescription.clientId()); } vo.setLatestMetrics(groupMetrics); @@ -203,7 +221,12 @@ public class GroupManagerImpl implements GroupManager { return rv; } - ConsumerGroupDescription description = groupService.getGroupDescriptionFromKafka(dto.getClusterId(), dto.getGroupName()); + ClusterPhy clusterPhy = 
clusterPhyService.getClusterByCluster(dto.getClusterId()); + if (clusterPhy == null) { + return Result.buildFromRSAndMsg(ResultStatus.CLUSTER_NOT_EXIST, MsgConstant.getClusterPhyNotExist(dto.getClusterId())); + } + + KSGroupDescription description = groupService.getGroupDescriptionFromKafka(clusterPhy, dto.getGroupName()); if (ConsumerGroupState.DEAD.equals(description.state()) && !dto.isCreateIfNotExist()) { return Result.buildFromRSAndMsg(ResultStatus.KAFKA_OPERATE_FAILED, "group不存在, 重置失败"); } @@ -345,32 +368,4 @@ public class GroupManagerImpl implements GroupManager { dto ); } - - private List convert2GroupTopicOverviewVOList(String groupName, String state, List groupTopicList, List metricsList) { - if (metricsList == null) { - metricsList = new ArrayList<>(); - } - - // - Map metricsMap = new HashMap<>(); - for (GroupMetrics metrics : metricsList) { - if (!groupName.equals(metrics.getGroup())) continue; - metricsMap.put(metrics.getTopic(), metrics); - } - - List voList = new ArrayList<>(); - for (GroupTopicMember po : groupTopicList) { - GroupTopicOverviewVO vo = ConvertUtil.obj2Obj(po, GroupTopicOverviewVO.class); - vo.setGroupName(groupName); - vo.setState(state); - GroupMetrics metrics = metricsMap.get(po.getTopicName()); - if (metrics != null) { - vo.setMaxLag(ConvertUtil.Float2Long(metrics.getMetrics().get(GroupMetricVersionItems.GROUP_METRIC_LAG))); - } - - voList.add(vo); - } - return voList; - } - } diff --git a/km-biz/src/main/java/com/xiaojukeji/know/streaming/km/biz/topic/impl/TopicConfigManagerImpl.java b/km-biz/src/main/java/com/xiaojukeji/know/streaming/km/biz/topic/impl/TopicConfigManagerImpl.java index d52ad657..ccb02cf6 100644 --- a/km-biz/src/main/java/com/xiaojukeji/know/streaming/km/biz/topic/impl/TopicConfigManagerImpl.java +++ b/km-biz/src/main/java/com/xiaojukeji/know/streaming/km/biz/topic/impl/TopicConfigManagerImpl.java @@ -16,7 +16,7 @@ import com.xiaojukeji.know.streaming.km.common.utils.ValidateUtils; import 
com.xiaojukeji.know.streaming.km.core.service.broker.BrokerConfigService; import com.xiaojukeji.know.streaming.km.core.service.broker.BrokerService; import com.xiaojukeji.know.streaming.km.core.service.topic.TopicConfigService; -import com.xiaojukeji.know.streaming.km.core.service.version.BaseVersionControlService; +import com.xiaojukeji.know.streaming.km.core.service.version.BaseKafkaVersionControlService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -27,7 +27,7 @@ import java.util.stream.Collectors; import static com.xiaojukeji.know.streaming.km.common.enums.version.VersionEnum.*; @Component -public class TopicConfigManagerImpl extends BaseVersionControlService implements TopicConfigManager { +public class TopicConfigManagerImpl extends BaseKafkaVersionControlService implements TopicConfigManager { private static final ILog log = LogFactory.getLog(TopicConfigManagerImpl.class); private static final String GET_DEFAULT_TOPIC_CONFIG = "getDefaultTopicConfig"; diff --git a/km-biz/src/main/java/com/xiaojukeji/know/streaming/km/biz/version/VersionControlManager.java b/km-biz/src/main/java/com/xiaojukeji/know/streaming/km/biz/version/VersionControlManager.java index 575a26d3..ea4a9dc2 100644 --- a/km-biz/src/main/java/com/xiaojukeji/know/streaming/km/biz/version/VersionControlManager.java +++ b/km-biz/src/main/java/com/xiaojukeji/know/streaming/km/biz/version/VersionControlManager.java @@ -20,7 +20,7 @@ public interface VersionControlManager { * 获取当前ks所有支持的kafka版本 * @return */ - Result> listAllVersions(); + Result> listAllKafkaVersions(); /** * 获取全部集群 clusterId 中类型为 type 的指标,不论支持不支持 @@ -28,7 +28,7 @@ public interface VersionControlManager { * @param type * @return */ - Result> listClusterVersionControlItem(Long clusterId, Integer type); + Result> listKafkaClusterVersionControlItem(Long clusterId, Integer type); /** * 获取当前用户设置的用于展示的指标配置 diff --git 
a/km-biz/src/main/java/com/xiaojukeji/know/streaming/km/biz/version/impl/VersionControlManagerImpl.java b/km-biz/src/main/java/com/xiaojukeji/know/streaming/km/biz/version/impl/VersionControlManagerImpl.java index 3ce527a1..6abfebba 100644 --- a/km-biz/src/main/java/com/xiaojukeji/know/streaming/km/biz/version/impl/VersionControlManagerImpl.java +++ b/km-biz/src/main/java/com/xiaojukeji/know/streaming/km/biz/version/impl/VersionControlManagerImpl.java @@ -17,6 +17,7 @@ import com.xiaojukeji.know.streaming.km.common.bean.vo.version.VersionItemVO; import com.xiaojukeji.know.streaming.km.common.enums.version.VersionEnum; import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil; import com.xiaojukeji.know.streaming.km.common.utils.VersionUtil; +import com.xiaojukeji.know.streaming.km.core.service.cluster.ClusterPhyService; import com.xiaojukeji.know.streaming.km.core.service.version.VersionControlService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; @@ -92,6 +93,9 @@ public class VersionControlManagerImpl implements VersionControlManager { defaultMetrics.add(new UserMetricConfig(METRIC_BROKER.getCode(), BROKER_METRIC_BYTES_OUT, true)); } + @Autowired + private ClusterPhyService clusterPhyService; + @Autowired private VersionControlService versionControlService; @@ -107,7 +111,13 @@ public class VersionControlManagerImpl implements VersionControlManager { allVersionItemVO.addAll(ConvertUtil.list2List(versionControlService.listVersionControlItem(METRIC_BROKER.getCode()), VersionItemVO.class)); allVersionItemVO.addAll(ConvertUtil.list2List(versionControlService.listVersionControlItem(METRIC_PARTITION.getCode()), VersionItemVO.class)); allVersionItemVO.addAll(ConvertUtil.list2List(versionControlService.listVersionControlItem(METRIC_REPLICATION.getCode()), VersionItemVO.class)); + allVersionItemVO.addAll(ConvertUtil.list2List(versionControlService.listVersionControlItem(METRIC_ZOOKEEPER.getCode()), 
VersionItemVO.class)); + + allVersionItemVO.addAll(ConvertUtil.list2List(versionControlService.listVersionControlItem(METRIC_CONNECT_CLUSTER.getCode()), VersionItemVO.class)); + allVersionItemVO.addAll(ConvertUtil.list2List(versionControlService.listVersionControlItem(METRIC_CONNECT_CONNECTOR.getCode()), VersionItemVO.class)); + allVersionItemVO.addAll(ConvertUtil.list2List(versionControlService.listVersionControlItem(METRIC_CONNECT_MIRROR_MAKER.getCode()), VersionItemVO.class)); + allVersionItemVO.addAll(ConvertUtil.list2List(versionControlService.listVersionControlItem(WEB_OP.getCode()), VersionItemVO.class)); Map map = allVersionItemVO.stream().collect( @@ -121,18 +131,20 @@ public class VersionControlManagerImpl implements VersionControlManager { } @Override - public Result> listAllVersions() { + public Result> listAllKafkaVersions() { return Result.buildSuc(VersionEnum.allVersionsWithOutMax()); } @Override - public Result> listClusterVersionControlItem(Long clusterId, Integer type) { + public Result> listKafkaClusterVersionControlItem(Long clusterId, Integer type) { List allItem = versionControlService.listVersionControlItem(type); List versionItemVOS = new ArrayList<>(); + String versionStr = clusterPhyService.getVersionFromCacheFirst(clusterId); + for (VersionControlItem item : allItem){ VersionItemVO itemVO = ConvertUtil.obj2Obj(item, VersionItemVO.class); - boolean support = versionControlService.isClusterSupport(clusterId, item); + boolean support = versionControlService.isClusterSupport(versionStr, item); itemVO.setSupport(support); itemVO.setDesc(itemSupportDesc(item, support)); @@ -145,7 +157,7 @@ public class VersionControlManagerImpl implements VersionControlManager { @Override public Result> listUserMetricItem(Long clusterId, Integer type, String operator) { - Result> ret = listClusterVersionControlItem(clusterId, type); + Result> ret = listKafkaClusterVersionControlItem(clusterId, type); if(null == ret || ret.failed()){ return Result.buildFail(); } 
diff --git a/km-collector/src/main/java/com/xiaojukeji/know/streaming/km/collector/metric/AbstractMetricCollector.java b/km-collector/src/main/java/com/xiaojukeji/know/streaming/km/collector/metric/AbstractMetricCollector.java index 7b6bce9a..ceb1fbff 100644 --- a/km-collector/src/main/java/com/xiaojukeji/know/streaming/km/collector/metric/AbstractMetricCollector.java +++ b/km-collector/src/main/java/com/xiaojukeji/know/streaming/km/collector/metric/AbstractMetricCollector.java @@ -1,52 +1,26 @@ package com.xiaojukeji.know.streaming.km.collector.metric; -import com.didiglobal.logi.log.ILog; -import com.didiglobal.logi.log.LogFactory; import com.xiaojukeji.know.streaming.km.collector.service.CollectThreadPoolService; -import com.xiaojukeji.know.streaming.km.common.utils.LoggerUtil; -import com.xiaojukeji.know.streaming.km.common.bean.entity.cluster.ClusterPhy; import com.xiaojukeji.know.streaming.km.common.bean.event.metric.BaseMetricEvent; import com.xiaojukeji.know.streaming.km.common.component.SpringTool; import com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemTypeEnum; -import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil; import com.xiaojukeji.know.streaming.km.common.utils.FutureWaitUtil; import org.springframework.beans.factory.annotation.Autowired; -import java.util.List; /** * @author didi */ -public abstract class AbstractMetricCollector { - protected static final ILog LOGGER = LogFactory.getLog(AbstractMetricCollector.class); - - protected static final ILog METRIC_COLLECTED_LOGGER = LoggerUtil.getMetricCollectedLogger(); - - public abstract List collectKafkaMetrics(ClusterPhy clusterPhy); +public abstract class AbstractMetricCollector { + public abstract String getClusterVersion(C c); public abstract VersionItemTypeEnum collectorType(); @Autowired private CollectThreadPoolService collectThreadPoolService; - public void collectMetrics(ClusterPhy clusterPhy) { - long startTime = System.currentTimeMillis(); - - // 采集指标 - List 
metricsList = this.collectKafkaMetrics(clusterPhy); - - // 输出耗时信息 - LOGGER.info( - "metricType={}||clusterPhyId={}||costTimeUnitMs={}", - this.collectorType().getMessage(), clusterPhy.getId(), System.currentTimeMillis() - startTime - ); - - // 输出采集到的指标信息 - METRIC_COLLECTED_LOGGER.debug("metricType={}||clusterPhyId={}||metrics={}!", - this.collectorType().getMessage(), clusterPhy.getId(), ConvertUtil.obj2Json(metricsList) - ); - } + public abstract void collectMetrics(C c); protected FutureWaitUtil getFutureUtilByClusterPhyId(Long clusterPhyId) { return collectThreadPoolService.selectSuitableFutureUtil(clusterPhyId * 1000L + this.collectorType().getCode()); diff --git a/km-collector/src/main/java/com/xiaojukeji/know/streaming/km/collector/metric/connect/AbstractConnectMetricCollector.java b/km-collector/src/main/java/com/xiaojukeji/know/streaming/km/collector/metric/connect/AbstractConnectMetricCollector.java new file mode 100644 index 00000000..78ca717c --- /dev/null +++ b/km-collector/src/main/java/com/xiaojukeji/know/streaming/km/collector/metric/connect/AbstractConnectMetricCollector.java @@ -0,0 +1,50 @@ +package com.xiaojukeji.know.streaming.km.collector.metric.connect; + +import com.didiglobal.logi.log.ILog; +import com.didiglobal.logi.log.LogFactory; +import com.xiaojukeji.know.streaming.km.collector.metric.AbstractMetricCollector; +import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.ConnectCluster; +import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil; +import com.xiaojukeji.know.streaming.km.common.utils.LoggerUtil; +import com.xiaojukeji.know.streaming.km.core.service.connect.cluster.ConnectClusterService; +import org.springframework.beans.factory.annotation.Autowired; + +import java.util.List; + +/** + * @author didi + */ +public abstract class AbstractConnectMetricCollector extends AbstractMetricCollector { + private static final ILog LOGGER = LogFactory.getLog(AbstractConnectMetricCollector.class); + + protected static final 
ILog METRIC_COLLECTED_LOGGER = LoggerUtil.getMetricCollectedLogger(); + + @Autowired + private ConnectClusterService connectClusterService; + + public abstract List collectConnectMetrics(ConnectCluster connectCluster); + + @Override + public String getClusterVersion(ConnectCluster connectCluster){ + return connectClusterService.getClusterVersion(connectCluster.getId()); + } + + @Override + public void collectMetrics(ConnectCluster connectCluster) { + long startTime = System.currentTimeMillis(); + + // 采集指标 + List metricsList = this.collectConnectMetrics(connectCluster); + + // 输出耗时信息 + LOGGER.info( + "metricType={}||connectClusterId={}||costTimeUnitMs={}", + this.collectorType().getMessage(), connectCluster.getId(), System.currentTimeMillis() - startTime + ); + + // 输出采集到的指标信息 + METRIC_COLLECTED_LOGGER.debug("metricType={}||connectClusterId={}||metrics={}!", + this.collectorType().getMessage(), connectCluster.getId(), ConvertUtil.obj2Json(metricsList) + ); + } +} diff --git a/km-collector/src/main/java/com/xiaojukeji/know/streaming/km/collector/metric/connect/ConnectClusterMetricCollector.java b/km-collector/src/main/java/com/xiaojukeji/know/streaming/km/collector/metric/connect/ConnectClusterMetricCollector.java new file mode 100644 index 00000000..df463ea1 --- /dev/null +++ b/km-collector/src/main/java/com/xiaojukeji/know/streaming/km/collector/metric/connect/ConnectClusterMetricCollector.java @@ -0,0 +1,83 @@ +package com.xiaojukeji.know.streaming.km.collector.metric.connect; + +import com.didiglobal.logi.log.ILog; +import com.didiglobal.logi.log.LogFactory; +import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.ConnectCluster; +import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.connect.ConnectClusterMetrics; +import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result; +import com.xiaojukeji.know.streaming.km.common.bean.entity.version.VersionControlItem; +import 
com.xiaojukeji.know.streaming.km.common.bean.event.metric.connect.ConnectClusterMetricEvent; +import com.xiaojukeji.know.streaming.km.common.constant.Constant; +import com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemTypeEnum; +import com.xiaojukeji.know.streaming.km.common.utils.FutureWaitUtil; +import com.xiaojukeji.know.streaming.km.core.service.connect.cluster.ConnectClusterMetricService; +import com.xiaojukeji.know.streaming.km.core.service.version.VersionControlService; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +import java.util.Collections; +import java.util.List; + +import static com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemTypeEnum.METRIC_CONNECT_CLUSTER; + +/** + * @author didi + */ +@Component +public class ConnectClusterMetricCollector extends AbstractConnectMetricCollector { + protected static final ILog LOGGER = LogFactory.getLog(ConnectClusterMetricCollector.class); + + @Autowired + private VersionControlService versionControlService; + + @Autowired + private ConnectClusterMetricService connectClusterMetricService; + + @Override + public List collectConnectMetrics(ConnectCluster connectCluster) { + Long startTime = System.currentTimeMillis(); + Long clusterPhyId = connectCluster.getKafkaClusterPhyId(); + Long connectClusterId = connectCluster.getId(); + + ConnectClusterMetrics metrics = new ConnectClusterMetrics(clusterPhyId, connectClusterId); + metrics.putMetric(Constant.COLLECT_METRICS_COST_TIME_METRICS_NAME, Constant.COLLECT_METRICS_ERROR_COST_TIME); + List items = versionControlService.listVersionControlItem(getClusterVersion(connectCluster), collectorType().getCode()); + FutureWaitUtil future = this.getFutureUtilByClusterPhyId(connectClusterId); + + for (VersionControlItem item : items) { + future.runnableTask( + String.format("class=ConnectClusterMetricCollector||connectClusterId=%d||metricName=%s", connectClusterId, item.getName()), + 
30000, + () -> { + try { + Result ret = connectClusterMetricService.collectConnectClusterMetricsFromKafka(connectClusterId, item.getName()); + if (null == ret || !ret.hasData()) { + return null; + } + metrics.putMetric(ret.getData().getMetrics()); + + } catch (Exception e) { + LOGGER.error( + "method=collectConnectMetrics||connectClusterId={}||metricName={}||errMsg=exception!", + connectClusterId, item.getName(), e + ); + } + return null; + } + ); + } + + future.waitExecute(30000); + + metrics.putMetric(Constant.COLLECT_METRICS_COST_TIME_METRICS_NAME, (System.currentTimeMillis() - startTime) / 1000.0f); + + this.publishMetric(new ConnectClusterMetricEvent(this, Collections.singletonList(metrics))); + + return Collections.singletonList(metrics); + } + + @Override + public VersionItemTypeEnum collectorType() { + return METRIC_CONNECT_CLUSTER; + } +} diff --git a/km-collector/src/main/java/com/xiaojukeji/know/streaming/km/collector/metric/connect/ConnectConnectorMetricCollector.java b/km-collector/src/main/java/com/xiaojukeji/know/streaming/km/collector/metric/connect/ConnectConnectorMetricCollector.java new file mode 100644 index 00000000..282ce870 --- /dev/null +++ b/km-collector/src/main/java/com/xiaojukeji/know/streaming/km/collector/metric/connect/ConnectConnectorMetricCollector.java @@ -0,0 +1,102 @@ +package com.xiaojukeji.know.streaming.km.collector.metric.connect; + +import com.didiglobal.logi.log.ILog; +import com.didiglobal.logi.log.LogFactory; +import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.ConnectCluster; +import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.connect.ConnectorMetrics; +import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result; +import com.xiaojukeji.know.streaming.km.common.bean.entity.version.VersionControlItem; +import com.xiaojukeji.know.streaming.km.common.bean.event.metric.connect.ConnectorMetricEvent; +import com.xiaojukeji.know.streaming.km.common.constant.Constant; +import 
com.xiaojukeji.know.streaming.km.common.enums.connect.ConnectorTypeEnum; +import com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemTypeEnum; +import com.xiaojukeji.know.streaming.km.common.utils.FutureWaitUtil; +import com.xiaojukeji.know.streaming.km.core.service.connect.connector.ConnectorMetricService; +import com.xiaojukeji.know.streaming.km.core.service.connect.connector.ConnectorService; +import com.xiaojukeji.know.streaming.km.core.service.version.VersionControlService; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +import java.util.ArrayList; +import java.util.List; + +import static com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemTypeEnum.METRIC_CONNECT_CONNECTOR; + +/** + * @author didi + */ +@Component +public class ConnectConnectorMetricCollector extends AbstractConnectMetricCollector { + protected static final ILog LOGGER = LogFactory.getLog(ConnectConnectorMetricCollector.class); + + @Autowired + private VersionControlService versionControlService; + + @Autowired + private ConnectorService connectorService; + + @Autowired + private ConnectorMetricService connectorMetricService; + + @Override + public List collectConnectMetrics(ConnectCluster connectCluster) { + Long clusterPhyId = connectCluster.getKafkaClusterPhyId(); + Long connectClusterId = connectCluster.getId(); + + List items = versionControlService.listVersionControlItem(this.getClusterVersion(connectCluster), collectorType().getCode()); + Result> connectorList = connectorService.listConnectorsFromCluster(connectClusterId); + + FutureWaitUtil future = this.getFutureUtilByClusterPhyId(connectClusterId); + + List metricsList = new ArrayList<>(); + for (String connectorName : connectorList.getData()) { + ConnectorMetrics metrics = new ConnectorMetrics(connectClusterId, connectorName); + metrics.setClusterPhyId(clusterPhyId); + + metricsList.add(metrics); + future.runnableTask( + 
String.format("class=ConnectConnectorMetricCollector||connectClusterId=%d||connectorName=%s", connectClusterId, connectorName), + 30000, + () -> collectMetrics(connectClusterId, connectorName, metrics, items) + ); + } + future.waitResult(30000); + + this.publishMetric(new ConnectorMetricEvent(this, metricsList)); + + return metricsList; + } + + @Override + public VersionItemTypeEnum collectorType() { + return METRIC_CONNECT_CONNECTOR; + } + + /**************************************************** private method ****************************************************/ + + private void collectMetrics(Long connectClusterId, String connectorName, ConnectorMetrics metrics, List items) { + long startTime = System.currentTimeMillis(); + ConnectorTypeEnum connectorType = connectorService.getConnectorType(connectClusterId, connectorName); + + metrics.putMetric(Constant.COLLECT_METRICS_COST_TIME_METRICS_NAME, Constant.COLLECT_METRICS_ERROR_COST_TIME); + + for (VersionControlItem v : items) { + try { + Result ret = connectorMetricService.collectConnectClusterMetricsFromKafka(connectClusterId, connectorName, v.getName(), connectorType); + if (null == ret || ret.failed() || null == ret.getData()) { + continue; + } + + metrics.putMetric(ret.getData().getMetrics()); + } catch (Exception e) { + LOGGER.error( + "method=collectMetrics||connectClusterId={}||connectorName={}||metric={}||errMsg=exception!", + connectClusterId, connectorName, v.getName(), e + ); + } + } + + // 记录采集性能 + metrics.putMetric(Constant.COLLECT_METRICS_COST_TIME_METRICS_NAME, (System.currentTimeMillis() - startTime) / 1000.0f); + } +} diff --git a/km-collector/src/main/java/com/xiaojukeji/know/streaming/km/collector/metric/kafka/AbstractKafkaMetricCollector.java b/km-collector/src/main/java/com/xiaojukeji/know/streaming/km/collector/metric/kafka/AbstractKafkaMetricCollector.java new file mode 100644 index 00000000..4c995cfb --- /dev/null +++ 
b/km-collector/src/main/java/com/xiaojukeji/know/streaming/km/collector/metric/kafka/AbstractKafkaMetricCollector.java @@ -0,0 +1,50 @@ +package com.xiaojukeji.know.streaming.km.collector.metric.kafka; + +import com.didiglobal.logi.log.ILog; +import com.didiglobal.logi.log.LogFactory; +import com.xiaojukeji.know.streaming.km.collector.metric.AbstractMetricCollector; +import com.xiaojukeji.know.streaming.km.common.bean.entity.cluster.ClusterPhy; +import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil; +import com.xiaojukeji.know.streaming.km.common.utils.LoggerUtil; +import com.xiaojukeji.know.streaming.km.core.service.cluster.ClusterPhyService; +import org.springframework.beans.factory.annotation.Autowired; + +import java.util.List; + +/** + * @author didi + */ +public abstract class AbstractKafkaMetricCollector extends AbstractMetricCollector { + private static final ILog LOGGER = LogFactory.getLog(AbstractMetricCollector.class); + + protected static final ILog METRIC_COLLECTED_LOGGER = LoggerUtil.getMetricCollectedLogger(); + + @Autowired + private ClusterPhyService clusterPhyService; + + public abstract List collectKafkaMetrics(ClusterPhy clusterPhy); + + @Override + public String getClusterVersion(ClusterPhy clusterPhy){ + return clusterPhyService.getVersionFromCacheFirst(clusterPhy.getId()); + } + + @Override + public void collectMetrics(ClusterPhy clusterPhy) { + long startTime = System.currentTimeMillis(); + + // 采集指标 + List metricsList = this.collectKafkaMetrics(clusterPhy); + + // 输出耗时信息 + LOGGER.info( + "metricType={}||clusterPhyId={}||costTimeUnitMs={}", + this.collectorType().getMessage(), clusterPhy.getId(), System.currentTimeMillis() - startTime + ); + + // 输出采集到的指标信息 + METRIC_COLLECTED_LOGGER.debug("metricType={}||clusterPhyId={}||metrics={}!", + this.collectorType().getMessage(), clusterPhy.getId(), ConvertUtil.obj2Json(metricsList) + ); + } +} diff --git 
a/km-collector/src/main/java/com/xiaojukeji/know/streaming/km/collector/metric/kafka/BrokerMetricCollector.java b/km-collector/src/main/java/com/xiaojukeji/know/streaming/km/collector/metric/kafka/BrokerMetricCollector.java index 1753a875..6ae2a063 100644 --- a/km-collector/src/main/java/com/xiaojukeji/know/streaming/km/collector/metric/kafka/BrokerMetricCollector.java +++ b/km-collector/src/main/java/com/xiaojukeji/know/streaming/km/collector/metric/kafka/BrokerMetricCollector.java @@ -2,7 +2,6 @@ package com.xiaojukeji.know.streaming.km.collector.metric.kafka; import com.didiglobal.logi.log.ILog; import com.didiglobal.logi.log.LogFactory; -import com.xiaojukeji.know.streaming.km.collector.metric.AbstractMetricCollector; import com.xiaojukeji.know.streaming.km.common.bean.entity.broker.Broker; import com.xiaojukeji.know.streaming.km.common.bean.entity.cluster.ClusterPhy; import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.BrokerMetrics; @@ -27,7 +26,7 @@ import static com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemT * @author didi */ @Component -public class BrokerMetricCollector extends AbstractMetricCollector { +public class BrokerMetricCollector extends AbstractKafkaMetricCollector { private static final ILog LOGGER = LogFactory.getLog(BrokerMetricCollector.class); @Autowired @@ -44,7 +43,7 @@ public class BrokerMetricCollector extends AbstractMetricCollector brokers = brokerService.listAliveBrokersFromDB(clusterPhy.getId()); - List items = versionControlService.listVersionControlItem(clusterPhyId, collectorType().getCode()); + List items = versionControlService.listVersionControlItem(this.getClusterVersion(clusterPhy), collectorType().getCode()); FutureWaitUtil future = this.getFutureUtilByClusterPhyId(clusterPhyId); diff --git a/km-collector/src/main/java/com/xiaojukeji/know/streaming/km/collector/metric/kafka/ClusterMetricCollector.java 
b/km-collector/src/main/java/com/xiaojukeji/know/streaming/km/collector/metric/kafka/ClusterMetricCollector.java index e70bf6f9..f918a0d5 100644 --- a/km-collector/src/main/java/com/xiaojukeji/know/streaming/km/collector/metric/kafka/ClusterMetricCollector.java +++ b/km-collector/src/main/java/com/xiaojukeji/know/streaming/km/collector/metric/kafka/ClusterMetricCollector.java @@ -2,7 +2,6 @@ package com.xiaojukeji.know.streaming.km.collector.metric.kafka; import com.didiglobal.logi.log.ILog; import com.didiglobal.logi.log.LogFactory; -import com.xiaojukeji.know.streaming.km.collector.metric.AbstractMetricCollector; import com.xiaojukeji.know.streaming.km.common.bean.entity.cluster.ClusterPhy; import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.ClusterMetrics; import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result; @@ -25,7 +24,7 @@ import static com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemT * @author didi */ @Component -public class ClusterMetricCollector extends AbstractMetricCollector { +public class ClusterMetricCollector extends AbstractKafkaMetricCollector { protected static final ILog LOGGER = LogFactory.getLog(ClusterMetricCollector.class); @Autowired @@ -38,7 +37,7 @@ public class ClusterMetricCollector extends AbstractMetricCollector collectKafkaMetrics(ClusterPhy clusterPhy) { Long startTime = System.currentTimeMillis(); Long clusterPhyId = clusterPhy.getId(); - List items = versionControlService.listVersionControlItem(clusterPhyId, collectorType().getCode()); + List items = versionControlService.listVersionControlItem(this.getClusterVersion(clusterPhy), collectorType().getCode()); ClusterMetrics metrics = new ClusterMetrics(clusterPhyId, clusterPhy.getKafkaVersion()); metrics.putMetric(Constant.COLLECT_METRICS_COST_TIME_METRICS_NAME, Constant.COLLECT_METRICS_ERROR_COST_TIME); diff --git a/km-collector/src/main/java/com/xiaojukeji/know/streaming/km/collector/metric/kafka/GroupMetricCollector.java 
b/km-collector/src/main/java/com/xiaojukeji/know/streaming/km/collector/metric/kafka/GroupMetricCollector.java index 3c1f0df4..5e04466f 100644 --- a/km-collector/src/main/java/com/xiaojukeji/know/streaming/km/collector/metric/kafka/GroupMetricCollector.java +++ b/km-collector/src/main/java/com/xiaojukeji/know/streaming/km/collector/metric/kafka/GroupMetricCollector.java @@ -2,7 +2,6 @@ package com.xiaojukeji.know.streaming.km.collector.metric.kafka; import com.didiglobal.logi.log.ILog; import com.didiglobal.logi.log.LogFactory; -import com.xiaojukeji.know.streaming.km.collector.metric.AbstractMetricCollector; import com.xiaojukeji.know.streaming.km.common.bean.entity.cluster.ClusterPhy; import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.GroupMetrics; import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result; @@ -28,7 +27,7 @@ import static com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemT * @author didi */ @Component -public class GroupMetricCollector extends AbstractMetricCollector { +public class GroupMetricCollector extends AbstractKafkaMetricCollector { protected static final ILog LOGGER = LogFactory.getLog(GroupMetricCollector.class); @Autowired @@ -46,7 +45,7 @@ public class GroupMetricCollector extends AbstractMetricCollector List groupNameList = new ArrayList<>(); try { - groupNameList = groupService.listGroupsFromKafka(clusterPhyId); + groupNameList = groupService.listGroupsFromKafka(clusterPhy); } catch (Exception e) { LOGGER.error("method=collectKafkaMetrics||clusterPhyId={}||msg=exception!", clusterPhyId, e); } @@ -55,7 +54,7 @@ public class GroupMetricCollector extends AbstractMetricCollector return Collections.emptyList(); } - List items = versionControlService.listVersionControlItem(clusterPhyId, collectorType().getCode()); + List items = versionControlService.listVersionControlItem(this.getClusterVersion(clusterPhy), collectorType().getCode()); FutureWaitUtil future = 
this.getFutureUtilByClusterPhyId(clusterPhyId); diff --git a/km-collector/src/main/java/com/xiaojukeji/know/streaming/km/collector/metric/kafka/PartitionMetricCollector.java b/km-collector/src/main/java/com/xiaojukeji/know/streaming/km/collector/metric/kafka/PartitionMetricCollector.java index fbb710b9..30d2cf4b 100644 --- a/km-collector/src/main/java/com/xiaojukeji/know/streaming/km/collector/metric/kafka/PartitionMetricCollector.java +++ b/km-collector/src/main/java/com/xiaojukeji/know/streaming/km/collector/metric/kafka/PartitionMetricCollector.java @@ -2,7 +2,6 @@ package com.xiaojukeji.know.streaming.km.collector.metric.kafka; import com.didiglobal.logi.log.ILog; import com.didiglobal.logi.log.LogFactory; -import com.xiaojukeji.know.streaming.km.collector.metric.AbstractMetricCollector; import com.xiaojukeji.know.streaming.km.common.bean.entity.cluster.ClusterPhy; import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.PartitionMetrics; import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result; @@ -26,7 +25,7 @@ import static com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemT * @author didi */ @Component -public class PartitionMetricCollector extends AbstractMetricCollector { +public class PartitionMetricCollector extends AbstractKafkaMetricCollector { protected static final ILog LOGGER = LogFactory.getLog(PartitionMetricCollector.class); @Autowired @@ -42,7 +41,7 @@ public class PartitionMetricCollector extends AbstractMetricCollector collectKafkaMetrics(ClusterPhy clusterPhy) { Long clusterPhyId = clusterPhy.getId(); List topicList = topicService.listTopicsFromCacheFirst(clusterPhyId); - List items = versionControlService.listVersionControlItem(clusterPhyId, collectorType().getCode()); + List items = versionControlService.listVersionControlItem(this.getClusterVersion(clusterPhy), collectorType().getCode()); FutureWaitUtil future = this.getFutureUtilByClusterPhyId(clusterPhyId); diff --git 
a/km-collector/src/main/java/com/xiaojukeji/know/streaming/km/collector/metric/kafka/ReplicaMetricCollector.java b/km-collector/src/main/java/com/xiaojukeji/know/streaming/km/collector/metric/kafka/ReplicaMetricCollector.java index c042ae1d..e6c5efcd 100644 --- a/km-collector/src/main/java/com/xiaojukeji/know/streaming/km/collector/metric/kafka/ReplicaMetricCollector.java +++ b/km-collector/src/main/java/com/xiaojukeji/know/streaming/km/collector/metric/kafka/ReplicaMetricCollector.java @@ -2,7 +2,6 @@ package com.xiaojukeji.know.streaming.km.collector.metric.kafka; import com.didiglobal.logi.log.ILog; import com.didiglobal.logi.log.LogFactory; -import com.xiaojukeji.know.streaming.km.collector.metric.AbstractMetricCollector; import com.xiaojukeji.know.streaming.km.common.bean.entity.cluster.ClusterPhy; import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.ReplicationMetrics; import com.xiaojukeji.know.streaming.km.common.bean.entity.partition.Partition; @@ -27,7 +26,7 @@ import static com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemT * @author didi */ @Component -public class ReplicaMetricCollector extends AbstractMetricCollector { +public class ReplicaMetricCollector extends AbstractKafkaMetricCollector { protected static final ILog LOGGER = LogFactory.getLog(ReplicaMetricCollector.class); @Autowired @@ -42,8 +41,8 @@ public class ReplicaMetricCollector extends AbstractMetricCollector collectKafkaMetrics(ClusterPhy clusterPhy) { Long clusterPhyId = clusterPhy.getId(); - List items = versionControlService.listVersionControlItem(clusterPhyId, collectorType().getCode()); List partitions = partitionService.listPartitionFromCacheFirst(clusterPhyId); + List items = versionControlService.listVersionControlItem(this.getClusterVersion(clusterPhy), collectorType().getCode()); FutureWaitUtil future = this.getFutureUtilByClusterPhyId(clusterPhyId); diff --git 
a/km-collector/src/main/java/com/xiaojukeji/know/streaming/km/collector/metric/kafka/TopicMetricCollector.java b/km-collector/src/main/java/com/xiaojukeji/know/streaming/km/collector/metric/kafka/TopicMetricCollector.java index bec9f706..3cd16a20 100644 --- a/km-collector/src/main/java/com/xiaojukeji/know/streaming/km/collector/metric/kafka/TopicMetricCollector.java +++ b/km-collector/src/main/java/com/xiaojukeji/know/streaming/km/collector/metric/kafka/TopicMetricCollector.java @@ -2,7 +2,6 @@ package com.xiaojukeji.know.streaming.km.collector.metric.kafka; import com.didiglobal.logi.log.ILog; import com.didiglobal.logi.log.LogFactory; -import com.xiaojukeji.know.streaming.km.collector.metric.AbstractMetricCollector; import com.xiaojukeji.know.streaming.km.common.bean.entity.cluster.ClusterPhy; import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.TopicMetrics; import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result; @@ -30,7 +29,7 @@ import static com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemT * @author didi */ @Component -public class TopicMetricCollector extends AbstractMetricCollector { +public class TopicMetricCollector extends AbstractKafkaMetricCollector { protected static final ILog LOGGER = LogFactory.getLog(TopicMetricCollector.class); @Autowired @@ -48,7 +47,7 @@ public class TopicMetricCollector extends AbstractMetricCollector public List collectKafkaMetrics(ClusterPhy clusterPhy) { Long clusterPhyId = clusterPhy.getId(); List topics = topicService.listTopicsFromCacheFirst(clusterPhyId); - List items = versionControlService.listVersionControlItem(clusterPhyId, collectorType().getCode()); + List items = versionControlService.listVersionControlItem(this.getClusterVersion(clusterPhy), collectorType().getCode()); FutureWaitUtil future = this.getFutureUtilByClusterPhyId(clusterPhyId); diff --git a/km-collector/src/main/java/com/xiaojukeji/know/streaming/km/collector/metric/kafka/ZookeeperMetricCollector.java 
b/km-collector/src/main/java/com/xiaojukeji/know/streaming/km/collector/metric/kafka/ZookeeperMetricCollector.java index f84457c4..314bf728 100644 --- a/km-collector/src/main/java/com/xiaojukeji/know/streaming/km/collector/metric/kafka/ZookeeperMetricCollector.java +++ b/km-collector/src/main/java/com/xiaojukeji/know/streaming/km/collector/metric/kafka/ZookeeperMetricCollector.java @@ -2,7 +2,6 @@ package com.xiaojukeji.know.streaming.km.collector.metric.kafka; import com.didiglobal.logi.log.ILog; import com.didiglobal.logi.log.LogFactory; -import com.xiaojukeji.know.streaming.km.collector.metric.AbstractMetricCollector; import com.xiaojukeji.know.streaming.km.common.bean.entity.cluster.ClusterPhy; import com.xiaojukeji.know.streaming.km.common.bean.entity.config.ZKConfig; import com.xiaojukeji.know.streaming.km.common.bean.entity.kafkacontroller.KafkaController; @@ -34,7 +33,7 @@ import static com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemT * @author didi */ @Component -public class ZookeeperMetricCollector extends AbstractMetricCollector { +public class ZookeeperMetricCollector extends AbstractKafkaMetricCollector { protected static final ILog LOGGER = LogFactory.getLog(ZookeeperMetricCollector.class); @Autowired @@ -53,7 +52,7 @@ public class ZookeeperMetricCollector extends AbstractMetricCollector collectKafkaMetrics(ClusterPhy clusterPhy) { Long startTime = System.currentTimeMillis(); Long clusterPhyId = clusterPhy.getId(); - List items = versionControlService.listVersionControlItem(clusterPhyId, collectorType().getCode()); + List items = versionControlService.listVersionControlItem(this.getClusterVersion(clusterPhy), collectorType().getCode()); List aliveZKList = zookeeperService.listFromDBByCluster(clusterPhyId) .stream() .filter(elem -> Constant.ALIVE.equals(elem.getStatus())) diff --git a/km-collector/src/main/java/com/xiaojukeji/know/streaming/km/collector/sink/connect/ConnectClusterMetricESSender.java 
b/km-collector/src/main/java/com/xiaojukeji/know/streaming/km/collector/sink/connect/ConnectClusterMetricESSender.java new file mode 100644 index 00000000..25bd7a3a --- /dev/null +++ b/km-collector/src/main/java/com/xiaojukeji/know/streaming/km/collector/sink/connect/ConnectClusterMetricESSender.java @@ -0,0 +1,33 @@ +package com.xiaojukeji.know.streaming.km.collector.sink.connect; + +import com.didiglobal.logi.log.ILog; +import com.didiglobal.logi.log.LogFactory; +import com.xiaojukeji.know.streaming.km.collector.sink.AbstractMetricESSender; +import com.xiaojukeji.know.streaming.km.common.bean.event.metric.connect.ConnectClusterMetricEvent; +import com.xiaojukeji.know.streaming.km.common.bean.po.metrice.connect.ConnectClusterMetricPO; +import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil; +import org.springframework.context.ApplicationListener; +import org.springframework.stereotype.Component; + +import javax.annotation.PostConstruct; + +import static com.xiaojukeji.know.streaming.km.persistence.es.template.TemplateConstant.CONNECT_CLUSTER_INDEX; + +/** + * @author wyb + * @date 2022/11/7 + */ +@Component +public class ConnectClusterMetricESSender extends AbstractMetricESSender implements ApplicationListener { + protected static final ILog LOGGER = LogFactory.getLog(ConnectClusterMetricESSender.class); + + @PostConstruct + public void init(){ + LOGGER.info("class=ConnectClusterMetricESSender||method=init||msg=init finished"); + } + + @Override + public void onApplicationEvent(ConnectClusterMetricEvent event) { + send2es(CONNECT_CLUSTER_INDEX, ConvertUtil.list2List(event.getConnectClusterMetrics(), ConnectClusterMetricPO.class)); + } +} diff --git a/km-collector/src/main/java/com/xiaojukeji/know/streaming/km/collector/sink/connect/ConnectorMetricESSender.java b/km-collector/src/main/java/com/xiaojukeji/know/streaming/km/collector/sink/connect/ConnectorMetricESSender.java new file mode 100644 index 00000000..4234c974 --- /dev/null +++ 
b/km-collector/src/main/java/com/xiaojukeji/know/streaming/km/collector/sink/connect/ConnectorMetricESSender.java @@ -0,0 +1,33 @@ +package com.xiaojukeji.know.streaming.km.collector.sink.connect; + +import com.didiglobal.logi.log.ILog; +import com.didiglobal.logi.log.LogFactory; +import com.xiaojukeji.know.streaming.km.collector.sink.AbstractMetricESSender; +import com.xiaojukeji.know.streaming.km.common.bean.event.metric.connect.ConnectorMetricEvent; +import com.xiaojukeji.know.streaming.km.common.bean.po.metrice.connect.ConnectorMetricPO; +import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil; +import org.springframework.context.ApplicationListener; +import org.springframework.stereotype.Component; + +import javax.annotation.PostConstruct; + +import static com.xiaojukeji.know.streaming.km.persistence.es.template.TemplateConstant.CONNECT_CONNECTOR_INDEX; + +/** + * @author wyb + * @date 2022/11/7 + */ +@Component +public class ConnectorMetricESSender extends AbstractMetricESSender implements ApplicationListener { + protected static final ILog LOGGER = LogFactory.getLog(ConnectorMetricESSender.class); + + @PostConstruct + public void init(){ + LOGGER.info("class=ConnectorMetricESSender||method=init||msg=init finished"); + } + + @Override + public void onApplicationEvent(ConnectorMetricEvent event) { + send2es(CONNECT_CONNECTOR_INDEX, ConvertUtil.list2List(event.getConnectorMetricsList(), ConnectorMetricPO.class)); + } +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/dto/cluster/ClusterConnectorsOverviewDTO.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/dto/cluster/ClusterConnectorsOverviewDTO.java new file mode 100644 index 00000000..75970724 --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/dto/cluster/ClusterConnectorsOverviewDTO.java @@ -0,0 +1,28 @@ +package com.xiaojukeji.know.streaming.km.common.bean.dto.cluster; + +import 
com.xiaojukeji.know.streaming.km.common.bean.dto.metrices.MetricDTO; +import com.xiaojukeji.know.streaming.km.common.bean.dto.pagination.PaginationSortDTO; +import io.swagger.annotations.ApiModelProperty; +import lombok.Data; + +import javax.validation.constraints.NotNull; +import java.util.List; + + +/** + * @author zengqiao + * @date 22/02/24 + */ +@Data +public class ClusterConnectorsOverviewDTO extends PaginationSortDTO { + @NotNull(message = "latestMetricNames不允许为空") + @ApiModelProperty("需要指标点的信息") + private List latestMetricNames; + + @NotNull(message = "metricLines不允许为空") + @ApiModelProperty("需要指标曲线的信息") + private MetricDTO metricLines; + + @ApiModelProperty("需要排序的指标名称列表,比较第一个不为空的metric") + private List sortMetricNameList; +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/dto/connect/ClusterConnectorDTO.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/dto/connect/ClusterConnectorDTO.java new file mode 100644 index 00000000..71cfcce8 --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/dto/connect/ClusterConnectorDTO.java @@ -0,0 +1,32 @@ +package com.xiaojukeji.know.streaming.km.common.bean.dto.connect; + +import com.xiaojukeji.know.streaming.km.common.bean.dto.BaseDTO; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import lombok.Data; +import lombok.NoArgsConstructor; + +import javax.validation.constraints.NotBlank; +import javax.validation.constraints.NotNull; + +/** + * @author zengqiao + * @date 2022-10-17 + */ +@Data +@NoArgsConstructor +@ApiModel(description = "集群Connector") +public class ClusterConnectorDTO extends BaseDTO { + @NotNull(message = "connectClusterId不允许为空") + @ApiModelProperty(value = "Connector集群ID", example = "1") + private Long connectClusterId; + + @NotBlank(message = "name不允许为空串") + @ApiModelProperty(value = "Connector名称", example = "know-streaming-connector") + private String connectorName; + 
+ public ClusterConnectorDTO(Long connectClusterId, String connectorName) { + this.connectClusterId = connectClusterId; + this.connectorName = connectorName; + } +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/dto/connect/cluster/ConnectClusterDTO.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/dto/connect/cluster/ConnectClusterDTO.java new file mode 100644 index 00000000..a8ca1ab2 --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/dto/connect/cluster/ConnectClusterDTO.java @@ -0,0 +1,29 @@ +package com.xiaojukeji.know.streaming.km.common.bean.dto.connect.cluster; + +import com.xiaojukeji.know.streaming.km.common.bean.dto.BaseDTO; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import lombok.Data; + +/** + * @author zengqiao + * @date 2022-10-17 + */ +@Data +@ApiModel(description = "集群Connector") +public class ConnectClusterDTO extends BaseDTO { + @ApiModelProperty(value = "Connect集群ID", example = "1") + private Long id; + + @ApiModelProperty(value = "Connect集群名称", example = "know-streaming") + private String name; + + @ApiModelProperty(value = "Connect集群URL", example = "http://127.0.0.1:8080") + private String clusterUrl; + + @ApiModelProperty(value = "Connect集群版本", example = "2.5.1") + private String version; + + @ApiModelProperty(value = "JMX配置", example = "") + private String jmxProperties; +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/dto/connect/connector/ConnectorActionDTO.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/dto/connect/connector/ConnectorActionDTO.java new file mode 100644 index 00000000..f4294d68 --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/dto/connect/connector/ConnectorActionDTO.java @@ -0,0 +1,20 @@ +package com.xiaojukeji.know.streaming.km.common.bean.dto.connect.connector; + +import 
com.xiaojukeji.know.streaming.km.common.bean.dto.connect.ClusterConnectorDTO; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import lombok.Data; + +import javax.validation.constraints.NotBlank; + +/** + * @author zengqiao + * @date 2022-10-17 + */ +@Data +@ApiModel(description = "操作Connector") +public class ConnectorActionDTO extends ClusterConnectorDTO { + @NotBlank(message = "action不允许为空串") + @ApiModelProperty(value = "Connector名称", example = "stop|restart|resume") + private String action; +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/dto/connect/connector/ConnectorConfigModifyDTO.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/dto/connect/connector/ConnectorConfigModifyDTO.java new file mode 100644 index 00000000..40f617c8 --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/dto/connect/connector/ConnectorConfigModifyDTO.java @@ -0,0 +1,21 @@ +package com.xiaojukeji.know.streaming.km.common.bean.dto.connect.connector; + +import com.xiaojukeji.know.streaming.km.common.bean.dto.connect.ClusterConnectorDTO; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import lombok.Data; + +import javax.validation.constraints.NotNull; +import java.util.Properties; + +/** + * @author zengqiao + * @date 2022-10-17 + */ +@Data +@ApiModel(description = "修改Connector配置") +public class ConnectorConfigModifyDTO extends ClusterConnectorDTO { + @NotNull(message = "configs不允许为空") + @ApiModelProperty(value = "配置", example = "") + private Properties configs; +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/dto/connect/connector/ConnectorCreateDTO.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/dto/connect/connector/ConnectorCreateDTO.java new file mode 100644 index 00000000..a2272118 --- /dev/null +++ 
b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/dto/connect/connector/ConnectorCreateDTO.java @@ -0,0 +1,21 @@ +package com.xiaojukeji.know.streaming.km.common.bean.dto.connect.connector; + +import com.xiaojukeji.know.streaming.km.common.bean.dto.connect.ClusterConnectorDTO; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import lombok.Data; + +import javax.validation.constraints.NotNull; +import java.util.Properties; + +/** + * @author zengqiao + * @date 2022-10-17 + */ +@Data +@ApiModel(description = "创建Connector") +public class ConnectorCreateDTO extends ClusterConnectorDTO { + @NotNull(message = "configs不允许为空") + @ApiModelProperty(value = "配置", example = "") + private Properties configs; +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/dto/connect/connector/ConnectorDeleteDTO.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/dto/connect/connector/ConnectorDeleteDTO.java new file mode 100644 index 00000000..55dce017 --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/dto/connect/connector/ConnectorDeleteDTO.java @@ -0,0 +1,14 @@ +package com.xiaojukeji.know.streaming.km.common.bean.dto.connect.connector; + +import com.xiaojukeji.know.streaming.km.common.bean.dto.connect.ClusterConnectorDTO; +import io.swagger.annotations.ApiModel; +import lombok.Data; + +/** + * @author zengqiao + * @date 2022-10-17 + */ +@Data +@ApiModel(description = "删除Connector") +public class ConnectorDeleteDTO extends ClusterConnectorDTO { +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/dto/connect/task/TaskActionDTO.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/dto/connect/task/TaskActionDTO.java new file mode 100644 index 00000000..a5d99188 --- /dev/null +++ 
b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/dto/connect/task/TaskActionDTO.java @@ -0,0 +1,20 @@ +package com.xiaojukeji.know.streaming.km.common.bean.dto.connect.task; + +import com.xiaojukeji.know.streaming.km.common.bean.dto.connect.connector.ConnectorActionDTO; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import lombok.Data; + +import javax.validation.constraints.NotNull; + +/** + * @author zengqiao + * @date 2022-10-17 + */ +@Data +@ApiModel(description = "操作Task") +public class TaskActionDTO extends ConnectorActionDTO { + @NotNull(message = "taskId不允许为NULL") + @ApiModelProperty(value = "taskId", example = "123") + private Long taskId; +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/dto/metrices/connect/MetricsConnectClustersDTO.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/dto/metrices/connect/MetricsConnectClustersDTO.java new file mode 100644 index 00000000..7986553c --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/dto/metrices/connect/MetricsConnectClustersDTO.java @@ -0,0 +1,22 @@ +package com.xiaojukeji.know.streaming.km.common.bean.dto.metrices.connect; + +import com.xiaojukeji.know.streaming.km.common.bean.dto.metrices.MetricDTO; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.NoArgsConstructor; + +import java.util.List; + +/** + * @author didi + */ +@Data +@NoArgsConstructor +@AllArgsConstructor +@ApiModel(description = "Connect集群指标查询信息") +public class MetricsConnectClustersDTO extends MetricDTO { + @ApiModelProperty("Connect集群ID") + private List connectClusterIdList; +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/dto/metrices/connect/MetricsConnectorsDTO.java 
b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/dto/metrices/connect/MetricsConnectorsDTO.java new file mode 100644 index 00000000..a51700f1 --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/dto/metrices/connect/MetricsConnectorsDTO.java @@ -0,0 +1,23 @@ +package com.xiaojukeji.know.streaming.km.common.bean.dto.metrices.connect; + +import com.xiaojukeji.know.streaming.km.common.bean.dto.connect.ClusterConnectorDTO; +import com.xiaojukeji.know.streaming.km.common.bean.dto.metrices.MetricDTO; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.NoArgsConstructor; + +import java.util.List; + +/** + * @author didi + */ +@Data +@NoArgsConstructor +@AllArgsConstructor +@ApiModel(description = "Connector指标查询信息") +public class MetricsConnectorsDTO extends MetricDTO { + @ApiModelProperty("Connector列表") + private List connectorNameList; +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/broker/Broker.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/broker/Broker.java index a1e39f34..752aade0 100644 --- a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/broker/Broker.java +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/broker/Broker.java @@ -3,7 +3,6 @@ package com.xiaojukeji.know.streaming.km.common.bean.entity.broker; import com.alibaba.fastjson.TypeReference; import com.xiaojukeji.know.streaming.km.common.bean.entity.common.IpPortData; -import com.xiaojukeji.know.streaming.km.common.bean.entity.config.JmxConfig; import com.xiaojukeji.know.streaming.km.common.bean.po.broker.BrokerPO; import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil; import lombok.AllArgsConstructor; diff --git 
a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/config/metric/UserMetricConfig.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/config/metric/UserMetricConfig.java index e244181a..171cd68f 100644 --- a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/config/metric/UserMetricConfig.java +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/config/metric/UserMetricConfig.java @@ -1,7 +1,5 @@ package com.xiaojukeji.know.streaming.km.common.bean.entity.config.metric; -import com.xiaojukeji.know.streaming.km.common.constant.Constant; -import lombok.AllArgsConstructor; import lombok.Data; import lombok.NoArgsConstructor; diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/connect/ConnectCluster.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/connect/ConnectCluster.java new file mode 100644 index 00000000..a4c67bbc --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/connect/ConnectCluster.java @@ -0,0 +1,61 @@ +package com.xiaojukeji.know.streaming.km.common.bean.entity.connect; + +import com.xiaojukeji.know.streaming.km.common.bean.entity.EntityIdInterface; +import lombok.Data; + +import java.io.Serializable; + +@Data +public class ConnectCluster implements Serializable, Comparable, EntityIdInterface { + /** + * 集群ID + */ + private Long id; + + /** + * 集群名字 + */ + private String name; + + /** + * 集群使用的消费组 + */ + private String groupName; + + /** + * 集群使用的消费组状态,也表示集群状态 + * @see com.xiaojukeji.know.streaming.km.common.enums.group.GroupStateEnum + */ + private Integer state; + + /** + * worker中显示的leader url信息 + */ + private String memberLeaderUrl; + + /** + * 版本信息 + */ + private String version; + + /** + * jmx配置 + * @see com.xiaojukeji.know.streaming.km.common.bean.entity.config.JmxConfig + */ + private String jmxProperties; + + /** + * 
Kafka集群ID + */ + private Long kafkaClusterPhyId; + + /** + * 集群地址 + */ + private String clusterUrl; + + @Override + public int compareTo(ConnectCluster connectCluster) { + return this.id.compareTo(connectCluster.getId()); + } +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/connect/ConnectClusterMetadata.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/connect/ConnectClusterMetadata.java new file mode 100644 index 00000000..b3243756 --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/connect/ConnectClusterMetadata.java @@ -0,0 +1,38 @@ +package com.xiaojukeji.know.streaming.km.common.bean.entity.connect; + +import com.xiaojukeji.know.streaming.km.common.enums.group.GroupStateEnum; +import lombok.Data; +import lombok.NoArgsConstructor; + +import java.io.Serializable; + +@Data +@NoArgsConstructor +public class ConnectClusterMetadata implements Serializable { + /** + * Kafka集群名字 + */ + private Long kafkaClusterPhyId; + + /** + * 集群使用的消费组 + */ + private String groupName; + + /** + * 集群使用的消费组状态,也表示集群状态 + */ + private GroupStateEnum state; + + /** + * worker中显示的leader url信息 + */ + private String memberLeaderUrl; + + public ConnectClusterMetadata(Long kafkaClusterPhyId, String groupName, GroupStateEnum state, String memberLeaderUrl) { + this.kafkaClusterPhyId = kafkaClusterPhyId; + this.groupName = groupName; + this.state = state; + this.memberLeaderUrl = memberLeaderUrl; + } +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/connect/ConnectWorker.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/connect/ConnectWorker.java new file mode 100644 index 00000000..69a4f747 --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/connect/ConnectWorker.java @@ -0,0 +1,87 @@ +package com.xiaojukeji.know.streaming.km.common.bean.entity.connect; + 
+import com.didiglobal.logi.log.ILog; +import com.didiglobal.logi.log.LogFactory; +import com.xiaojukeji.know.streaming.km.common.utils.CommonUtils; +import lombok.Data; +import lombok.NoArgsConstructor; + +import java.io.Serializable; +import java.net.URI; + +@Data +@NoArgsConstructor +public class ConnectWorker implements Serializable { + + protected static final ILog LOGGER = LogFactory.getLog(ConnectWorker.class); + + /** + * Kafka集群ID + */ + private Long kafkaClusterPhyId; + + /** + * 集群ID + */ + private Long connectClusterId; + + /** + * 成员ID + */ + private String memberId; + + /** + * 主机 + */ + private String host; + + /** + * Jmx端口 + */ + private Integer jmxPort; + + /** + * URL + */ + private String url; + + /** + * leader的URL + */ + private String leaderUrl; + + /** + * 1:是leader,0:不是leader + */ + private Integer leader; + + /** + * worker地址 + */ + private String workerId; + + public ConnectWorker(Long kafkaClusterPhyId, + Long connectClusterId, + String memberId, + String host, + Integer jmxPort, + String url, + String leaderUrl, + Integer leader) { + this.kafkaClusterPhyId = kafkaClusterPhyId; + this.connectClusterId = connectClusterId; + this.memberId = memberId; + this.host = host; + this.jmxPort = jmxPort; + this.url = url; + this.leaderUrl = leaderUrl; + this.leader = leader; + String workerId = CommonUtils.getWorkerId(url); + if (workerId == null) { + workerId = memberId; + LOGGER.error("class=ConnectWorker||connectClusterId={}||memberId={}||url={}||msg=analysis url fail" + , connectClusterId, memberId, url); + } + this.workerId = workerId; + } +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/connect/WorkerConnector.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/connect/WorkerConnector.java new file mode 100644 index 00000000..423e21ce --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/connect/WorkerConnector.java @@ -0,0 
+1,58 @@ +package com.xiaojukeji.know.streaming.km.common.bean.entity.connect; + +import lombok.Data; +import lombok.NoArgsConstructor; + +import java.io.Serializable; + +@Data +@NoArgsConstructor +public class WorkerConnector implements Serializable { + /** + * connect集群ID + */ + private Long connectClusterId; + + /** + * kafka集群ID + */ + private Long kafkaClusterPhyId; + + /** + * connector名称 + */ + private String connectorName; + + private String workerMemberId; + + /** + * 任务状态 + */ + private String state; + + /** + * 任务ID + */ + private Integer taskId; + + /** + * worker信息 + */ + private String workerId; + + /** + * 错误原因 + */ + private String trace; + + public WorkerConnector(Long kafkaClusterPhyId, Long connectClusterId, String connectorName, String workerMemberId, Integer taskId, String state, String workerId, String trace) { + this.kafkaClusterPhyId = kafkaClusterPhyId; + this.connectClusterId = connectClusterId; + this.connectorName = connectorName; + this.workerMemberId = workerMemberId; + this.taskId = taskId; + this.state = state; + this.workerId = workerId; + this.trace = trace; + } +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/connect/config/ConnectConfigInfo.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/connect/config/ConnectConfigInfo.java new file mode 100644 index 00000000..ffe8c332 --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/connect/config/ConnectConfigInfo.java @@ -0,0 +1,19 @@ +package com.xiaojukeji.know.streaming.km.common.bean.entity.connect.config; + +import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.NoArgsConstructor; +import org.apache.kafka.connect.runtime.rest.entities.ConfigInfo; + + +/** + * @see ConfigInfo + */ +@Data +@NoArgsConstructor +@AllArgsConstructor +public class ConnectConfigInfo { + private ConnectConfigKeyInfo definition; + + private ConnectConfigValueInfo value; +} 
diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/connect/config/ConnectConfigInfos.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/connect/config/ConnectConfigInfos.java new file mode 100644 index 00000000..bb8b773f --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/connect/config/ConnectConfigInfos.java @@ -0,0 +1,62 @@ +package com.xiaojukeji.know.streaming.km.common.bean.entity.connect.config; + + +import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.NoArgsConstructor; +import org.apache.kafka.connect.runtime.rest.entities.ConfigInfo; +import org.apache.kafka.connect.runtime.rest.entities.ConfigInfos; + +import java.util.ArrayList; +import java.util.List; + +/** + * @see ConfigInfos + */ +@Data +@NoArgsConstructor +@AllArgsConstructor +public class ConnectConfigInfos { + private String name; + + private int errorCount; + + private List groups; + + private List configs; + + public ConnectConfigInfos(ConfigInfos configInfos) { + this.name = configInfos.name(); + this.errorCount = configInfos.errorCount(); + this.groups = configInfos.groups(); + + this.configs = new ArrayList<>(); + for (ConfigInfo configInfo: configInfos.values()) { + ConnectConfigKeyInfo definition = new ConnectConfigKeyInfo(); + definition.setName(configInfo.configKey().name()); + definition.setType(configInfo.configKey().type()); + definition.setRequired(configInfo.configKey().required()); + definition.setDefaultValue(configInfo.configKey().defaultValue()); + definition.setImportance(configInfo.configKey().importance()); + definition.setDocumentation(configInfo.configKey().documentation()); + definition.setGroup(configInfo.configKey().group()); + definition.setOrderInGroup(configInfo.configKey().orderInGroup()); + definition.setWidth(configInfo.configKey().width()); + definition.setDisplayName(configInfo.configKey().displayName()); + 
definition.setDependents(configInfo.configKey().dependents()); + + ConnectConfigValueInfo value = new ConnectConfigValueInfo(); + value.setName(configInfo.configValue().name()); + value.setValue(configInfo.configValue().value()); + value.setRecommendedValues(configInfo.configValue().recommendedValues()); + value.setErrors(configInfo.configValue().errors()); + value.setVisible(configInfo.configValue().visible()); + + ConnectConfigInfo connectConfigInfo = new ConnectConfigInfo(); + connectConfigInfo.setDefinition(definition); + connectConfigInfo.setValue(value); + + this.configs.add(connectConfigInfo); + } + } +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/connect/config/ConnectConfigKeyInfo.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/connect/config/ConnectConfigKeyInfo.java new file mode 100644 index 00000000..13ada833 --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/connect/config/ConnectConfigKeyInfo.java @@ -0,0 +1,38 @@ +package com.xiaojukeji.know.streaming.km.common.bean.entity.connect.config; + +import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.NoArgsConstructor; +import org.apache.kafka.connect.runtime.rest.entities.ConfigKeyInfo; + +import java.util.List; + +/** + * @see ConfigKeyInfo + */ +@Data +@NoArgsConstructor +@AllArgsConstructor +public class ConnectConfigKeyInfo { + private String name; + + private String type; + + private boolean required; + + private String defaultValue; + + private String importance; + + private String documentation; + + private String group; + + private int orderInGroup; + + private String width; + + private String displayName; + + private List dependents; +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/connect/config/ConnectConfigValueInfo.java 
b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/connect/config/ConnectConfigValueInfo.java new file mode 100644 index 00000000..af2aecf5 --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/connect/config/ConnectConfigValueInfo.java @@ -0,0 +1,27 @@ +package com.xiaojukeji.know.streaming.km.common.bean.entity.connect.config; + + +import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.NoArgsConstructor; +import org.apache.kafka.connect.runtime.rest.entities.ConfigValueInfo; + +import java.util.List; + +/** + * @see ConfigValueInfo + */ +@Data +@NoArgsConstructor +@AllArgsConstructor +public class ConnectConfigValueInfo { + private String name; + + private String value; + + private List recommendedValues; + + private List errors; + + private boolean visible; +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/connect/connector/KSAbstractConnectState.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/connect/connector/KSAbstractConnectState.java new file mode 100644 index 00000000..a5525768 --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/connect/connector/KSAbstractConnectState.java @@ -0,0 +1,20 @@ +package com.xiaojukeji.know.streaming.km.common.bean.entity.connect.connector; + +import com.alibaba.fastjson.annotation.JSONField; +import com.fasterxml.jackson.annotation.JsonProperty; +import lombok.Data; +import org.apache.kafka.connect.runtime.rest.entities.ConnectorStateInfo; + +/** + * @see ConnectorStateInfo.AbstractState + */ +@Data +public abstract class KSAbstractConnectState { + private String state; + + private String trace; + + @JSONField(name="worker_id") + @JsonProperty("worker_id") + private String workerId; +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/connect/connector/KSConnector.java 
b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/connect/connector/KSConnector.java new file mode 100644 index 00000000..b8fab0b6 --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/connect/connector/KSConnector.java @@ -0,0 +1,48 @@ +package com.xiaojukeji.know.streaming.km.common.bean.entity.connect.connector; + +import lombok.Data; + +import java.io.Serializable; + +@Data +public class KSConnector implements Serializable { + /** + * Kafka集群ID + */ + private Long kafkaClusterPhyId; + + /** + * connect集群ID + */ + private Long connectClusterId; + + /** + * connector名称 + */ + private String connectorName; + + /** + * connector类名 + */ + private String connectorClassName; + + /** + * connector类型 + */ + private String connectorType; + + /** + * 访问过的Topic列表 + */ + private String topics; + + /** + * task数 + */ + private Integer taskCount; + + /** + * 状态 + */ + private String state; +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/connect/connector/KSConnectorInfo.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/connect/connector/KSConnectorInfo.java new file mode 100644 index 00000000..f1c3ed31 --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/connect/connector/KSConnectorInfo.java @@ -0,0 +1,26 @@ +package com.xiaojukeji.know.streaming.km.common.bean.entity.connect.connector; + +import lombok.Data; +import org.apache.kafka.connect.runtime.rest.entities.ConnectorType; +import org.apache.kafka.connect.util.ConnectorTaskId; + +import java.io.Serializable; +import java.util.List; +import java.util.Map; + +/** + * copy from: + * @see org.apache.kafka.connect.runtime.rest.entities.ConnectorInfo + */ +@Data +public class KSConnectorInfo implements Serializable { + private Long connectClusterId; + + private String name; + + private Map config; + + private List tasks; + + private 
ConnectorType type; +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/connect/connector/KSConnectorState.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/connect/connector/KSConnectorState.java new file mode 100644 index 00000000..9cd9ea74 --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/connect/connector/KSConnectorState.java @@ -0,0 +1,11 @@ +package com.xiaojukeji.know.streaming.km.common.bean.entity.connect.connector; + +import lombok.Data; +import org.apache.kafka.connect.runtime.rest.entities.ConnectorStateInfo; + +/** + * @see ConnectorStateInfo.ConnectorState + */ +@Data +public class KSConnectorState extends KSAbstractConnectState { +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/connect/connector/KSConnectorStateInfo.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/connect/connector/KSConnectorStateInfo.java new file mode 100644 index 00000000..31d6657b --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/connect/connector/KSConnectorStateInfo.java @@ -0,0 +1,21 @@ +package com.xiaojukeji.know.streaming.km.common.bean.entity.connect.connector; + +import lombok.Data; +import org.apache.kafka.connect.runtime.rest.entities.ConnectorStateInfo; +import org.apache.kafka.connect.runtime.rest.entities.ConnectorType; + +import java.util.List; + +/** + * @see ConnectorStateInfo + */ +@Data +public class KSConnectorStateInfo { + private String name; + + private KSConnectorState connector; + + private List tasks; + + private ConnectorType type; +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/connect/connector/KSTaskState.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/connect/connector/KSTaskState.java new file mode 100644 index 00000000..323291b0 
--- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/connect/connector/KSTaskState.java @@ -0,0 +1,12 @@ +package com.xiaojukeji.know.streaming.km.common.bean.entity.connect.connector; + +import lombok.Data; +import org.apache.kafka.connect.runtime.rest.entities.ConnectorStateInfo; + +/** + * @see ConnectorStateInfo.TaskState + */ +@Data +public class KSTaskState extends KSAbstractConnectState { + private int id; +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/connect/plugin/ConnectPluginBasic.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/connect/plugin/ConnectPluginBasic.java new file mode 100644 index 00000000..82c2ad84 --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/connect/plugin/ConnectPluginBasic.java @@ -0,0 +1,38 @@ +package com.xiaojukeji.know.streaming.km.common.bean.entity.connect.plugin; + +import com.alibaba.fastjson.annotation.JSONField; +import com.fasterxml.jackson.annotation.JsonProperty; +import io.swagger.annotations.ApiModel; +import lombok.Data; +import lombok.NoArgsConstructor; + +import java.io.Serializable; + +/** + * @author zengqiao + * @date 22/10/17 + */ +@Data +@ApiModel(description = "Connect插件信息") +@NoArgsConstructor +public class ConnectPluginBasic implements Serializable { + /** + * Json序列化时对应的字段 + */ + @JSONField(name="class") + @JsonProperty("class") + private String className; + + private String type; + + private String version; + + private String helpDocLink; + + public ConnectPluginBasic(String className, String type, String version, String helpDocLink) { + this.className = className; + this.type = type; + this.version = version; + this.helpDocLink = helpDocLink; + } +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/group/Group.java 
b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/group/Group.java index 3b2e22e9..656924b7 100644 --- a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/group/Group.java +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/group/Group.java @@ -1,12 +1,12 @@ package com.xiaojukeji.know.streaming.km.common.bean.entity.group; +import com.xiaojukeji.know.streaming.km.common.bean.entity.kafka.KSGroupDescription; import com.xiaojukeji.know.streaming.km.common.constant.Constant; import com.xiaojukeji.know.streaming.km.common.enums.group.GroupStateEnum; import com.xiaojukeji.know.streaming.km.common.enums.group.GroupTypeEnum; import lombok.AllArgsConstructor; import lombok.Data; import lombok.NoArgsConstructor; -import org.apache.kafka.clients.admin.ConsumerGroupDescription; import java.util.ArrayList; import java.util.List; @@ -61,14 +61,14 @@ public class Group { */ private int coordinatorId; - public Group(Long clusterPhyId, String groupName, ConsumerGroupDescription groupDescription) { + public Group(Long clusterPhyId, String groupName, KSGroupDescription groupDescription) { this.clusterPhyId = clusterPhyId; - this.type = groupDescription.isSimpleConsumerGroup()? GroupTypeEnum.CONSUMER: GroupTypeEnum.CONNECTOR; + this.type = GroupTypeEnum.getTypeByProtocolType(groupDescription.protocolType()); this.name = groupName; this.state = GroupStateEnum.getByRawState(groupDescription.state()); - this.memberCount = groupDescription.members() == null? 0: groupDescription.members().size(); + this.memberCount = groupDescription.members() == null ? 0 : groupDescription.members().size(); this.topicMembers = new ArrayList<>(); this.partitionAssignor = groupDescription.partitionAssignor(); - this.coordinatorId = groupDescription.coordinator() == null? Constant.INVALID_CODE: groupDescription.coordinator().id(); + this.coordinatorId = groupDescription.coordinator() == null ? 
Constant.INVALID_CODE : groupDescription.coordinator().id(); } } diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/param/cluster/ConnectClusterParam.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/param/cluster/ConnectClusterParam.java new file mode 100644 index 00000000..2f830c55 --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/param/cluster/ConnectClusterParam.java @@ -0,0 +1,16 @@ +package com.xiaojukeji.know.streaming.km.common.bean.entity.param.cluster; + +import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.NoArgsConstructor; + +/** + * @author wyb + * @date 2022/11/9 + */ +@Data +@NoArgsConstructor +@AllArgsConstructor +public class ConnectClusterParam extends ClusterParam{ + protected Long connectClusterId; +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/param/connect/ConnectorParam.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/param/connect/ConnectorParam.java new file mode 100644 index 00000000..0f5b0a75 --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/param/connect/ConnectorParam.java @@ -0,0 +1,26 @@ +package com.xiaojukeji.know.streaming.km.common.bean.entity.param.connect; + +import com.xiaojukeji.know.streaming.km.common.bean.entity.param.cluster.ClusterParam; +import com.xiaojukeji.know.streaming.km.common.bean.entity.param.cluster.ClusterPhyParam; +import com.xiaojukeji.know.streaming.km.common.bean.entity.param.cluster.ConnectClusterParam; +import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.NoArgsConstructor; + +/** + * @author wyb + * @date 2022/11/8 + */ +@Data +@NoArgsConstructor +@AllArgsConstructor +public class ConnectorParam extends ConnectClusterParam { + + private String connectorName; + + public ConnectorParam(Long connectClusterId, String connectorName) { + 
super(connectClusterId); + this.connectorName = connectorName; + } + +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/param/metric/connect/ConnectClusterMetricParam.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/param/metric/connect/ConnectClusterMetricParam.java new file mode 100644 index 00000000..92946c5c --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/param/metric/connect/ConnectClusterMetricParam.java @@ -0,0 +1,21 @@ +package com.xiaojukeji.know.streaming.km.common.bean.entity.param.metric.connect; + +import com.xiaojukeji.know.streaming.km.common.bean.entity.param.metric.MetricParam; +import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.NoArgsConstructor; + +/** + * @author wyb + * @date 2022/11/1 + */ +@Data +@NoArgsConstructor +@AllArgsConstructor +public class ConnectClusterMetricParam extends MetricParam { + + private Long connectClusterId; + + private String metric; + +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/param/metric/connect/ConnectorMetricParam.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/param/metric/connect/ConnectorMetricParam.java new file mode 100644 index 00000000..6cad85eb --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/param/metric/connect/ConnectorMetricParam.java @@ -0,0 +1,29 @@ +package com.xiaojukeji.know.streaming.km.common.bean.entity.param.metric.connect; + +import com.xiaojukeji.know.streaming.km.common.bean.entity.param.metric.MetricParam; +import com.xiaojukeji.know.streaming.km.common.enums.connect.ConnectorTypeEnum; +import lombok.Data; +import lombok.NoArgsConstructor; + +/** + * @author wyb + * @date 2022/11/2 + */ +@Data +@NoArgsConstructor +public class ConnectorMetricParam extends MetricParam { + private Long connectClusterId; + + private String 
connectorName; + + private String metricName; + + private ConnectorTypeEnum connectorType; + + public ConnectorMetricParam(Long connectClusterId, String connectorName, String metricName, ConnectorTypeEnum connectorType) { + this.connectClusterId = connectClusterId; + this.connectorName = connectorName; + this.metricName = metricName; + this.connectorType = connectorType; + } +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/result/Result.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/result/Result.java index bd3b8cc8..54281b40 100644 --- a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/result/Result.java +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/result/Result.java @@ -100,6 +100,13 @@ public class Result extends BaseResult { return result; } + public static Result buildFrom(Result ret) { + Result result = new Result<>(); + result.setCode(ret.getCode()); + result.setMessage(ret.getMessage()); + return result; + } + public static Result buildFrom(ValidateKafkaAddressErrorEnum errorEnum, String msg) { Result result = new Result<>(); result.setCode(errorEnum.getCode()); diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/result/ResultStatus.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/result/ResultStatus.java index 252146c9..444a7940 100644 --- a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/result/ResultStatus.java +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/result/ResultStatus.java @@ -54,6 +54,8 @@ public enum ResultStatus { * 调用错误, [8000, 9000) */ KAFKA_OPERATE_FAILED(8010, "Kafka操作失败"), + KAFKA_CONNECTOR_OPERATE_FAILED(8011, "KafkaConnect操作失败"), + KAFKA_CONNECTOR_READ_FAILED(8012, "KafkaConnect读失败"), MYSQL_OPERATE_FAILED(8020, "MySQL操作失败"), ZK_OPERATE_FAILED(8030, "ZK操作失败"), 
ZK_FOUR_LETTER_CMD_FORBIDDEN(8031, "ZK四字命令被禁止"), diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/version/VersionConnectJmxInfo.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/version/VersionConnectJmxInfo.java new file mode 100644 index 00000000..7c4e257a --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/version/VersionConnectJmxInfo.java @@ -0,0 +1,13 @@ +package com.xiaojukeji.know.streaming.km.common.bean.entity.version; + +import com.xiaojukeji.know.streaming.km.common.enums.connect.ConnectorTypeEnum; +import lombok.Data; + +/** + * @author wyb + * @date 2022/11/24 + */ +@Data +public class VersionConnectJmxInfo extends VersionJmxInfo{ + private ConnectorTypeEnum type; +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/version/VersionControlItem.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/version/VersionControlItem.java index f0af86df..56b9169c 100644 --- a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/version/VersionControlItem.java +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/version/VersionControlItem.java @@ -2,7 +2,6 @@ package com.xiaojukeji.know.streaming.km.common.bean.entity.version; import com.xiaojukeji.know.streaming.km.common.enums.version.VersionEnum; import lombok.AllArgsConstructor; -import lombok.Builder; import lombok.Data; import lombok.NoArgsConstructor; diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/zookeeper/Znode.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/zookeeper/Znode.java index fd25df57..24868a93 100644 --- a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/zookeeper/Znode.java +++ 
b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/entity/zookeeper/Znode.java @@ -1,7 +1,5 @@ package com.xiaojukeji.know.streaming.km.common.bean.entity.zookeeper; - -import com.xiaojukeji.know.streaming.km.common.utils.Tuple; import io.swagger.annotations.ApiModelProperty; import lombok.Data; import org.apache.zookeeper.data.Stat; diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/event/cluster/connect/ConnectClusterLoadChangedEvent.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/event/cluster/connect/ConnectClusterLoadChangedEvent.java new file mode 100644 index 00000000..659218c6 --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/event/cluster/connect/ConnectClusterLoadChangedEvent.java @@ -0,0 +1,27 @@ +package com.xiaojukeji.know.streaming.km.common.bean.event.cluster.connect; + +import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.ConnectCluster; +import com.xiaojukeji.know.streaming.km.common.enums.operaterecord.OperationEnum; +import lombok.Getter; +import org.springframework.context.ApplicationEvent; + +/** + * @author wyb + * @date 2022/11/7 + */ +@Getter +public class ConnectClusterLoadChangedEvent extends ApplicationEvent { + + private ConnectCluster inDBConnectCluster; + + private ConnectCluster inCacheConnectCluster; + + private final OperationEnum operationEnum; + + public ConnectClusterLoadChangedEvent(Object source, ConnectCluster inDBConnectCluster, ConnectCluster inCacheConnectCluster, OperationEnum operationEnum) { + super(source); + this.inDBConnectCluster = inDBConnectCluster; + this.inCacheConnectCluster = inCacheConnectCluster; + this.operationEnum = operationEnum; + } +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/event/metric/connect/ConnectClusterMetricEvent.java 
b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/event/metric/connect/ConnectClusterMetricEvent.java new file mode 100644 index 00000000..2e6101d1 --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/event/metric/connect/ConnectClusterMetricEvent.java @@ -0,0 +1,21 @@ +package com.xiaojukeji.know.streaming.km.common.bean.event.metric.connect; + +import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.connect.ConnectClusterMetrics; +import com.xiaojukeji.know.streaming.km.common.bean.event.metric.BaseMetricEvent; +import lombok.Getter; + +import java.util.List; + +/** + * @author wyb + * @date 2022/11/7 + */ +@Getter +public class ConnectClusterMetricEvent extends BaseMetricEvent { + private List connectClusterMetrics; + + public ConnectClusterMetricEvent(Object source, List connectClusterMetrics) { + super(source); + this.connectClusterMetrics = connectClusterMetrics; + } +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/event/metric/connect/ConnectorMetricEvent.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/event/metric/connect/ConnectorMetricEvent.java new file mode 100644 index 00000000..23de77c0 --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/event/metric/connect/ConnectorMetricEvent.java @@ -0,0 +1,21 @@ +package com.xiaojukeji.know.streaming.km.common.bean.event.metric.connect; + +import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.connect.ConnectorMetrics; +import com.xiaojukeji.know.streaming.km.common.bean.event.metric.BaseMetricEvent; +import lombok.Getter; + +import java.util.List; + +/** + * @author wyb + * @date 2022/11/7 + */ +@Getter +public class ConnectorMetricEvent extends BaseMetricEvent { + private List connectorMetricsList; + + public ConnectorMetricEvent(Object source, List connectorMetricsList) { + super(source); + this.connectorMetricsList = 
connectorMetricsList; + } +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/po/connect/ConnectClusterPO.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/po/connect/ConnectClusterPO.java new file mode 100644 index 00000000..f0a364e6 --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/po/connect/ConnectClusterPO.java @@ -0,0 +1,51 @@ +package com.xiaojukeji.know.streaming.km.common.bean.po.connect; + +import com.baomidou.mybatisplus.annotation.TableName; +import com.xiaojukeji.know.streaming.km.common.bean.po.BasePO; +import com.xiaojukeji.know.streaming.km.common.constant.Constant; +import lombok.Data; + +@Data +@TableName(Constant.MYSQL_KC_TABLE_NAME_PREFIX + "connect_cluster") +public class ConnectClusterPO extends BasePO { + /** + * Kafka集群ID + */ + private Long kafkaClusterPhyId; + + /** + * 集群名字 + */ + private String name; + + /** + * 集群使用的消费组 + */ + private String groupName; + + /** + * 集群使用的消费组状态,也表示集群状态 + */ + private Integer state; + + /** + * 集群地址 + */ + private String clusterUrl; + + /** + * worker中显示的leader url信息 + */ + private String memberLeaderUrl; + + /** + * 版本信息 + */ + private String version; + + /** + * jmx配置 + * @see com.xiaojukeji.know.streaming.km.common.bean.entity.config.JmxConfig + */ + private String jmxProperties; +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/po/connect/ConnectWorkerPO.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/po/connect/ConnectWorkerPO.java new file mode 100644 index 00000000..5eb9a9ef --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/po/connect/ConnectWorkerPO.java @@ -0,0 +1,55 @@ +package com.xiaojukeji.know.streaming.km.common.bean.po.connect; + +import com.baomidou.mybatisplus.annotation.TableName; +import com.xiaojukeji.know.streaming.km.common.bean.po.BasePO; +import 
com.xiaojukeji.know.streaming.km.common.constant.Constant; +import lombok.Data; + +@Data +@TableName(Constant.MYSQL_KC_TABLE_NAME_PREFIX + "worker") +public class ConnectWorkerPO extends BasePO { + /** + * Kafka集群ID + */ + private Long kafkaClusterPhyId; + + /** + * 集群ID + */ + private Long connectClusterId; + + /** + * 成员ID + */ + private String memberId; + + /** + * 主机 + */ + private String host; + + /** + * Jmx端口 + */ + private Integer jmxPort; + + /** + * URL + */ + private String url; + + /** + * leader的URL + */ + private String leaderUrl; + + /** + * 1:是leader,0:不是leader + */ + private Integer leader; + + /** + * worker地址 + */ + private String workerId; +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/po/connect/ConnectorPO.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/po/connect/ConnectorPO.java new file mode 100644 index 00000000..1853deef --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/po/connect/ConnectorPO.java @@ -0,0 +1,50 @@ +package com.xiaojukeji.know.streaming.km.common.bean.po.connect; + +import com.baomidou.mybatisplus.annotation.TableName; +import com.xiaojukeji.know.streaming.km.common.bean.po.BasePO; +import com.xiaojukeji.know.streaming.km.common.constant.Constant; +import lombok.Data; + +@Data +@TableName(Constant.MYSQL_KC_TABLE_NAME_PREFIX + "connector") +public class ConnectorPO extends BasePO { + /** + * Kafka集群ID + */ + private Long kafkaClusterPhyId; + + /** + * connect集群ID + */ + private Long connectClusterId; + + /** + * connector名称 + */ + private String connectorName; + + /** + * connector类名 + */ + private String connectorClassName; + + /** + * connector类型 + */ + private String connectorType; + + /** + * 访问过的Topic列表 + */ + private String topics; + + /** + * task数 + */ + private Integer taskCount; + + /** + * 状态 + */ + private String state; +} diff --git 
a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/po/connect/WorkerConnectorPO.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/po/connect/WorkerConnectorPO.java new file mode 100644 index 00000000..f30d0ab6 --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/po/connect/WorkerConnectorPO.java @@ -0,0 +1,45 @@ +package com.xiaojukeji.know.streaming.km.common.bean.po.connect; + +import com.baomidou.mybatisplus.annotation.TableName; +import com.xiaojukeji.know.streaming.km.common.bean.po.BasePO; +import com.xiaojukeji.know.streaming.km.common.constant.Constant; +import lombok.Data; + +@Data +@TableName(Constant.MYSQL_KC_TABLE_NAME_PREFIX + "worker_connector") +public class WorkerConnectorPO extends BasePO { + /** + * connect集群ID + */ + private Long connectClusterId; + + /** + * kafka集群ID + */ + private Long kafkaClusterPhyId; + + /** + * connector名称 + */ + private String connectorName; + + /** + * worker成员ID + */ + private String workerMemberId; + + /** + * 任务ID + */ + private Integer taskId; + + /** + * task状态 + */ + private String state; + + /** + * worker信息 + */ + private String workerId; +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/vo/cluster/connect/ConnectClusterBasicCombineExistVO.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/vo/cluster/connect/ConnectClusterBasicCombineExistVO.java new file mode 100644 index 00000000..4cb919a7 --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/vo/cluster/connect/ConnectClusterBasicCombineExistVO.java @@ -0,0 +1,18 @@ +package com.xiaojukeji.know.streaming.km.common.bean.vo.cluster.connect; + +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import lombok.Data; + + +/** + * 集群的Connect集群信息 + * @author zengqiao + * @date 22/02/23 + */ +@Data +@ApiModel(description = "Connect集群基本信息") +public class 
ConnectClusterBasicCombineExistVO extends ConnectClusterBasicVO { + @ApiModelProperty(value="是否存在", example = "true") + protected Boolean exist; +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/vo/cluster/connect/ConnectClusterBasicVO.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/vo/cluster/connect/ConnectClusterBasicVO.java new file mode 100644 index 00000000..53bdc0ed --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/vo/cluster/connect/ConnectClusterBasicVO.java @@ -0,0 +1,44 @@ +package com.xiaojukeji.know.streaming.km.common.bean.vo.cluster.connect; + +import com.xiaojukeji.know.streaming.km.common.bean.vo.BaseVO; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import lombok.Data; + + +/** + * 集群的Connect集群信息 + * @author zengqiao + * @date 22/02/23 + */ +@Data +@ApiModel(description = "Connect集群基本信息") +public class ConnectClusterBasicVO extends BaseVO { + @ApiModelProperty(value = "Connect集群ID", example = "1") + private Long id; + + @ApiModelProperty(value = "Connect集群名称", example = "know-streaming") + private String name; + + @ApiModelProperty(value = "Connect集群使用的Group", example = "know-streaming") + private String groupName; + + @ApiModelProperty(value = "Connect集群URL", example = "http://127.0.0.1:8080") + private String clusterUrl; + + @ApiModelProperty(value = "Connect集群获取到的URL", example = "http://127.0.0.1:8080") + private String memberLeaderUrl; + + @ApiModelProperty(value = "Connect集群版本", example = "2.5.1") + private String version; + + @ApiModelProperty(value = "JMX配置", example = "") + private String jmxProperties; + + /** + * 集群使用的消费组状态,也表示集群状态 + * @see com.xiaojukeji.know.streaming.km.common.enums.group.GroupStateEnum + */ + @ApiModelProperty(value = "状态,2表示Dead,只有Dead才可以删除", example = "") + private Integer state; +} diff --git 
a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/vo/cluster/connect/ConnectStateVO.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/vo/cluster/connect/ConnectStateVO.java new file mode 100644 index 00000000..e94b0b28 --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/vo/cluster/connect/ConnectStateVO.java @@ -0,0 +1,42 @@ +package com.xiaojukeji.know.streaming.km.common.bean.vo.cluster.connect; + +import com.xiaojukeji.know.streaming.km.common.bean.vo.BaseVO; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import lombok.Data; + +/** + * 集群Connectors状态信息 + * @author zengqiao + * @date 22/10/17 + */ +@Data +@ApiModel(description = "集群Connects状态信息") +public class ConnectStateVO extends BaseVO { + @ApiModelProperty(value = "健康检查状态", example = "1") + private Integer healthState; + + @ApiModelProperty(value = "健康检查通过数", example = "1") + private Integer healthCheckPassed; + + @ApiModelProperty(value = "健康检查总数", example = "1") + private Integer healthCheckTotal; + + @ApiModelProperty(value = "connect集群数", example = "1") + private Integer connectClusterCount; + + @ApiModelProperty(value = "worker数", example = "1") + private Integer workerCount; + + @ApiModelProperty(value = "总Connector数", example = "1") + private Integer totalConnectorCount; + + @ApiModelProperty(value = "存活Connector数", example = "1") + private Integer aliveConnectorCount; + + @ApiModelProperty(value = "总Task数", example = "1") + private Integer totalTaskCount; + + @ApiModelProperty(value = "存活Task数", example = "1") + private Integer aliveTaskCount; +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/vo/cluster/connector/ClusterConnectorOverviewVO.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/vo/cluster/connector/ClusterConnectorOverviewVO.java new file mode 100644 index 00000000..8533eec1 --- /dev/null +++ 
b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/vo/cluster/connector/ClusterConnectorOverviewVO.java @@ -0,0 +1,42 @@ +package com.xiaojukeji.know.streaming.km.common.bean.vo.cluster.connector; + +import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.BaseMetrics; +import com.xiaojukeji.know.streaming.km.common.bean.vo.metrics.line.MetricLineVO; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import lombok.Data; + +import java.util.List; + +/** + * 集群Connector信息 + * @author zengqiao + * @date 22/02/23 + */ +@Data +@ApiModel(description = "Connector概览信息") +public class ClusterConnectorOverviewVO extends ConnectorBasicVO { + @ApiModelProperty(value = "Connector插件名称", example = "know-streaming") + private String connectorClassName; + + @ApiModelProperty(value = "Connector类型", example = "source") + private String connectorType; + + /** + * @see org.apache.kafka.connect.runtime.AbstractStatus.State + */ + @ApiModelProperty(value = "状态", example = "RUNNING") + private String state; + + @ApiModelProperty(value = "Task数", example = "100") + private Integer taskCount; + + @ApiModelProperty(value = "访问的Topic列表", example = "") + private List topicNameList; + + @ApiModelProperty(value = "多个指标的当前值, 包括健康分/LogSize等") + private BaseMetrics latestMetrics; + + @ApiModelProperty(value = "多个指标的历史曲线值,包括LogSize/BytesIn等") + private List metricLines; +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/vo/cluster/connector/ClusterWorkerOverviewVO.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/vo/cluster/connector/ClusterWorkerOverviewVO.java new file mode 100644 index 00000000..c14eb4e2 --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/vo/cluster/connector/ClusterWorkerOverviewVO.java @@ -0,0 +1,31 @@ +package com.xiaojukeji.know.streaming.km.common.bean.vo.cluster.connector; + +import 
com.xiaojukeji.know.streaming.km.common.bean.vo.BaseVO; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import lombok.Data; + + +/** + * 集群Worker信息 + * @author zengqiao + * @date 22/02/23 + */ +@Data +@ApiModel(description = "Worker概览信息") +public class ClusterWorkerOverviewVO extends BaseVO { + @ApiModelProperty(value = "Connect集群ID", example = "1") + private Long connectClusterId; + + @ApiModelProperty(value = "Connect集群名称", example = "know-streaming") + private String connectClusterName; + + @ApiModelProperty(value = "worker主机", example = "know-streaming") + private String workerHost; + + @ApiModelProperty(value = "Connector数", example = "10") + private Integer connectorCount; + + @ApiModelProperty(value = "Task数", example = "10") + private Integer taskCount; +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/vo/cluster/connector/ConnectorBasicCombineExistVO.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/vo/cluster/connector/ConnectorBasicCombineExistVO.java new file mode 100644 index 00000000..0bf6042f --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/vo/cluster/connector/ConnectorBasicCombineExistVO.java @@ -0,0 +1,18 @@ +package com.xiaojukeji.know.streaming.km.common.bean.vo.cluster.connector; + +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import lombok.Data; + + +/** + * 集群Connector信息 + * @author zengqiao + * @date 22/02/23 + */ +@Data +@ApiModel(description = "Connector基本信息") +public class ConnectorBasicCombineExistVO extends ConnectorBasicVO { + @ApiModelProperty(value="是否存在", example = "true") + protected Boolean exist; +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/vo/cluster/connector/ConnectorBasicVO.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/vo/cluster/connector/ConnectorBasicVO.java new file 
mode 100644 index 00000000..19217668 --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/vo/cluster/connector/ConnectorBasicVO.java @@ -0,0 +1,25 @@ +package com.xiaojukeji.know.streaming.km.common.bean.vo.cluster.connector; + +import com.xiaojukeji.know.streaming.km.common.bean.vo.BaseVO; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import lombok.Data; + + +/** + * 集群Connector信息 + * @author zengqiao + * @date 22/02/23 + */ +@Data +@ApiModel(description = "Connector基本信息") +public class ConnectorBasicVO extends BaseVO { + @ApiModelProperty(value = "Connect集群ID", example = "1") + private Long connectClusterId; + + @ApiModelProperty(value = "Connect集群名称", example = "know-streaming") + private String connectClusterName; + + @ApiModelProperty(value = "Connector名称", example = "know-streaming") + private String connectorName; +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/vo/connect/connector/ConnectorStateVO.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/vo/connect/connector/ConnectorStateVO.java new file mode 100644 index 00000000..7176efd2 --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/vo/connect/connector/ConnectorStateVO.java @@ -0,0 +1,32 @@ +package com.xiaojukeji.know.streaming.km.common.bean.vo.connect.connector; + +import io.swagger.annotations.ApiModelProperty; +import lombok.Data; + +/** + * @author wyb + * @date 2022/11/15 + */ +@Data +public class ConnectorStateVO { + @ApiModelProperty(value = "connect集群ID", example = "1") + private Long connectClusterId; + + @ApiModelProperty(value = "connector名称", example = "input1") + private String name; + + @ApiModelProperty(value = "connector类型", example = "source") + private String type; + + @ApiModelProperty(value = "connector状态", example = "running") + private String state; + + @ApiModelProperty(value = "总Task数", example = "1") 
+ private Integer totalTaskCount; + + @ApiModelProperty(value = "存活Task数", example = "1") + private Integer aliveTaskCount; + + @ApiModelProperty(value = "总Worker数", example = "1") + private Integer totalWorkerCount; +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/vo/connect/plugin/ConnectConfigInfoVO.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/vo/connect/plugin/ConnectConfigInfoVO.java new file mode 100644 index 00000000..172ecdc3 --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/vo/connect/plugin/ConnectConfigInfoVO.java @@ -0,0 +1,19 @@ +package com.xiaojukeji.know.streaming.km.common.bean.vo.connect.plugin; + +import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.NoArgsConstructor; +import org.apache.kafka.connect.runtime.rest.entities.ConfigInfo; + + +/** + * @see ConfigInfo + */ +@Data +@NoArgsConstructor +@AllArgsConstructor +public class ConnectConfigInfoVO { + private ConnectConfigKeyInfoVO definition; + + private ConnectConfigValueInfoVO value; +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/vo/connect/plugin/ConnectConfigInfosVO.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/vo/connect/plugin/ConnectConfigInfosVO.java new file mode 100644 index 00000000..200a32fd --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/vo/connect/plugin/ConnectConfigInfosVO.java @@ -0,0 +1,25 @@ +package com.xiaojukeji.know.streaming.km.common.bean.vo.connect.plugin; + + +import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.NoArgsConstructor; +import org.apache.kafka.connect.runtime.rest.entities.ConfigInfos; + +import java.util.List; + +/** + * @see ConfigInfos + */ +@Data +@NoArgsConstructor +@AllArgsConstructor +public class ConnectConfigInfosVO { + private String name; + + private int errorCount; + + private List groups; + + private 
List configs; +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/vo/connect/plugin/ConnectConfigKeyInfoVO.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/vo/connect/plugin/ConnectConfigKeyInfoVO.java new file mode 100644 index 00000000..424cb344 --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/vo/connect/plugin/ConnectConfigKeyInfoVO.java @@ -0,0 +1,38 @@ +package com.xiaojukeji.know.streaming.km.common.bean.vo.connect.plugin; + +import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.NoArgsConstructor; +import org.apache.kafka.connect.runtime.rest.entities.ConfigKeyInfo; + +import java.util.List; + +/** + * @see ConfigKeyInfo + */ +@Data +@NoArgsConstructor +@AllArgsConstructor +public class ConnectConfigKeyInfoVO { + private String name; + + private String type; + + private boolean required; + + private String defaultValue; + + private String importance; + + private String documentation; + + private String group; + + private int orderInGroup; + + private String width; + + private String displayName; + + private List dependents; +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/vo/connect/plugin/ConnectConfigValueInfoVO.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/vo/connect/plugin/ConnectConfigValueInfoVO.java new file mode 100644 index 00000000..6363cca0 --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/vo/connect/plugin/ConnectConfigValueInfoVO.java @@ -0,0 +1,27 @@ +package com.xiaojukeji.know.streaming.km.common.bean.vo.connect.plugin; + + +import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.NoArgsConstructor; +import org.apache.kafka.connect.runtime.rest.entities.ConfigValueInfo; + +import java.util.List; + +/** + * @see ConfigValueInfo + */ +@Data +@NoArgsConstructor +@AllArgsConstructor +public class 
ConnectConfigValueInfoVO { + private String name; + + private String value; + + private List recommendedValues; + + private List errors; + + private boolean visible; +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/vo/connect/plugin/ConnectPluginBasicVO.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/vo/connect/plugin/ConnectPluginBasicVO.java new file mode 100644 index 00000000..1b9c811b --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/vo/connect/plugin/ConnectPluginBasicVO.java @@ -0,0 +1,26 @@ +package com.xiaojukeji.know.streaming.km.common.bean.vo.connect.plugin; + +import com.xiaojukeji.know.streaming.km.common.bean.vo.BaseVO; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import lombok.Data; + +/** + * @author zengqiao + * @date 22/10/17 + */ +@Data +@ApiModel(description = "Connect插件信息") +public class ConnectPluginBasicVO extends BaseVO { + @ApiModelProperty(value = "指标或操作项名称", example = "org.apache.kafka.connect.file.FileStreamSinkConnector") + private String className; + + @ApiModelProperty(value = "类型", example = "source|sink") + private String type; + + @ApiModelProperty(value = "版本", example = "2.5.1") + private String version; + + @ApiModelProperty(value = "帮助文档地址", example = "") + private String helpDocLink; +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/vo/connect/task/KCTaskOverviewVO.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/vo/connect/task/KCTaskOverviewVO.java new file mode 100644 index 00000000..c82b0ed4 --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/vo/connect/task/KCTaskOverviewVO.java @@ -0,0 +1,32 @@ +package com.xiaojukeji.know.streaming.km.common.bean.vo.connect.task; + +import com.xiaojukeji.know.streaming.km.common.bean.vo.BaseVO; +import io.swagger.annotations.ApiModel; +import 
io.swagger.annotations.ApiModelProperty; +import lombok.Data; + + +/** + * Task信息概览 + * @author zengqiao + * @date 22/02/23 + */ +@Data +@ApiModel(description = "Task信息概览") +public class KCTaskOverviewVO extends BaseVO { + + @ApiModelProperty(value = "connect集群ID", example = "1") + private Long connectClusterId; + + @ApiModelProperty(value = "taskId", example = "1") + private Integer taskId; + + @ApiModelProperty(value = "worker地址", example = "127.0.0.1:8080") + private String workerId; + + @ApiModelProperty(value = "task状态", example = "RUNNING") + private String state; + + @ApiModelProperty(value = "错误原因", example = "asx") + private String trace; +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/vo/health/HealthCheckConfigVO.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/vo/health/HealthCheckConfigVO.java index c9857c56..cb889444 100644 --- a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/vo/health/HealthCheckConfigVO.java +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/vo/health/HealthCheckConfigVO.java @@ -20,6 +20,9 @@ public class HealthCheckConfigVO { @ApiModelProperty(value="检查维度名称", example = "Broker") private String dimensionName; + @ApiModelProperty(value="检查维度前端展示名称", example = "Connector") + private String dimensionDisplayName; + @ApiModelProperty(value="配置组", example = "HEALTH") private String configGroup; diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/vo/health/HealthScoreBaseResultVO.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/vo/health/HealthScoreBaseResultVO.java index 113a74ab..6bb0b584 100644 --- a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/vo/health/HealthScoreBaseResultVO.java +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/bean/vo/health/HealthScoreBaseResultVO.java @@ -21,6 +21,9 @@ public class HealthScoreBaseResultVO 
extends BaseTimeVO { @ApiModelProperty(value="检查维度名称", example = "cluster") private String dimensionName; + @ApiModelProperty(value="检查维度前端显示名称", example = "cluster") + private String dimensionDisplayName; + @ApiModelProperty(value="检查名称", example = "Group延迟") private String configName; diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/component/RestTemplateConfig.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/component/RestTemplateConfig.java index 12019702..332b6776 100644 --- a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/component/RestTemplateConfig.java +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/component/RestTemplateConfig.java @@ -131,13 +131,22 @@ public class RestTemplateConfig { } } catch (Exception e) { RESP_LOGGER.warn( - "method=traceResponse||code={}||url={}||text={}||headers={}||body={}||error={}||timeCost={}||subFlag={}", - response.getStatusCode(), url, response.getStatusText(), response.getHeaders(), - inputStringBuilder.toString(), e, (System.nanoTime() - nanoTime) / 1000 / 1000, subFlag); + "method=traceResponse||remoteResponse||code={}||url={}||text={}||headers={}||body={}||error={}||timeCost={}||subFlag={}", + response.getStatusCode(), + url, + response.getStatusText(), + response.getHeaders(), + inputStringBuilder.toString(), + e, + (System.nanoTime() - nanoTime) / 1000 / 1000, + subFlag + ); + if (!response.getStatusCode().is2xxSuccessful()) { - throw new ThirdPartRemoteException(e.getMessage(), e, ResultStatus.HTTP_REQ_ERROR); + throw new ThirdPartRemoteException(getResponseBodyAndIgnoreException(response), e, ResultStatus.HTTP_REQ_ERROR); } } + String responseString = inputStringBuilder.toString().replace("\n", ""); responseString = responseString.substring(0, Math.min(responseString.length(), 5000)); @@ -172,6 +181,19 @@ public class RestTemplateConfig { } + private String getResponseBodyAndIgnoreException(ClientHttpResponse response) { + try 
{ + byte[] bytes = new byte[response.getBody().available()]; + response.getBody().read(bytes); + + return new String(bytes); + } catch (Exception e) { + // ignore + } + + return ""; + } + private static String simpleUrl(HttpRequest request) { String url = request.getURI().toString(); int index = url.indexOf("?"); diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/component/RestTool.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/component/RestTool.java index 7ee45ef5..058e4540 100644 --- a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/component/RestTool.java +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/component/RestTool.java @@ -13,6 +13,7 @@ import org.springframework.web.client.RestTemplate; import org.springframework.web.util.UriComponentsBuilder; import java.lang.reflect.Type; +import java.util.List; import java.util.Map; /** @@ -22,7 +23,6 @@ import java.util.Map; */ @Component public class RestTool { - private static final ILog LOGGER = LogFactory.getLog(RestTool.class); @Autowired @@ -38,39 +38,38 @@ public class RestTool { * @return */ public T postObjectWithRawContent(String url, Object postBody, HttpHeaders headers, Class resultType) { - ResponseEntity result = restTemplate.exchange(url, HttpMethod.POST, new HttpEntity<>(postBody, headers), - String.class); + ResponseEntity result = restTemplate.exchange( + url, + HttpMethod.POST, + new HttpEntity<>(postBody, headers), + String.class + ); + return ConvertUtil.toObj(result.getBody(), resultType); } /** * POST请求 * @param url 请求地址 - * @param request 请求内容 - * @param responseType 期望返回的类型 + * @param postBody 请求内容 + * @param resultType 期望返回的类型 * @param 泛型T * @return T */ - public T postObjectWithJsonContent(String url, Object request, Type responseType) { - HttpHeaders jsonHead = getJsonContentHeaders(); - ResponseEntity result = restTemplate.exchange(url, HttpMethod.POST, - new HttpEntity( ConvertUtil.obj2Json(request), 
jsonHead), String.class); - return ConvertUtil.toObj(result.getBody(), responseType); + public T postObjectWithJsonContent(String url, Object postBody, Class resultType) { + return this.postObjectWithRawContent(url, postBody, this.getJsonContentHeaders(), resultType); } /** * POST请求 * @param url 请求地址 - * @param request 请求内容 - * @param responseType 期望返回的类型 + * @param postBody 请求内容 + * @param resultType 期望返回的类型 * @param 泛型T * @return T */ - public T postObjectWithJsonContentAndHeader(String url, Map headers, Object request, - Type responseType) { - ResponseEntity result = restTemplate.exchange(url, HttpMethod.POST, - new HttpEntity(ConvertUtil.obj2Json(request), getJsonContentHeaders(headers)), String.class); - return ConvertUtil.toObj(result.getBody(), responseType); + public T postObjectWithJsonContentAndHeader(String url, Map headers, Object postBody, Class resultType) { + return this.postObjectWithRawContent(url, postBody, this.getJsonContentHeaders(headers), resultType); } /** @@ -81,8 +80,15 @@ public class RestTool { * @param 泛型T * @return T */ - public T getObjectWithJsonContent(String url, Map params, Type resultType) { - ResponseEntity result = restTemplate.exchange(url, HttpMethod.GET, null, String.class, params); + public T getObjectWithJsonContent(String url, Map params, Class resultType) { + ResponseEntity result = restTemplate.exchange( + url, + HttpMethod.GET, + null, + String.class, + params + ); + return ConvertUtil.toObj(result.getBody(), resultType); } @@ -95,8 +101,13 @@ public class RestTool { * @return T */ public T getForObject(String url, HttpHeaders headers, Type resultType) { - ResponseEntity result = restTemplate.exchange(url, HttpMethod.GET, new HttpEntity<>(null, headers), - String.class); + ResponseEntity result = restTemplate.exchange( + url, + HttpMethod.GET, + new HttpEntity<>(null, headers), + String.class + ); + return ConvertUtil.toObj(result.getBody(), resultType); } @@ -169,6 +180,26 @@ public class RestTool { return 
result.getBody(); } + /** + * GET请求 + * @param url 请求地址 + * @param params 请求参数 + * @param resultType 返回类型 + * @param 泛型T + * @return T + */ + public List getArrayObjectWithJsonContent(String url, Map params, Class resultType) { + ResponseEntity result = restTemplate.exchange( + url, + HttpMethod.GET, + null, + String.class, + params + ); + + return ConvertUtil.str2ObjArrayByJson(result.getBody(), resultType); + } + /** * 根据map中的参数构建url+queryString * @param url 请求地址 @@ -181,7 +212,6 @@ public class RestTool { } UriComponentsBuilder builder = UriComponentsBuilder.fromHttpUrl(url); - return builder.toUriString(); } diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/constant/ApiPrefix.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/constant/ApiPrefix.java index 4b514998..332f639c 100644 --- a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/constant/ApiPrefix.java +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/constant/ApiPrefix.java @@ -12,6 +12,8 @@ public class ApiPrefix { public static final String API_V3_PREFIX = API_PREFIX + "v3/"; + public static final String API_V3_CONNECT_PREFIX = API_V3_PREFIX + "kafka-connect/"; + public static final String API_V3_OPEN_PREFIX = API_V3_PREFIX + "open/"; private ApiPrefix() { diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/constant/Constant.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/constant/Constant.java index 6cfdf338..df7ecce3 100644 --- a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/constant/Constant.java +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/constant/Constant.java @@ -45,6 +45,7 @@ public class Constant { public static final int INVALID_CODE = -1; public static final String MYSQL_TABLE_NAME_PREFIX = "ks_km_"; + public static final String MYSQL_KC_TABLE_NAME_PREFIX = "ks_kc_"; public static final String SWAGGER_API_TAG_PREFIX = 
"KS-KM-"; diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/constant/KafkaConstant.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/constant/KafkaConstant.java index 9ab0bad1..b7d6ffaf 100644 --- a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/constant/KafkaConstant.java +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/constant/KafkaConstant.java @@ -43,6 +43,8 @@ public class KafkaConstant { public static final String CONTROLLER_ROLE = "controller"; + public static final String DEFAULT_CONNECT_VERSION = "2.5.0"; + public static final Map KAFKA_ALL_CONFIG_DEF_MAP = new ConcurrentHashMap<>(); static { diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/constant/MsgConstant.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/constant/MsgConstant.java index 9072810d..768ebddf 100644 --- a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/constant/MsgConstant.java +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/constant/MsgConstant.java @@ -95,4 +95,19 @@ public class MsgConstant { public static String getJobNotExist(Long jobId) { return String.format("jobId:[%d] 不存在", jobId); } + + + /**************************************************** Connect-Cluster ****************************************************/ + + public static String getConnectClusterBizStr(Long clusterId, String clusterName){ + return String.format("Connect集群ID:[%d] 集群名称:[%s]", clusterId, clusterName); + } + + public static String getConnectClusterNotExist(Long clusterId) { + return String.format("Connect集群ID:[%d] 不存在或者未加载", clusterId); + } + + public static String getConnectorBizStr(Long clusterPhyId, String topicName) { + return String.format("Connect集群ID:[%d] Connector名称:[%s]", clusterPhyId, topicName); + } } diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/constant/connect/KafkaConnectConstant.java 
b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/constant/connect/KafkaConnectConstant.java new file mode 100644 index 00000000..5746bfd9 --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/constant/connect/KafkaConnectConstant.java @@ -0,0 +1,15 @@ +package com.xiaojukeji.know.streaming.km.common.constant.connect; + +/** + * @author zengqiao + * @date 20/5/20 + */ +public class KafkaConnectConstant { + public static final String CONNECTOR_CLASS_FILED_NAME = "connector.class"; + + public static final String CONNECTOR_TOPICS_FILED_NAME = "topics"; + public static final String CONNECTOR_TOPICS_FILED_ERROR_VALUE = "know-streaming-connect-illegal-value"; + + private KafkaConnectConstant() { + } +} \ No newline at end of file diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/converter/ConnectConverter.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/converter/ConnectConverter.java new file mode 100644 index 00000000..387c8469 --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/converter/ConnectConverter.java @@ -0,0 +1,142 @@ +package com.xiaojukeji.know.streaming.km.common.converter; + +import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.ConnectCluster; +import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.connector.KSConnector; +import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.connector.KSConnectorInfo; +import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.connector.KSConnectorStateInfo; +import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.connect.ConnectorMetrics; +import com.xiaojukeji.know.streaming.km.common.bean.po.connect.ConnectorPO; +import com.xiaojukeji.know.streaming.km.common.bean.vo.cluster.connect.ConnectClusterBasicCombineExistVO; +import com.xiaojukeji.know.streaming.km.common.bean.vo.cluster.connector.ClusterConnectorOverviewVO; +import 
com.xiaojukeji.know.streaming.km.common.bean.vo.cluster.connector.ConnectorBasicCombineExistVO; +import com.xiaojukeji.know.streaming.km.common.bean.vo.cluster.connector.ConnectorBasicVO; +import com.xiaojukeji.know.streaming.km.common.bean.vo.metrics.line.MetricLineVO; +import com.xiaojukeji.know.streaming.km.common.bean.vo.metrics.line.MetricMultiLinesVO; +import com.xiaojukeji.know.streaming.km.common.constant.connect.KafkaConnectConstant; +import com.xiaojukeji.know.streaming.km.common.utils.CommonUtils; +import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.function.Function; +import java.util.stream.Collectors; + +public class ConnectConverter { + public static ConnectorBasicCombineExistVO convert2BasicVO(ConnectCluster connectCluster, ConnectorPO connectorPO) { + ConnectorBasicCombineExistVO vo = new ConnectorBasicCombineExistVO(); + if (connectCluster == null || connectorPO == null) { + vo.setExist(false); + return vo; + } + + vo.setExist(true); + vo.setConnectClusterId(connectorPO.getConnectClusterId()); + vo.setConnectClusterName(connectCluster.getName()); + vo.setConnectorName(connectorPO.getConnectorName()); + + return vo; + } + + public static List convert2BasicVOList( + List clusterList, + List poList) { + Map clusterMap = new HashMap<>(); + clusterList.stream().forEach(elem -> clusterMap.put(elem.getId(), elem)); + + List voList = new ArrayList<>(); + poList.stream().filter(item -> clusterMap.containsKey(item.getConnectClusterId())).forEach(elem -> { + ConnectorBasicVO vo = new ConnectorBasicVO(); + vo.setConnectClusterId(elem.getConnectClusterId()); + vo.setConnectClusterName(clusterMap.get(elem.getConnectClusterId()).getName()); + vo.setConnectorName(elem.getConnectorName()); + + voList.add(vo); + }); + + return voList; + } + + public static ConnectClusterBasicCombineExistVO 
convert2ConnectClusterBasicCombineExistVO(ConnectCluster connectCluster) { + if (connectCluster == null) { + ConnectClusterBasicCombineExistVO combineExistVO = new ConnectClusterBasicCombineExistVO(); + combineExistVO.setExist(false); + + return combineExistVO; + } + + ConnectClusterBasicCombineExistVO combineExistVO = ConvertUtil.obj2Obj(connectCluster, ConnectClusterBasicCombineExistVO.class); + combineExistVO.setExist(true); + return combineExistVO; + } + + public static List convert2ClusterConnectorOverviewVOList(List clusterList, + List poList, + List metricsList) { + Map clusterMap = new HashMap<>(); + clusterList.stream().forEach(elem -> clusterMap.put(elem.getId(), elem)); + + Map metricMap = metricsList.stream().collect(Collectors.toMap(elem -> elem.getConnectClusterId() + "@" + elem.getConnectorName(), Function.identity())); + + List voList = new ArrayList<>(); + poList.stream().filter(item -> clusterMap.containsKey(item.getConnectClusterId())).forEach(elem -> { + ClusterConnectorOverviewVO vo = new ClusterConnectorOverviewVO(); + vo.setConnectClusterId(elem.getConnectClusterId()); + vo.setConnectClusterName(clusterMap.get(elem.getConnectClusterId()).getName()); + vo.setConnectorName(elem.getConnectorName()); + vo.setConnectorClassName(elem.getConnectorClassName()); + vo.setConnectorType(elem.getConnectorType()); + vo.setState(elem.getState()); + vo.setTaskCount(elem.getTaskCount()); + vo.setTopicNameList(CommonUtils.string2StrList(elem.getTopics())); + vo.setLatestMetrics(metricMap.getOrDefault(elem.getConnectClusterId() + "@" + elem.getConnectorName(), new ConnectorMetrics(elem.getConnectClusterId(), elem.getConnectorName()))); + voList.add(vo); + }); + + return voList; + } + + public static List supplyData2ClusterConnectorOverviewVOList(List voList, + List metricLineVOList) { + Map> metricLineMap = new HashMap<>(); + if (metricLineVOList != null) { + for (MetricMultiLinesVO metricMultiLinesVO : metricLineVOList) { + metricMultiLinesVO.getMetricLines() 
+ .forEach(metricLineVO -> { + String key = metricLineVO.getName(); + List metricLineVOS = metricLineMap.getOrDefault(key, new ArrayList<>()); + metricLineVOS.add(metricLineVO); + metricLineMap.put(key, metricLineVOS); + }); + } + } + + voList.forEach(elem -> { + elem.setMetricLines(metricLineMap.get(genConnectorKey(elem.getConnectClusterId(), elem.getConnectorName()))); + }); + + return voList; + } + + public static KSConnector convert2KSConnector(Long kafkaClusterPhyId, Long connectClusterId, KSConnectorInfo connectorInfo, KSConnectorStateInfo stateInfo, List topicNameList) { + KSConnector ksConnector = new KSConnector(); + ksConnector.setKafkaClusterPhyId(kafkaClusterPhyId); + ksConnector.setConnectClusterId(connectClusterId); + ksConnector.setConnectorName(connectorInfo.getName()); + ksConnector.setConnectorClassName(connectorInfo.getConfig().getOrDefault(KafkaConnectConstant.CONNECTOR_CLASS_FILED_NAME, "")); + ksConnector.setConnectorType(connectorInfo.getType().name()); + ksConnector.setTopics(topicNameList != null? CommonUtils.strList2String(topicNameList): ""); + ksConnector.setTaskCount(connectorInfo.getTasks() != null? connectorInfo.getTasks().size(): 0); + ksConnector.setState(stateInfo != null? 
stateInfo.getConnector().getState(): ""); + + return ksConnector; + } + + private static String genConnectorKey(Long connectorId, String connectorName){ + return connectorId + "#" + connectorName; + } + + private ConnectConverter() { + } +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/converter/HealthScoreVOConverter.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/converter/HealthScoreVOConverter.java index e82960b1..5625c604 100644 --- a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/converter/HealthScoreVOConverter.java +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/converter/HealthScoreVOConverter.java @@ -21,6 +21,7 @@ public class HealthScoreVOConverter { HealthScoreResultDetailVO vo = new HealthScoreResultDetailVO(); vo.setDimension(healthScoreResult.getCheckNameEnum().getDimensionEnum().getDimension()); vo.setDimensionName(healthScoreResult.getCheckNameEnum().getDimensionEnum().getMessage()); + vo.setDimensionDisplayName(healthScoreResult.getCheckNameEnum().getDimensionEnum().getDimensionDisplayName()); vo.setConfigName(healthScoreResult.getCheckNameEnum().getConfigName()); vo.setConfigItem(healthScoreResult.getCheckNameEnum().getConfigItem()); vo.setConfigDesc(healthScoreResult.getCheckNameEnum().getConfigDesc()); @@ -63,6 +64,7 @@ public class HealthScoreVOConverter { public static HealthCheckConfigVO convert2HealthCheckConfigVO(String groupName, BaseClusterHealthConfig config) { HealthCheckConfigVO vo = new HealthCheckConfigVO(); vo.setDimensionCode(config.getCheckNameEnum().getDimensionEnum().getDimension()); + vo.setDimensionDisplayName(config.getCheckNameEnum().getDimensionEnum().getDimensionDisplayName()); vo.setDimensionName(config.getCheckNameEnum().getDimensionEnum().name()); vo.setConfigGroup(groupName); vo.setConfigName(config.getCheckNameEnum().getConfigName()); diff --git 
a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/converter/TopicVOConverter.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/converter/TopicVOConverter.java index 8f5d5c28..0240fde1 100644 --- a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/converter/TopicVOConverter.java +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/converter/TopicVOConverter.java @@ -1,6 +1,5 @@ package com.xiaojukeji.know.streaming.km.common.converter; -import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.BaseMetrics; import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.PartitionMetrics; import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.TopicMetrics; import com.xiaojukeji.know.streaming.km.common.bean.entity.partition.Partition; @@ -14,7 +13,6 @@ import com.xiaojukeji.know.streaming.km.common.bean.vo.metrics.line.MetricMultiL import com.xiaojukeji.know.streaming.km.common.bean.vo.topic.TopicRecordVO; import com.xiaojukeji.know.streaming.km.common.bean.vo.topic.partition.TopicPartitionVO; import com.xiaojukeji.know.streaming.km.common.utils.ValidateUtils; -import io.swagger.annotations.ApiModelProperty; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.common.header.Header; diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/enums/connect/ConnectActionEnum.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/enums/connect/ConnectActionEnum.java new file mode 100644 index 00000000..83ee2505 --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/enums/connect/ConnectActionEnum.java @@ -0,0 +1,32 @@ +package com.xiaojukeji.know.streaming.km.common.enums.connect; + +public enum ConnectActionEnum { + /** + * + */ + + STOP(2, "stop"), + + RESUME(3,"resume"), + + RESTART(4,"restart"), + + UNKNOWN(-1, "unknown"); + + ConnectActionEnum(int status, String value) { + this.status = 
status; + this.value = value; + } + + private final int status; + + private final String value; + + public int getStatus() { + return status; + } + + public String getValue() { + return value; + } +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/enums/connect/ConnectorTypeEnum.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/enums/connect/ConnectorTypeEnum.java new file mode 100644 index 00000000..ce275b62 --- /dev/null +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/enums/connect/ConnectorTypeEnum.java @@ -0,0 +1,32 @@ +package com.xiaojukeji.know.streaming.km.common.enums.connect; + +/** + * @author wyb + * @date 2022/11/25 + */ +public enum ConnectorTypeEnum { + + + UNKNOWN(-1, "unknown"), + SOURCE(1, "source"), + SINK(2, "sink"); + + private final int code; + + private final String value; + + ConnectorTypeEnum(int code, String value) { + this.code = code; + this.value = value; + } + + public static ConnectorTypeEnum getByName(String name) { + for (ConnectorTypeEnum typeEnum : ConnectorTypeEnum.values()) { + if (typeEnum.name().equals(name)) { + return typeEnum; + } + } + return UNKNOWN; + } + +} diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/enums/group/GroupTypeEnum.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/enums/group/GroupTypeEnum.java index ebb91ea1..e4909dae 100644 --- a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/enums/group/GroupTypeEnum.java +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/enums/group/GroupTypeEnum.java @@ -2,6 +2,7 @@ package com.xiaojukeji.know.streaming.km.common.enums.group; import lombok.Getter; + /** * @author wyb * @date 2022/10/11 @@ -13,12 +14,18 @@ public enum GroupTypeEnum { CONSUMER(0, "Consumer客户端的消费组"), - CONNECTOR(1, "Connector的消费组"); + CONNECTOR(1, "Connector的消费组"), + + CONNECT_CLUSTER(2, "Connect集群"); private final Integer code; private final 
String msg; + public static final String CONNECTOR_PROTOCOL_TYPE = "consumer"; + + public static final String CONNECT_CLUSTER_PROTOCOL_TYPE = "connect"; + GroupTypeEnum(Integer code, String msg) { this.code = code; this.msg = msg; @@ -33,4 +40,19 @@ public enum GroupTypeEnum { } return UNKNOWN; } + + public static GroupTypeEnum getTypeByProtocolType(String protocolType) { + if (protocolType == null) { + return UNKNOWN; + } + if (protocolType.isEmpty()) { + return CONSUMER; + } else if (CONNECTOR_PROTOCOL_TYPE.equals(protocolType)) { + return CONNECTOR; + } else if (CONNECT_CLUSTER_PROTOCOL_TYPE.equals(protocolType)) { + return CONNECT_CLUSTER; + } else { + return UNKNOWN; + } + } } diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/enums/health/HealthCheckDimensionEnum.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/enums/health/HealthCheckDimensionEnum.java index d1b08181..eaa730f4 100644 --- a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/enums/health/HealthCheckDimensionEnum.java +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/enums/health/HealthCheckDimensionEnum.java @@ -8,19 +8,23 @@ import lombok.Getter; */ @Getter public enum HealthCheckDimensionEnum { - UNKNOWN(-1, "未知"), + UNKNOWN(-1, "未知", "未知"), - CLUSTER(0, "Cluster"), + CLUSTER(0, "Cluster", "Cluster"), - BROKER(1, "Broker"), + BROKER(1, "Broker", "Broker"), - TOPIC(2, "Topic"), + TOPIC(2, "Topic", "Topic"), - GROUP(3, "Group"), + GROUP(3, "Group", "Group"), - ZOOKEEPER(4, "Zookeeper"), + ZOOKEEPER(4, "Zookeeper", "Zookeeper"), - MAX_VAL(100, "所有的dimension的值需要小于MAX_VAL") + CONNECT_CLUSTER(5, "ConnectCluster", "Connect"), + + CONNECTOR(6, "Connector", "Connect"), + + MAX_VAL(100, "所有的dimension的值需要小于MAX_VAL", "Ignore") ; @@ -28,9 +32,12 @@ public enum HealthCheckDimensionEnum { private final String message; - HealthCheckDimensionEnum(int dimension, String message) { + private final String dimensionDisplayName; + + 
HealthCheckDimensionEnum(int dimension, String message, String dimensionDisplayName) { this.dimension = dimension; this.message = message; + this.dimensionDisplayName=dimensionDisplayName; } public static HealthCheckDimensionEnum getByCode(Integer dimension) { diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/enums/health/HealthCheckNameEnum.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/enums/health/HealthCheckNameEnum.java index 5b294e67..2d6d4133 100644 --- a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/enums/health/HealthCheckNameEnum.java +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/enums/health/HealthCheckNameEnum.java @@ -132,6 +132,33 @@ public enum HealthCheckNameEnum { false ), + CONNECT_CLUSTER_TASK_STARTUP_FAILURE_PERCENTAGE( + HealthCheckDimensionEnum.CONNECT_CLUSTER, + "TaskStartupFailurePercentage", + Constant.HC_CONFIG_NAME_PREFIX+"CONNECT_CLUSTER_TASK_STARTUP_FAILURE_PERCENTAGE", + "connect集群任务启动失败概率", + HealthCompareValueConfig.class, + false + ), + + CONNECTOR_FAILED_TASK_COUNT( + HealthCheckDimensionEnum.CONNECTOR, + "ConnectorFailedTaskCount", + Constant.HC_CONFIG_NAME_PREFIX+"CONNECTOR_FAILED_TASK_COUNT", + "connector失败状态的任务数量", + HealthCompareValueConfig.class, + false + ), + + CONNECTOR_UNASSIGNED_TASK_COUNT( + HealthCheckDimensionEnum.CONNECTOR, + "ConnectorUnassignedTaskCount", + Constant.HC_CONFIG_NAME_PREFIX+"CONNECTOR_UNASSIGNED_TASK_COUNT", + "connector未被分配的任务数量", + HealthCompareValueConfig.class, + false + ) + ; diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/enums/operaterecord/ModuleEnum.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/enums/operaterecord/ModuleEnum.java index 7c07718b..e7fe82c4 100644 --- a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/enums/operaterecord/ModuleEnum.java +++ 
b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/enums/operaterecord/ModuleEnum.java @@ -33,6 +33,9 @@ public enum ModuleEnum { KAFKA_CONTROLLER(70, "KafkaController"), + KAFKA_CONNECT_CLUSTER(80, "KafkaConnectCluster"), + KAFKA_CONNECT_CONNECTOR(81, "KafkaConnectConnector"), + PLATFORM_CONFIG(100, "平台配置"), JOB_KAFKA_REPLICA_REASSIGN(110, "Job-KafkaReplica迁移"), diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/enums/operaterecord/OperationEnum.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/enums/operaterecord/OperationEnum.java index 560ff34b..302cb38b 100644 --- a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/enums/operaterecord/OperationEnum.java +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/enums/operaterecord/OperationEnum.java @@ -30,6 +30,8 @@ public enum OperationEnum { CANCEL(10, "取消"), + RESTART(11, "重启"), + ; OperationEnum(int code, String desc) { diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/enums/version/VersionItemTypeEnum.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/enums/version/VersionItemTypeEnum.java index 7136e114..7bcf3234 100644 --- a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/enums/version/VersionItemTypeEnum.java +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/enums/version/VersionItemTypeEnum.java @@ -13,6 +13,10 @@ public enum VersionItemTypeEnum { METRIC_ZOOKEEPER(110, "ZookeeperMetric"), + METRIC_CONNECT_CLUSTER(120, "ConnectClusterMetric"), + METRIC_CONNECT_CONNECTOR(121, "ConnectConnectorMetric"), + METRIC_CONNECT_MIRROR_MAKER(122, "ConnectMirrorMakerMetric"), + /** * 服务端查询 */ @@ -37,6 +41,9 @@ public enum VersionItemTypeEnum { SERVICE_OP_REASSIGNMENT(330, "service_reassign_operation"), + SERVICE_OP_CONNECT_CLUSTER(400, "service_connect_cluster_operation"), + SERVICE_OP_CONNECT_CONNECTOR(401, 
"service_connect_connector_operation"), + SERVICE_OP_CONNECT_PLUGIN(402, "service_connect_plugin_operation"), /** * 前端操作 diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/jmx/JmxAttribute.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/jmx/JmxAttribute.java index a9bea1c3..2a89a08c 100644 --- a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/jmx/JmxAttribute.java +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/jmx/JmxAttribute.java @@ -34,6 +34,116 @@ public class JmxAttribute { public static final String VERSION = "Version"; + /*********************************************************** connect cluster***********************************************************/ + public static final String TASK_COUNT = "task-count"; + + public static final String CONNECTOR_STARTUP_ATTEMPTS_TOTAL = "connector-startup-attempts-total"; + + public static final String CONNECTOR_STARTUP_FAILURE_PERCENTAGE = "connector-startup-failure-percentage"; + + public static final String CONNECTOR_STARTUP_FAILURE_TOTAL = "connector-startup-failure-total"; + + public static final String CONNECTOR_STARTUP_SUCCESS_PERCENTAGE = "connector-startup-success-percentage"; + + public static final String CONNECTOR_STARTUP_SUCCESS_TOTAL = "connector-startup-success-total"; + + public static final String TASK_STARTUP_ATTEMPTS_TOTAL = "task-startup-attempts-total"; + + public static final String TASK_STARTUP_FAILURE_PERCENTAGE = "task-startup-failure-percentage"; + + public static final String TASK_STARTUP_FAILURE_TOTAL = "task-startup-failure-total"; + + public static final String TASK_STARTUP_SUCCESS_PERCENTAGE = "task-startup-success-percentage"; + + public static final String TASK_STARTUP_SUCCESS_TOTAL = "task-startup-success-total"; + + /*********************************************************** connect ***********************************************************/ + public static final String CONNECTOR_TOTAL_TASK_COUNT 
= "connector-total-task-count"; + + public static final String CONNECTOR_RUNNING_TASK_COUNT = "connector-running-task-count"; + + public static final String CONNECTOR_PAUSED_TASK_COUNT = "connector-paused-task-count"; + + public static final String CONNECTOR_FAILED_TASK_COUNT = "connector-failed-task-count"; + + public static final String CONNECTOR_UNASSIGNED_TASK_COUNT = "connector-unassigned-task-count"; + + public static final String BATCH_SIZE_AVG = "batch-size-avg"; + + public static final String BATCH_SIZE_MAX = "batch-size-max"; + + public static final String OFFSET_COMMIT_AVG_TIME_MS = "offset-commit-avg-time-ms"; + + public static final String OFFSET_COMMIT_MAX_TIME_MS = "offset-commit-max-time-ms"; + + public static final String OFFSET_COMMIT_FAILURE_PERCENTAGE = "offset-commit-failure-percentage"; + + public static final String OFFSET_COMMIT_SUCCESS_PERCENTAGE = "offset-commit-success-percentage"; + + public static final String POLL_BATCH_AVG_TIME_MS = "poll-batch-avg-time-ms"; + + public static final String POLL_BATCH_MAX_TIME_MS = "poll-batch-max-time-ms"; + + public static final String SOURCE_RECORD_ACTIVE_COUNT = "source-record-active-count"; + + public static final String SOURCE_RECORD_ACTIVE_COUNT_AVG = "source-record-active-count-avg"; + + public static final String SOURCE_RECORD_ACTIVE_COUNT_MAX = "source-record-active-count-max"; + + public static final String SOURCE_RECORD_POLL_RATE = "source-record-poll-rate"; + + public static final String SOURCE_RECORD_POLL_TOTAL = "source-record-poll-total"; + + public static final String SOURCE_RECORD_WRITE_RATE = "source-record-write-rate"; + + public static final String SOURCE_RECORD_WRITE_TOTAL = "source-record-write-total"; + + public static final String OFFSET_COMMIT_COMPLETION_RATE = "offset-commit-completion-rate"; + + public static final String OFFSET_COMMIT_COMPLETION_TOTAL = "offset-commit-completion-total"; + + public static final String OFFSET_COMMIT_SKIP_RATE = "offset-commit-skip-rate"; + + 
public static final String OFFSET_COMMIT_SKIP_TOTAL = "offset-commit-skip-total"; + + public static final String PARTITION_COUNT = "partition-count"; + + public static final String PUT_BATCH_AVG_TIME_MS = "put-batch-avg-time-ms"; + + public static final String PUT_BATCH_MAX_TIME_MS = "put-batch-max-time-ms"; + + public static final String SINK_RECORD_ACTIVE_COUNT = "sink-record-active-count"; + + public static final String SINK_RECORD_ACTIVE_COUNT_AVG = "sink-record-active-count-avg"; + + public static final String SINK_RECORD_ACTIVE_COUNT_MAX = "sink-record-active-count-max"; + + public static final String SINK_RECORD_LAG_MAX = "sink-record-lag-max"; + + public static final String SINK_RECORD_READ_RATE = "sink-record-read-rate"; + + public static final String SINK_RECORD_READ_TOTAL = "sink-record-read-total"; + + public static final String SINK_RECORD_SEND_RATE = "sink-record-send-rate"; + + public static final String SINK_RECORD_SEND_TOTAL = "sink-record-send-total"; + + public static final String DEADLETTERQUEUE_PRODUCE_FAILURES = "deadletterqueue-produce-failures"; + + public static final String DEADLETTERQUEUE_PRODUCE_REQUESTS = "deadletterqueue-produce-requests"; + + public static final String LAST_ERROR_TIMESTAMP = "last-error-timestamp"; + + public static final String TOTAL_ERRORS_LOGGED = "total-errors-logged"; + + public static final String TOTAL_RECORD_ERRORS = "total-record-errors"; + + public static final String TOTAL_RECORD_FAILURES = "total-record-failures"; + + public static final String TOTAL_RECORDS_SKIPPED = "total-records-skipped"; + + public static final String TOTAL_RETRIES = "total-retries"; + private JmxAttribute() { } } diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/jmx/JmxConnectorWrap.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/jmx/JmxConnectorWrap.java index ca7c01c4..d9cfb082 100644 --- a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/jmx/JmxConnectorWrap.java +++ 
b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/jmx/JmxConnectorWrap.java @@ -28,9 +28,8 @@ import java.util.concurrent.atomic.AtomicInteger; public class JmxConnectorWrap { private static final Logger LOGGER = LoggerFactory.getLogger(JmxConnectorWrap.class); - private final Long physicalClusterId; - - private final Integer brokerId; + //jmx打印日志时的附带信息 + private final String clientLogIdent; private final Long brokerStartupTime; @@ -44,9 +43,8 @@ public class JmxConnectorWrap { private JmxConfig jmxConfig; - public JmxConnectorWrap(Long physicalClusterId, Integer brokerId, Long brokerStartupTime, String host, Integer port, JmxConfig jmxConfig) { - this.physicalClusterId = physicalClusterId; - this.brokerId = brokerId; + public JmxConnectorWrap(String clientLogIdent, Long brokerStartupTime, String host, Integer port, JmxConfig jmxConfig) { + this.clientLogIdent=clientLogIdent; this.brokerStartupTime = brokerStartupTime; this.host = host; @@ -93,7 +91,7 @@ public class JmxConnectorWrap { jmxConnector = null; } catch (IOException e) { - LOGGER.warn("close JmxConnector exception, physicalClusterId:{} brokerId:{} host:{} port:{}.", physicalClusterId, brokerId, host, port, e); + LOGGER.warn("close JmxConnector exception, clientLogIdent:{} host:{} port:{}.", clientLogIdent, host, port, e); } } @@ -176,12 +174,12 @@ public class JmxConnectorWrap { } jmxConnector = JMXConnectorFactory.connect(new JMXServiceURL(jmxUrl), environment); - LOGGER.info("JMX connect success, physicalClusterId:{} brokerId:{} host:{} port:{}.", physicalClusterId, brokerId, host, port); + LOGGER.info("JMX connect success, clientLogIdent:{} host:{} port:{}.", clientLogIdent, host, port); return true; } catch (MalformedURLException e) { - LOGGER.error("JMX url exception, physicalClusterId:{} brokerId:{} host:{} port:{} jmxUrl:{}", physicalClusterId, brokerId, host, port, jmxUrl, e); + LOGGER.error("JMX url exception, clientLogIdent:{} host:{} port:{} jmxUrl:{}", clientLogIdent, host, 
port, jmxUrl, e); } catch (Exception e) { - LOGGER.error("JMX connect exception, physicalClusterId:{} brokerId:{} host:{} port:{}.", physicalClusterId, brokerId, host, port, e); + LOGGER.error("JMX connect exception, clientLogIdent:{} host:{} port:{}.", clientLogIdent, host, port, e); } return false; } diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/jmx/JmxName.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/jmx/JmxName.java index db8b3197..5e11e271 100644 --- a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/jmx/JmxName.java +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/jmx/JmxName.java @@ -69,6 +69,20 @@ public class JmxName { public static final String JMX_ZK_SYNC_CONNECTS_PER_SEC = "kafka.server:type=SessionExpireListener,name=ZooKeeperSyncConnectsPerSec"; public static final String JMX_ZK_DISCONNECTORS_PER_SEC = "kafka.server:type=SessionExpireListener,name=ZooKeeperDisconnectsPerSec"; + /*********************************************************** connect ***********************************************************/ + public static final String JMX_CONNECT_WORKER_METRIC = "kafka.connect:type=connect-worker-metrics"; + + public static final String JMX_CONNECT_WORKER_CONNECTOR_METRIC = "kafka.connect:type=connect-worker-metrics,connector=%s"; + + public static final String JMX_CONNECTOR_TASK_CONNECTOR_METRIC = "kafka.connect:type=connector-task-metrics,connector=%s,task=%s"; + + public static final String JMX_CONNECTOR_SOURCE_TASK_METRICS = "kafka.connect:type=source-task-metrics,connector=%s,task=%s"; + + public static final String JMX_CONNECTOR_SINK_TASK_METRICS = "kafka.connect:type=sink-task-metrics,connector=%s,task=%s"; + + public static final String JMX_CONNECTOR_TASK_ERROR_METRICS = "kafka.connect:type=task-error-metrics,connector=%s,task=%s"; + + private JmxName() { } } diff --git 
a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/utils/CommonUtils.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/utils/CommonUtils.java index 1c451609..f3d2b357 100644 --- a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/utils/CommonUtils.java +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/utils/CommonUtils.java @@ -7,6 +7,7 @@ import org.springframework.web.multipart.MultipartFile; import java.math.BigDecimal; import java.math.BigInteger; +import java.net.URI; import java.nio.charset.StandardCharsets; import java.security.MessageDigest; import java.util.ArrayList; @@ -251,4 +252,13 @@ public class CommonUtils { return true; } + + public static String getWorkerId(String url){ + try { + URI uri = new URI(url); + return uri.getHost() + ":" + uri.getPort(); + } catch (Exception e) { + return null; + } + } } diff --git a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/utils/PaginationMetricsUtil.java b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/utils/PaginationMetricsUtil.java index c7fcad77..430ea9c4 100644 --- a/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/utils/PaginationMetricsUtil.java +++ b/km-common/src/main/java/com/xiaojukeji/know/streaming/km/common/utils/PaginationMetricsUtil.java @@ -37,6 +37,11 @@ public class PaginationMetricsUtil { return allDataList; } + //比较metricNameList中第一个不为空的metric值。 + public static void sortMetrics(List allDataList, String metricField, List metricNameList, String defaultSortField, String sortType) { + sortMetricList(allDataList, metricField, metricNameList, defaultSortField, sortType); + } + public static List sortMetrics(List allDataList, String metricName, String defaultSortField, String sortType) { sortMetricList(allDataList, metricName, defaultSortField, sortType); @@ -151,6 +156,102 @@ public class PaginationMetricsUtil { return allDataList; } + private static List sortMetricList(List 
allDataList, String metricFieldName, List metricNameList, String defaultFieldName, String sortType) { + if (ValidateUtils.anyBlank(defaultFieldName, sortType) || ValidateUtils.isEmptyList(allDataList)||ValidateUtils.isEmptyList(metricNameList)) { + return allDataList; + } + + try { + Field metricField = FieldUtils.getField(allDataList.get(0).getClass(), metricFieldName, true); + Field defaultField = FieldUtils.getField(allDataList.get(0).getClass(), defaultFieldName, true); + if(ValidateUtils.anyNull(defaultField, metricField)) { + log.debug("method=sortMetrics||className={}||metricFieldName={}||metricNameList={}||defaultFieldName={}||metricSortType={}||msg=field not exist.", + allDataList.get(0).getClass().getSimpleName(), metricFieldName, metricNameList, defaultFieldName, sortType); + + // 字段不存在,则排序失效,直接返回 + return allDataList; + } + + Collections.sort(allDataList, (a1, a2) -> { + try { + Object m1 = FieldUtils.readField(a1, metricField.getName(), true); + Object m2 = FieldUtils.readField(a2, metricField.getName(), true); + + return compareFirstNotNullMetricValue((BaseMetrics)m1, (BaseMetrics)m2, metricNameList, defaultField); + } catch (Exception e) { + log.error("method=sortMetrics||className={}||metricFieldName={}||metricNameList={}||defaultFieldName={}||metricSortType={}||errMsg=exception.", + allDataList.get(0).getClass().getSimpleName(), metricFieldName, metricNameList, defaultFieldName, sortType, e); + } + + return 0; + }); + } catch (Exception e) { + log.error("method=sortMetrics||className={}||metricFieldName={}||metricNameList={}||defaultFieldName={}||metricSortType={}||errMsg=exception.", + allDataList.get(0).getClass().getSimpleName(), metricFieldName, metricNameList, defaultFieldName, sortType, e); + } + + if (!SortTypeEnum.DESC.getSortType().equals(sortType)) { + Collections.reverse(allDataList); + } + return allDataList; + } + + private static int compareFirstNotNullMetricValue(BaseMetrics a1, BaseMetrics a2, List metricNameList, Field 
defaultField) { + try { + // 指标数据排序 + Float m1 = null; + Float m2 = null; + + //获取第一个非空指标 + for (String metric : metricNameList) { + m1 = a1.getMetric(metric); + if (m1 != null) { + break; + } + } + for (String metric : metricNameList) { + m2 = a2.getMetric(metric); + if (m2 != null) { + break; + } + } + + if (m1 != null && m2 == null) { + return -1; + } else if (m1 == null && m2 != null) { + return 1; + } else if (m1 != null && m2 != null) { + // 两个都不为空,则进行大小比较 + int val = compareObject(m2, m1); + if (val != 0) { + return val; + } + } + + // 默认字段排序 + Object f1 = FieldUtils.readField(a1, defaultField.getName(), true); + Object f2 = FieldUtils.readField(a2, defaultField.getName(), true); + if (f1 != null && f2 != null) { + // 两个都不为空,则进行大小比较 + return compareObject(f2, f1); + } + if (f1 != null) { + return -1; + } else if (f2 != null) { + return 1; + } + + return 0; + } catch (Exception e) { + log.debug("method=sortMetricsObject||metricsA={}||metricsB={}||metricNameList={}||defaultFieldName={}||errMsg=exception.", + a1, a2, metricNameList, defaultField.getName(), e); + } + + return 0; + } + + + private static List sortMetricList(List allDataList, String metricName, String defaultSortField, String sortType) { if (ValidateUtils.anyBlank(metricName, defaultSortField, sortType) || ValidateUtils.isEmptyList(allDataList)) { return allDataList; diff --git a/km-core/pom.xml b/km-core/pom.xml index 578a0666..031b591a 100644 --- a/km-core/pom.xml +++ b/km-core/pom.xml @@ -120,5 +120,9 @@ org.apache.kafka kafka_2.13 + + org.apache.kafka + connect-runtime + \ No newline at end of file diff --git a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/cache/CollectedMetricsLocalCache.java b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/cache/CollectedMetricsLocalCache.java index 2fc0a4ff..7b12b0dc 100644 --- a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/cache/CollectedMetricsLocalCache.java +++ 
b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/cache/CollectedMetricsLocalCache.java @@ -24,6 +24,17 @@ public class CollectedMetricsLocalCache { .maximumSize(10000) .build(); + private static final Cache connectClusterMetricsCache = Caffeine.newBuilder() + .expireAfterWrite(90, TimeUnit.SECONDS) + .maximumSize(10000) + .build(); + + private static final Cache connectorMetricsCache = Caffeine.newBuilder() + .expireAfterWrite(90, TimeUnit.SECONDS) + .maximumSize(10000) + .build(); + + public static Float getBrokerMetrics(String brokerMetricKey) { return brokerMetricsCache.getIfPresent(brokerMetricKey); } @@ -59,6 +70,28 @@ public class CollectedMetricsLocalCache { partitionMetricsCache.put(partitionMetricsKey, metricsList); } + public static void putConnectClusterMetrics(String connectClusterMetricKey, Float value) { + if (value == null) { + return; + } + connectClusterMetricsCache.put(connectClusterMetricKey, value); + } + + public static Float getConnectClusterMetrics(String connectClusterMetricKey) { + return connectClusterMetricsCache.getIfPresent(connectClusterMetricKey); + } + + public static void putConnectorMetrics(String connectClusterMetricKey, Float value) { + if (value == null) { + return; + } + connectorMetricsCache.put(connectClusterMetricKey, value); + } + + public static Float getConnectorMetrics(String connectClusterMetricKey) { + return connectorMetricsCache.getIfPresent(connectClusterMetricKey); + } + public static String genBrokerMetricKey(Long clusterPhyId, Integer brokerId, String metricName) { return clusterPhyId + "@" + brokerId + "@" + metricName; } @@ -71,6 +104,16 @@ public class CollectedMetricsLocalCache { return clusterPhyId + "@" + brokerId + "@" + topicName + "@" + partitionId + "@" + metricName; } + public static String genConnectClusterMetricCacheKey(Long connectClusterId, String metricName) { + return connectClusterId + "@" + metricName; + } + + public static String genConnectorMetricCacheKey(Long connectClusterId, 
String connectorName, String metricName) { + return connectClusterId + "@" + connectorName + '@' + metricName; + } + /**************************************************** private method ****************************************************/ + private CollectedMetricsLocalCache() { + } } diff --git a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/acl/impl/KafkaAclServiceImpl.java b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/acl/impl/KafkaAclServiceImpl.java index 836c7d56..8f1473cd 100644 --- a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/acl/impl/KafkaAclServiceImpl.java +++ b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/acl/impl/KafkaAclServiceImpl.java @@ -17,6 +17,7 @@ import com.xiaojukeji.know.streaming.km.common.exception.VCHandlerNotExistExcept import com.xiaojukeji.know.streaming.km.common.utils.ValidateUtils; import com.xiaojukeji.know.streaming.km.core.service.acl.KafkaAclService; import com.xiaojukeji.know.streaming.km.core.service.cluster.ClusterPhyService; +import com.xiaojukeji.know.streaming.km.core.service.version.BaseKafkaVersionControlService; import com.xiaojukeji.know.streaming.km.core.service.version.BaseVersionControlService; import com.xiaojukeji.know.streaming.km.persistence.cache.LoadedClusterPhyCache; import com.xiaojukeji.know.streaming.km.persistence.kafka.KafkaAdminClient; @@ -47,7 +48,7 @@ import static com.xiaojukeji.know.streaming.km.common.enums.version.VersionEnum. 
@Service -public class KafkaAclServiceImpl extends BaseVersionControlService implements KafkaAclService { +public class KafkaAclServiceImpl extends BaseKafkaVersionControlService implements KafkaAclService { private static final ILog log = LogFactory.getLog(KafkaAclServiceImpl.class); private static final String ACL_GET_FROM_KAFKA = "getAclFromKafka"; diff --git a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/acl/impl/OpKafkaAclServiceImpl.java b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/acl/impl/OpKafkaAclServiceImpl.java index 41c0bcdf..a8fab1f1 100644 --- a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/acl/impl/OpKafkaAclServiceImpl.java +++ b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/acl/impl/OpKafkaAclServiceImpl.java @@ -19,6 +19,7 @@ import com.xiaojukeji.know.streaming.km.common.utils.ValidateUtils; import com.xiaojukeji.know.streaming.km.common.exception.VCHandlerNotExistException; import com.xiaojukeji.know.streaming.km.core.service.acl.OpKafkaAclService; import com.xiaojukeji.know.streaming.km.core.service.oprecord.OpLogWrapService; +import com.xiaojukeji.know.streaming.km.core.service.version.BaseKafkaVersionControlService; import com.xiaojukeji.know.streaming.km.core.service.version.BaseVersionControlService; import com.xiaojukeji.know.streaming.km.persistence.kafka.KafkaAdminClient; import com.xiaojukeji.know.streaming.km.persistence.kafka.KafkaAdminZKClient; @@ -47,7 +48,7 @@ import static com.xiaojukeji.know.streaming.km.common.enums.version.VersionEnum. 
@Service -public class OpKafkaAclServiceImpl extends BaseVersionControlService implements OpKafkaAclService { +public class OpKafkaAclServiceImpl extends BaseKafkaVersionControlService implements OpKafkaAclService { private static final ILog log = LogFactory.getLog(OpKafkaAclServiceImpl.class); private static final String ACL_CREATE = "createKafkaAcl"; diff --git a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/broker/impl/BrokerConfigServiceImpl.java b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/broker/impl/BrokerConfigServiceImpl.java index ecfbfcde..f47a3fa5 100644 --- a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/broker/impl/BrokerConfigServiceImpl.java +++ b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/broker/impl/BrokerConfigServiceImpl.java @@ -22,6 +22,7 @@ import com.xiaojukeji.know.streaming.km.common.exception.VCHandlerNotExistExcept import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil; import com.xiaojukeji.know.streaming.km.core.service.broker.BrokerConfigService; import com.xiaojukeji.know.streaming.km.core.service.oprecord.OpLogWrapService; +import com.xiaojukeji.know.streaming.km.core.service.version.BaseKafkaVersionControlService; import com.xiaojukeji.know.streaming.km.core.service.version.BaseVersionControlService; import com.xiaojukeji.know.streaming.km.persistence.kafka.KafkaAdminClient; import com.xiaojukeji.know.streaming.km.persistence.kafka.KafkaAdminZKClient; @@ -42,7 +43,7 @@ import static com.xiaojukeji.know.streaming.km.common.enums.version.VersionEnum. 
@Service -public class BrokerConfigServiceImpl extends BaseVersionControlService implements BrokerConfigService { +public class BrokerConfigServiceImpl extends BaseKafkaVersionControlService implements BrokerConfigService { private static final ILog log = LogFactory.getLog(BrokerConfigServiceImpl.class); private static final String GET_BROKER_CONFIG = "getBrokerConfig"; diff --git a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/broker/impl/BrokerServiceImpl.java b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/broker/impl/BrokerServiceImpl.java index 0bd7f364..97dc00c8 100644 --- a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/broker/impl/BrokerServiceImpl.java +++ b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/broker/impl/BrokerServiceImpl.java @@ -8,8 +8,8 @@ import com.github.benmanes.caffeine.cache.Caffeine; import com.xiaojukeji.know.streaming.km.common.bean.entity.broker.Broker; import com.xiaojukeji.know.streaming.km.common.bean.entity.cluster.ClusterPhy; import com.xiaojukeji.know.streaming.km.common.bean.entity.config.JmxConfig; -import com.xiaojukeji.know.streaming.km.common.bean.entity.param.broker.BrokerParam; import com.xiaojukeji.know.streaming.km.common.bean.entity.param.VersionItemParam; +import com.xiaojukeji.know.streaming.km.common.bean.entity.param.broker.BrokerParam; import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result; import com.xiaojukeji.know.streaming.km.common.bean.entity.result.ResultStatus; import com.xiaojukeji.know.streaming.km.common.bean.entity.topic.Topic; @@ -26,12 +26,12 @@ import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil; import com.xiaojukeji.know.streaming.km.common.utils.ValidateUtils; import com.xiaojukeji.know.streaming.km.core.service.broker.BrokerService; import com.xiaojukeji.know.streaming.km.core.service.topic.TopicService; -import 
com.xiaojukeji.know.streaming.km.core.service.version.BaseVersionControlService; +import com.xiaojukeji.know.streaming.km.core.service.version.BaseKafkaVersionControlService; import com.xiaojukeji.know.streaming.km.persistence.jmx.JmxDAO; import com.xiaojukeji.know.streaming.km.persistence.kafka.KafkaAdminClient; import com.xiaojukeji.know.streaming.km.persistence.kafka.KafkaJMXClient; -import com.xiaojukeji.know.streaming.km.persistence.mysql.broker.BrokerDAO; import com.xiaojukeji.know.streaming.km.persistence.kafka.zookeeper.service.KafkaZKDAO; +import com.xiaojukeji.know.streaming.km.persistence.mysql.broker.BrokerDAO; import kafka.zk.BrokerIdsZNode; import org.apache.kafka.clients.admin.*; import org.apache.kafka.common.Node; @@ -54,7 +54,7 @@ import static com.xiaojukeji.know.streaming.km.common.jmx.JmxAttribute.VERSION; import static com.xiaojukeji.know.streaming.km.common.jmx.JmxName.JMX_SERVER_APP_INFO; @Service -public class BrokerServiceImpl extends BaseVersionControlService implements BrokerService { +public class BrokerServiceImpl extends BaseKafkaVersionControlService implements BrokerService { private static final ILog log = LogFactory.getLog(BrokerServiceImpl.class); private static final String BROKER_LOG_DIR = "getLogDir"; diff --git a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/cluster/impl/ClusterValidateServiceImpl.java b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/cluster/impl/ClusterValidateServiceImpl.java index ba72d2fe..50d54553 100644 --- a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/cluster/impl/ClusterValidateServiceImpl.java +++ b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/cluster/impl/ClusterValidateServiceImpl.java @@ -16,7 +16,6 @@ import com.xiaojukeji.know.streaming.km.persistence.jmx.JmxDAO; import com.xiaojukeji.know.streaming.km.persistence.kafka.zookeeper.service.KafkaZKDAO; import 
com.xiaojukeji.know.streaming.km.persistence.kafka.zookeeper.service.impl.KafkaZKDAOImpl; import kafka.server.KafkaConfig; -import lombok.extern.slf4j.Slf4j; import org.apache.kafka.clients.admin.*; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.clients.consumer.KafkaConsumer; @@ -35,7 +34,6 @@ import java.util.*; * @author zengqiao * @date 22/02/28 */ -@Slf4j @Service public class ClusterValidateServiceImpl implements ClusterValidateService { private static final ILog logger = LogFactory.getLog(KafkaZKDAOImpl.class); diff --git a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/cluster/impl/ControllerChangeLogServiceImpl.java b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/cluster/impl/ControllerChangeLogServiceImpl.java index 57640e4f..5cec3851 100644 --- a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/cluster/impl/ControllerChangeLogServiceImpl.java +++ b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/cluster/impl/ControllerChangeLogServiceImpl.java @@ -4,7 +4,6 @@ import com.xiaojukeji.know.streaming.km.common.bean.dto.pagination.PaginationSor import com.xiaojukeji.know.streaming.km.common.bean.po.ControllerChangeLogPO; import com.xiaojukeji.know.streaming.km.core.service.cluster.ControllerChangeLogService; import com.xiaojukeji.know.streaming.km.persistence.mysql.ControllerChangeLogDAO; -import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; @@ -12,7 +11,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -@Slf4j @Service public class ControllerChangeLogServiceImpl implements ControllerChangeLogService { diff --git a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/connect/cluster/ConnectClusterMetricService.java 
b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/connect/cluster/ConnectClusterMetricService.java new file mode 100644 index 00000000..92ed79f0 --- /dev/null +++ b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/connect/cluster/ConnectClusterMetricService.java @@ -0,0 +1,27 @@ +package com.xiaojukeji.know.streaming.km.core.service.connect.cluster; + +import com.xiaojukeji.know.streaming.km.common.bean.dto.metrices.connect.MetricsConnectClustersDTO; +import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.connect.ConnectClusterMetrics; +import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result; +import com.xiaojukeji.know.streaming.km.common.bean.vo.metrics.line.MetricMultiLinesVO; + +import java.util.List; + +/** + * @author didi + */ +public interface ConnectClusterMetricService { + + /** + * 从Kafka获取指标 + */ + Result collectConnectClusterMetricsFromKafkaWithCacheFirst(Long connectClusterPhyId, String metricName); + Result collectConnectClusterMetricsFromKafka(Long connectClusterPhyId, String metricName); + + /** + * 从ES中获取一段时间内聚合计算之后的指标线 + */ + Result> listConnectClusterMetricsFromES(Long clusterPhyId, MetricsConnectClustersDTO dto); + + boolean isMetricName(String str); +} diff --git a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/connect/cluster/ConnectClusterService.java b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/connect/cluster/ConnectClusterService.java new file mode 100644 index 00000000..e6ad3929 --- /dev/null +++ b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/connect/cluster/ConnectClusterService.java @@ -0,0 +1,34 @@ +package com.xiaojukeji.know.streaming.km.core.service.connect.cluster; + + +import com.xiaojukeji.know.streaming.km.common.bean.dto.connect.cluster.ConnectClusterDTO; +import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.ConnectCluster; +import 
com.xiaojukeji.know.streaming.km.common.bean.entity.connect.ConnectClusterMetadata; +import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result; + +import java.util.List; + +/** + * Connect-Cluster + */ +public interface ConnectClusterService { + Long replaceAndReturnIdInDB(ConnectClusterMetadata metadata); + + List listByKafkaCluster(Long kafkaClusterPhyId); + + List listAllClusters(); + + ConnectCluster getById(Long connectClusterId); + + ConnectCluster getByName(Long clusterPhyId, String connectClusterName); + + String getClusterVersion(Long connectClusterId); + + String getClusterName(Long connectClusterId); + + Result deleteInDB(Long connectClusterId, String operator); + + Result batchModifyInDB(List dtoList, String operator); + + Boolean existConnectClusterDown(Long kafkaClusterPhyId); +} diff --git a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/connect/cluster/impl/ConnectClusterMetricServiceImpl.java b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/connect/cluster/impl/ConnectClusterMetricServiceImpl.java new file mode 100644 index 00000000..5ed5af64 --- /dev/null +++ b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/connect/cluster/impl/ConnectClusterMetricServiceImpl.java @@ -0,0 +1,270 @@ +package com.xiaojukeji.know.streaming.km.core.service.connect.cluster.impl; + +import com.didiglobal.logi.log.ILog; +import com.didiglobal.logi.log.LogFactory; +import com.google.common.collect.Table; +import com.xiaojukeji.know.streaming.km.common.bean.dto.metrices.connect.MetricsConnectClustersDTO; +import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.ConnectCluster; +import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.connect.ConnectClusterMetrics; +import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.connect.ConnectWorkerMetrics; +import com.xiaojukeji.know.streaming.km.common.bean.entity.param.VersionItemParam; +import 
com.xiaojukeji.know.streaming.km.common.bean.entity.param.metric.connect.ConnectClusterMetricParam; +import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result; +import com.xiaojukeji.know.streaming.km.common.bean.entity.version.VersionJmxInfo; +import com.xiaojukeji.know.streaming.km.common.bean.po.metrice.BrokerMetricPO; +import com.xiaojukeji.know.streaming.km.common.bean.vo.metrics.line.MetricLineVO; +import com.xiaojukeji.know.streaming.km.common.bean.vo.metrics.line.MetricMultiLinesVO; +import com.xiaojukeji.know.streaming.km.common.bean.vo.metrics.point.MetricPointVO; +import com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemTypeEnum; +import com.xiaojukeji.know.streaming.km.common.exception.VCHandlerNotExistException; +import com.xiaojukeji.know.streaming.km.common.jmx.JmxConnectorWrap; +import com.xiaojukeji.know.streaming.km.common.utils.BeanUtil; +import com.xiaojukeji.know.streaming.km.common.utils.ValidateUtils; +import com.xiaojukeji.know.streaming.km.core.cache.CollectedMetricsLocalCache; +import com.xiaojukeji.know.streaming.km.core.service.connect.cluster.ConnectClusterMetricService; +import com.xiaojukeji.know.streaming.km.core.service.connect.cluster.ConnectClusterService; +import com.xiaojukeji.know.streaming.km.core.service.connect.worker.WorkerService; +import com.xiaojukeji.know.streaming.km.core.service.version.BaseConnectorMetricService; +import com.xiaojukeji.know.streaming.km.persistence.connect.ConnectJMXClient; +import com.xiaojukeji.know.streaming.km.persistence.es.dao.connect.ConnectClusterMetricESDAO; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; +import org.springframework.util.CollectionUtils; + +import javax.management.ObjectName; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +import static com.xiaojukeji.know.streaming.km.common.bean.entity.result.ResultStatus.*; + 
+/** + * @author didi + */ +@Service +public class ConnectClusterMetricServiceImpl extends BaseConnectorMetricService implements ConnectClusterMetricService { + protected static final ILog LOGGER = LogFactory.getLog(ConnectClusterMetricServiceImpl.class); + + public static final String CONNECT_CLUSTER_METHOD_GET_WORKER_METRIC_AVG = "getWorkerMetricAvg"; + + public static final String CONNECT_CLUSTER_METHOD_GET_WORKER_METRIC_SUM = "getWorkerMetricSum"; + + public static final String CONNECT_CLUSTER_METHOD_DO_NOTHING = "doNothing"; + + @Autowired + private ConnectClusterService connectClusterService; + + @Autowired + private ConnectClusterMetricESDAO connectClusterMetricESDAO; + + @Autowired + private ConnectJMXClient connectJMXClient; + + @Autowired + private WorkerService workerService; + + @Override + protected VersionItemTypeEnum getVersionItemType() { + return VersionItemTypeEnum.METRIC_CONNECT_CLUSTER; + } + + @Override + protected List listMetricPOFields() { + return BeanUtil.listBeanFields(BrokerMetricPO.class); + } + + @Override + protected void initRegisterVCHandler() { + registerVCHandler(CONNECT_CLUSTER_METHOD_DO_NOTHING, this::doNothing); + registerVCHandler(CONNECT_CLUSTER_METHOD_GET_WORKER_METRIC_AVG, this::getConnectWorkerMetricAvg); + registerVCHandler(CONNECT_CLUSTER_METHOD_GET_WORKER_METRIC_SUM, this::getConnectWorkerMetricSum); + } + + @Override + public Result collectConnectClusterMetricsFromKafkaWithCacheFirst(Long connectClusterPhyId, String metric) { + String connectClusterMetricKey = CollectedMetricsLocalCache.genConnectClusterMetricCacheKey(connectClusterPhyId, metric); + Float keyValue = CollectedMetricsLocalCache.getConnectClusterMetrics(connectClusterMetricKey); + if (keyValue != null) { + ConnectClusterMetrics connectClusterMetrics = ConnectClusterMetrics.initWithMetric(connectClusterPhyId,metric,keyValue); + return Result.buildSuc(connectClusterMetrics); + } + + Result ret = 
this.collectConnectClusterMetricsFromKafka(connectClusterPhyId, metric); + if (ret == null || !ret.hasData()) { + return ret; + } + + Map metricsMap = ret.getData().getMetrics(); + for (Map.Entry entry : metricsMap.entrySet()) { + CollectedMetricsLocalCache.putConnectClusterMetrics(entry.getKey(), entry.getValue()); + } + return ret; + } + + @Override + public Result collectConnectClusterMetricsFromKafka( Long connectClusterPhyId, String metric) { + try { + ConnectClusterMetricParam metricParam = new ConnectClusterMetricParam(connectClusterPhyId, metric); + return (Result) doVCHandler(connectClusterPhyId, metric, metricParam); + } catch (VCHandlerNotExistException e) { + return Result.buildFailure(VC_HANDLE_NOT_EXIST); + } + } + + @Override + public Result> listConnectClusterMetricsFromES(Long clusterPhyId, MetricsConnectClustersDTO dto) { + Long startTime = dto.getStartTime(); + Long endTime = dto.getEndTime(); + Integer topN = dto.getTopNu(); + String aggType = dto.getAggType(); + List connectClusterIdList = dto.getConnectClusterIdList(); + List metricNameList = dto.getMetricsNames(); + + Table> retTable; + if (ValidateUtils.isEmptyList(connectClusterIdList)) { + // 按照TopN的方式去获取 + List defaultConnectClusterIdList = this.listTopNConnectClusterIdList(clusterPhyId, topN); + + retTable = connectClusterMetricESDAO.listMetricsByTop(clusterPhyId, defaultConnectClusterIdList, metricNameList, aggType, topN, startTime, endTime); + } else { + // 制定集群ID去获取 + retTable = connectClusterMetricESDAO.listMetricsByConnectClusterIdList(clusterPhyId, metricNameList, aggType, connectClusterIdList, startTime, endTime); + } + + return Result.buildSuc(this.metricMap2VO(clusterPhyId, retTable.rowMap())); + } + + @Override + public boolean isMetricName(String str) { + return super.isMetricName(str); + } + + /**************************************************** private method ****************************************************/ + private Result doNothing(VersionItemParam metricParam) { + 
ConnectClusterMetricParam param = (ConnectClusterMetricParam) metricParam; + return Result.buildSuc(new ConnectClusterMetrics(null, param.getConnectClusterId())); + } + + private Result getConnectWorkerMetricAvg(VersionItemParam metricParam) { + ConnectClusterMetricParam param = (ConnectClusterMetricParam) metricParam; + Long connectClusterId = param.getConnectClusterId(); + String metric = param.getMetric(); + + Result> ret = this.getConnectWorkerMetricsByJMX(connectClusterId, metric); + if (ret == null || !ret.hasData() || ret.getData().isEmpty()) { + return Result.buildFailure(NOT_EXIST); + } + + //求均值 + Float value = ret.getData().stream().map(elem -> elem.getMetric(metric) == null ? 0 : elem.getMetric(metric)).reduce(Float::sum).get(); + ConnectClusterMetrics connectClusterMetrics = new ConnectClusterMetrics(null, connectClusterId); + connectClusterMetrics.putMetric(metric, value / ret.getData().size()); + return Result.buildSuc(connectClusterMetrics); + } + + private Result getConnectWorkerMetricSum(VersionItemParam metricParam) { + ConnectClusterMetricParam param = (ConnectClusterMetricParam) metricParam; + Long connectClusterId = param.getConnectClusterId(); + String metric = param.getMetric(); + + Result> ret = this.getConnectWorkerMetricsByJMX(connectClusterId, metric); + if (ret == null || !ret.hasData() || ret.getData().isEmpty()) { + return Result.buildFailure(NOT_EXIST); + } + + //求和 + Float value = ret.getData().stream().map(elem -> elem.getMetric(metric) == null ? 
0 : elem.getMetric(metric)).reduce(Float::sum).get(); + ConnectClusterMetrics connectClusterMetrics = new ConnectClusterMetrics(null, connectClusterId); + connectClusterMetrics.putMetric(metric, value); + return Result.buildSuc(connectClusterMetrics); + } + + //获取workermetric列表 + private Result> getConnectWorkerMetricsByJMX(Long connectClusterId, String metric) { + + List workerIdList = workerService.listFromDB(connectClusterId).stream().map(elem -> elem.getWorkerId()).collect(Collectors.toList()); + List workerMetricsList = new ArrayList<>(); + + for (String workerId : workerIdList) { + Result ret = this.getConnectWorkerMetricByJMX(connectClusterId, workerId, metric); + if (ret == null || !ret.hasData() || ret.getData().getMetric(metric) == null) { + continue; + } + workerMetricsList.add(ret.getData()); + } + return Result.buildSuc(workerMetricsList); + } + + private Result getConnectWorkerMetricByJMX(Long connectClusterId, String workerId, String metric) { + VersionJmxInfo jmxInfo = getJMXInfo(connectClusterId, metric); + if (null == jmxInfo) { + return Result.buildFailure(VC_ITEM_JMX_NOT_EXIST); + } + + JmxConnectorWrap jmxConnectorWrap = connectJMXClient.getClientWithCheck(connectClusterId, workerId); + if (ValidateUtils.isNull(jmxConnectorWrap)) { + return Result.buildFailure(VC_JMX_INIT_ERROR); + } + try { + //2、获取jmx指标 + String value = jmxConnectorWrap.getAttribute(new ObjectName(jmxInfo.getJmxObjectName()), jmxInfo.getJmxAttribute()).toString(); + ConnectWorkerMetrics connectWorkerMetrics = ConnectWorkerMetrics.initWithMetric(connectClusterId, workerId, metric, Float.valueOf(value)); + return Result.buildSuc(connectWorkerMetrics); + } catch (Exception e) { + LOGGER.error("method=getConnectWorkerMetricsByJMX||connectClusterId={}||workerId={}||metrics={}||jmx={}||msg={}", + connectClusterId, workerId, metric, jmxInfo.getJmxObjectName(), e.getClass().getName()); + return Result.buildFailure(VC_JMX_CONNECT_ERROR); + } + } + + private List 
listTopNConnectClusterIdList(Long clusterPhyId, Integer topN) { + List connectClusters = connectClusterService.listByKafkaCluster(clusterPhyId); + + if (CollectionUtils.isEmpty(connectClusters)) { + return new ArrayList<>(); + } + + return connectClusters.subList(0, Math.min(topN, connectClusters.size())) + .stream() + .map(b -> b.getId().longValue()) + .collect(Collectors.toList()); + } + + protected List metricMap2VO(Long connectClusterId, + Map>> map){ + List multiLinesVOS = new ArrayList<>(); + if (map == null || map.isEmpty()) { + // 如果为空,则直接返回 + return multiLinesVOS; + } + + for(String metric : map.keySet()){ + try { + MetricMultiLinesVO multiLinesVO = new MetricMultiLinesVO(); + multiLinesVO.setMetricName(metric); + + List metricLines = new ArrayList<>(); + + Map> metricPointMap = map.get(metric); + if(null == metricPointMap || metricPointMap.isEmpty()){continue;} + + for(Map.Entry> entry : metricPointMap.entrySet()){ + MetricLineVO metricLineVO = new MetricLineVO(); + metricLineVO.setName(entry.getKey().toString()); + metricLineVO.setMetricName(metric); + metricLineVO.setMetricPoints(entry.getValue()); + + metricLines.add(metricLineVO); + } + + multiLinesVO.setMetricLines(metricLines); + multiLinesVOS.add(multiLinesVO); + }catch (Exception e){ + LOGGER.error("method=metricMap2VO||connectClusterId={}||msg=exception!", connectClusterId, e); + } + } + + return multiLinesVOS; + } +} diff --git a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/connect/cluster/impl/ConnectClusterServiceImpl.java b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/connect/cluster/impl/ConnectClusterServiceImpl.java new file mode 100644 index 00000000..6597b8ec --- /dev/null +++ b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/connect/cluster/impl/ConnectClusterServiceImpl.java @@ -0,0 +1,243 @@ +package com.xiaojukeji.know.streaming.km.core.service.connect.cluster.impl; + +import 
com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper; +import com.didiglobal.logi.log.ILog; +import com.didiglobal.logi.log.LogFactory; +import com.didiglobal.logi.security.common.dto.oplog.OplogDTO; +import com.xiaojukeji.know.streaming.km.common.bean.dto.connect.cluster.ConnectClusterDTO; +import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.ConnectCluster; +import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.ConnectClusterMetadata; +import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result; +import com.xiaojukeji.know.streaming.km.common.bean.entity.result.ResultStatus; +import com.xiaojukeji.know.streaming.km.common.bean.po.connect.ConnectClusterPO; +import com.xiaojukeji.know.streaming.km.common.constant.KafkaConstant; +import com.xiaojukeji.know.streaming.km.common.constant.MsgConstant; +import com.xiaojukeji.know.streaming.km.common.enums.group.GroupStateEnum; +import com.xiaojukeji.know.streaming.km.common.enums.operaterecord.ModuleEnum; +import com.xiaojukeji.know.streaming.km.common.enums.operaterecord.OperationEnum; +import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil; +import com.xiaojukeji.know.streaming.km.common.utils.ValidateUtils; +import com.xiaojukeji.know.streaming.km.core.service.connect.cluster.ConnectClusterService; +import com.xiaojukeji.know.streaming.km.core.service.oprecord.OpLogWrapService; +import com.xiaojukeji.know.streaming.km.persistence.mysql.connect.ConnectClusterDAO; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.dao.DuplicateKeyException; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; +import org.springframework.transaction.interceptor.TransactionAspectSupport; + +import java.util.List; + +@Service +public class ConnectClusterServiceImpl implements ConnectClusterService { + private static final ILog LOGGER = 
LogFactory.getLog(ConnectClusterServiceImpl.class); + + @Autowired + private ConnectClusterDAO connectClusterDAO; + + @Autowired + private OpLogWrapService opLogWrapService; + + @Override + public Long replaceAndReturnIdInDB(ConnectClusterMetadata metadata) { + //url去斜杠 + String clusterUrl = metadata.getMemberLeaderUrl(); + if (clusterUrl.charAt(clusterUrl.length() - 1) == '/') { + clusterUrl = clusterUrl.substring(0, clusterUrl.length() - 1); + } + + ConnectClusterPO oldPO = this.getPOFromDB(metadata.getKafkaClusterPhyId(), metadata.getGroupName()); + if (oldPO == null) { + oldPO = new ConnectClusterPO(); + oldPO.setKafkaClusterPhyId(metadata.getKafkaClusterPhyId()); + oldPO.setGroupName(metadata.getGroupName()); + oldPO.setName(metadata.getGroupName()); + oldPO.setState(metadata.getState().getCode()); + oldPO.setMemberLeaderUrl(metadata.getMemberLeaderUrl()); + oldPO.setClusterUrl(clusterUrl); + oldPO.setVersion(KafkaConstant.DEFAULT_CONNECT_VERSION); + connectClusterDAO.insert(oldPO); + + oldPO = this.getPOFromDB(metadata.getKafkaClusterPhyId(), metadata.getGroupName()); + return oldPO == null? 
null: oldPO.getId(); + } + + oldPO.setKafkaClusterPhyId(metadata.getKafkaClusterPhyId()); + oldPO.setGroupName(metadata.getGroupName()); + oldPO.setState(metadata.getState().getCode()); + oldPO.setMemberLeaderUrl(metadata.getMemberLeaderUrl()); + if (ValidateUtils.isBlank(oldPO.getVersion())) { + oldPO.setVersion(KafkaConstant.DEFAULT_CONNECT_VERSION); + } + if (ValidateUtils.isBlank(oldPO.getClusterUrl())) { + oldPO.setClusterUrl(metadata.getMemberLeaderUrl()); + } + connectClusterDAO.updateById(oldPO); + + return oldPO.getId(); + } + + @Override + public List listByKafkaCluster(Long kafkaClusterPhyId) { + LambdaQueryWrapper lambdaQueryWrapper = new LambdaQueryWrapper<>(); + lambdaQueryWrapper.eq(ConnectClusterPO::getKafkaClusterPhyId, kafkaClusterPhyId); + + return ConvertUtil.list2List(connectClusterDAO.selectList(lambdaQueryWrapper), ConnectCluster.class); + } + + @Override + public List listAllClusters() { + List connectClusterPOList = connectClusterDAO.selectList(null); + return ConvertUtil.list2List(connectClusterPOList, ConnectCluster.class); + } + + @Override + public ConnectCluster getById(Long connectClusterId) { + return ConvertUtil.obj2Obj(connectClusterDAO.selectById(connectClusterId), ConnectCluster.class); + } + + @Override + public ConnectCluster getByName(Long clusterPhyId, String connectClusterName) { + LambdaQueryWrapper lambdaQueryWrapper = new LambdaQueryWrapper<>(); + lambdaQueryWrapper.eq(ConnectClusterPO::getKafkaClusterPhyId, clusterPhyId); + lambdaQueryWrapper.eq(ConnectClusterPO::getName, connectClusterName); + + return ConvertUtil.obj2Obj(connectClusterDAO.selectOne(lambdaQueryWrapper), ConnectCluster.class); + } + + @Override + public String getClusterVersion(Long connectClusterId) { + ConnectClusterPO connectClusterPO = connectClusterDAO.selectById(connectClusterId); + return null != connectClusterPO ? 
connectClusterPO.getVersion() : ""; + } + + @Override + public String getClusterName(Long connectClusterId) { + ConnectClusterPO connectClusterPO = connectClusterDAO.selectById(connectClusterId); + return null != connectClusterPO ? connectClusterPO.getName() : ""; + } + + @Override + public Result deleteInDB(Long connectClusterId, String operator) { + ConnectCluster connectCluster = this.getById(connectClusterId); + if (connectCluster == null) { + return Result.buildFromRSAndMsg(ResultStatus.NOT_EXIST, MsgConstant.getConnectClusterNotExist(connectClusterId)); + } + + if (!GroupStateEnum.DEAD.getCode().equals(connectCluster.getState())) { + return Result.buildFromRSAndMsg(ResultStatus.OPERATION_FORBIDDEN, "只有集群处于Dead状态,才允许删除"); + } + + connectClusterDAO.deleteById(connectClusterId); + + opLogWrapService.saveOplogAndIgnoreException(new OplogDTO( + operator, + OperationEnum.DELETE.getDesc(), + ModuleEnum.KAFKA_CONNECT_CONNECTOR.getDesc(), + MsgConstant.getConnectClusterBizStr(connectCluster.getId(), connectCluster.getName()), + ConvertUtil.obj2Json(connectCluster) + )); + + return Result.buildSuc(); + } + + @Override + @Transactional + public Result batchModifyInDB(List dtoList, String operator) { + LOGGER.info("method=batchModifyInDB||data={}||operator={}", dtoList, operator); + + for (ConnectClusterDTO dto: dtoList) { + if (!dto.getClusterUrl().startsWith("http://") && !dto.getClusterUrl().startsWith("https://")) { + return Result.buildFromRSAndMsg(ResultStatus.PARAM_ILLEGAL, "clusterUrl必须以http或者https开头"); + } + } + + for (ConnectClusterDTO dto: dtoList) { + try { + ConnectClusterPO po = this.getRowById(dto.getId()); + if (po == null) { + // 回滚事务 + TransactionAspectSupport.currentTransactionStatus().setRollbackOnly(); + + return Result.buildFromRSAndMsg(ResultStatus.NOT_EXIST, MsgConstant.getConnectClusterNotExist(dto.getId())); + } + + if (!ValidateUtils.isNull(dto.getName())) { + po.setName(dto.getName()); + } + + if (!ValidateUtils.isNull(dto.getClusterUrl())) { 
+ String clusterUrl = dto.getClusterUrl(); + if (clusterUrl.charAt(clusterUrl.length() - 1) == '/') { + clusterUrl = clusterUrl.substring(0, clusterUrl.length() - 1); + } + po.setClusterUrl(clusterUrl); + } + if (!ValidateUtils.isNull(dto.getVersion())) { + po.setVersion(dto.getVersion()); + } + if (!ValidateUtils.isNull(dto.getJmxProperties())) { + po.setJmxProperties(dto.getJmxProperties()); + } + + connectClusterDAO.updateById(po); + + // 记录操作 + opLogWrapService.saveOplogAndIgnoreException( + new OplogDTO( + operator, + OperationEnum.EDIT.getDesc(), + ModuleEnum.KAFKA_CONNECT_CLUSTER.getDesc(), + MsgConstant.getConnectClusterBizStr(dto.getId(), dto.getName()), + ConvertUtil.obj2Json(po) + ) + ); + } catch (DuplicateKeyException dke) { + LOGGER.error( + "method=batchModifyInDB||data={}||operator={}||errMsg=connectCluster name duplicate", + dtoList, operator + ); + TransactionAspectSupport.currentTransactionStatus().setRollbackOnly(); + + return Result.buildFromRSAndMsg(ResultStatus.PARAM_ILLEGAL, "connect集群name重复"); + + } catch (Exception e) { + LOGGER.error( + "method=batchModifyInDB||data={}||operator={}||errMsg=exception", + dtoList, operator, e + ); + + // 回滚事务 + TransactionAspectSupport.currentTransactionStatus().setRollbackOnly(); + + return Result.buildFromRSAndMsg(ResultStatus.MYSQL_OPERATE_FAILED, e.getMessage()); + } + } + + return Result.buildSuc(); + } + + @Override + public Boolean existConnectClusterDown(Long kafkaClusterPhyId) { + List connectClusters = this.listByKafkaCluster(kafkaClusterPhyId); + for (ConnectCluster connectCluster : connectClusters) { + if (GroupStateEnum.getByState(String.valueOf(connectCluster.getState())) == GroupStateEnum.DEAD) + return true; + } + return false; + } + + /**************************************************** private method ****************************************************/ + + private ConnectClusterPO getPOFromDB(Long kafkaClusterPhyId, String groupName) { + LambdaQueryWrapper lambdaQueryWrapper = new 
LambdaQueryWrapper<>(); + lambdaQueryWrapper.eq(ConnectClusterPO::getGroupName, groupName); + lambdaQueryWrapper.eq(ConnectClusterPO::getKafkaClusterPhyId, kafkaClusterPhyId); + + return connectClusterDAO.selectOne(lambdaQueryWrapper); + } + + public ConnectClusterPO getRowById(Long connectClusterId) { + return connectClusterDAO.selectById(connectClusterId); + } +} diff --git a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/connect/connector/ConnectorMetricService.java b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/connect/connector/ConnectorMetricService.java new file mode 100644 index 00000000..67879088 --- /dev/null +++ b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/connect/connector/ConnectorMetricService.java @@ -0,0 +1,36 @@ +package com.xiaojukeji.know.streaming.km.core.service.connect.connector; + +import com.xiaojukeji.know.streaming.km.common.bean.dto.connect.ClusterConnectorDTO; +import com.xiaojukeji.know.streaming.km.common.bean.dto.metrices.connect.MetricsConnectorsDTO; +import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.connect.ConnectorMetrics; +import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result; +import com.xiaojukeji.know.streaming.km.common.bean.vo.metrics.line.MetricMultiLinesVO; +import com.xiaojukeji.know.streaming.km.common.enums.connect.ConnectorTypeEnum; + +import java.util.List; + +/** + * @author didi + */ +public interface ConnectorMetricService { + + /** + * 从Kafka获取指标 + */ + Result collectConnectClusterMetricsFromKafkaWithCacheFirst(Long connectClusterPhyId, String connectorName, String metricName); + + Result collectConnectClusterMetricsFromKafka(Long connectClusterPhyId, String connectorName, String metricName); + + Result collectConnectClusterMetricsFromKafka(Long connectClusterPhyId, String connectorName, String metricName, ConnectorTypeEnum connectorType); + + /** + * 从ES中获取一段时间内聚合计算之后的指标线 + */ + Result> 
listConnectClusterMetricsFromES(Long clusterPhyId, MetricsConnectorsDTO dto); + + Result> getLatestMetricsFromES(Long clusterPhyId, List connectorNameList, List metricNameList); + + Result getLatestMetricsFromES(Long connectClusterId, String connectorName, List metricsNames); + + boolean isMetricName(String str); +} diff --git a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/connect/connector/ConnectorService.java b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/connect/connector/ConnectorService.java new file mode 100644 index 00000000..076f5c11 --- /dev/null +++ b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/connect/connector/ConnectorService.java @@ -0,0 +1,59 @@ +package com.xiaojukeji.know.streaming.km.core.service.connect.connector; + +import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.connector.KSConnector; +import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.connector.KSConnectorInfo; +import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.connector.KSConnectorStateInfo; +import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result; +import com.xiaojukeji.know.streaming.km.common.bean.po.connect.ConnectorPO; +import com.xiaojukeji.know.streaming.km.common.enums.connect.ConnectorTypeEnum; + +import java.util.List; +import java.util.Properties; +import java.util.Set; + +/** + * 查看Connector + */ +public interface ConnectorService { + Result createConnector(Long connectClusterId, String connectorName, Properties configs, String operator); + + /** + * 获取所有的连接器名称列表 + */ + Result> listConnectorsFromCluster(Long connectClusterId); + + /** + * 获取单个连接器信息 + */ + Result getConnectorInfoFromCluster(Long connectClusterId, String connectorName); + + Result> getConnectorTopicsFromCluster(Long connectClusterId, String connectorName); + + Result getConnectorStateInfoFromCluster(Long connectClusterId, String connectorName); + + Result 
getAllConnectorInfoFromCluster(Long connectClusterId, String connectorName); + + Result resumeConnector(Long connectClusterId, String connectorName, String operator); + + Result restartConnector(Long connectClusterId, String connectorName, String operator); + + Result stopConnector(Long connectClusterId, String connectorName, String operator); + + Result deleteConnector(Long connectClusterId, String connectorName, String operator); + + Result updateConnectorConfig(Long connectClusterId, String connectorName, Properties configs, String operator); + + void batchReplace(Long kafkaClusterPhyId, Long connectClusterId, List connectorList, Set allConnectorNameSet); + + void addNewToDB(KSConnector connector); + + List listByKafkaClusterIdFromDB(Long kafkaClusterPhyId); + + List listByConnectClusterIdFromDB(Long connectClusterId); + + int countByConnectClusterIdFromDB(Long connectClusterId); + + ConnectorPO getConnectorFromDB(Long connectClusterId, String connectorName); + + ConnectorTypeEnum getConnectorType(Long connectClusterId, String connectorName); +} diff --git a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/connect/connector/impl/ConnectorMetricServiceImpl.java b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/connect/connector/impl/ConnectorMetricServiceImpl.java new file mode 100644 index 00000000..10325e02 --- /dev/null +++ b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/connect/connector/impl/ConnectorMetricServiceImpl.java @@ -0,0 +1,443 @@ +package com.xiaojukeji.know.streaming.km.core.service.connect.connector.impl; + +import com.didiglobal.logi.log.ILog; +import com.didiglobal.logi.log.LogFactory; +import com.google.common.collect.Table; +import com.xiaojukeji.know.streaming.km.common.bean.dto.connect.ClusterConnectorDTO; +import com.xiaojukeji.know.streaming.km.common.bean.dto.metrices.connect.MetricsConnectorsDTO; +import 
com.xiaojukeji.know.streaming.km.common.bean.entity.connect.WorkerConnector; +import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.connect.ConnectorMetrics; +import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.connect.ConnectorTaskMetrics; +import com.xiaojukeji.know.streaming.km.common.bean.entity.param.VersionItemParam; +import com.xiaojukeji.know.streaming.km.common.bean.entity.param.metric.connect.ConnectorMetricParam; +import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result; +import com.xiaojukeji.know.streaming.km.common.bean.entity.version.VersionConnectJmxInfo; +import com.xiaojukeji.know.streaming.km.common.bean.po.connect.ConnectorPO; +import com.xiaojukeji.know.streaming.km.common.bean.po.metrice.BrokerMetricPO; +import com.xiaojukeji.know.streaming.km.common.bean.po.metrice.connect.ConnectorMetricPO; +import com.xiaojukeji.know.streaming.km.common.bean.vo.metrics.line.MetricLineVO; +import com.xiaojukeji.know.streaming.km.common.bean.vo.metrics.line.MetricMultiLinesVO; +import com.xiaojukeji.know.streaming.km.common.bean.vo.metrics.point.MetricPointVO; +import com.xiaojukeji.know.streaming.km.common.enums.connect.ConnectorTypeEnum; +import com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemTypeEnum; +import com.xiaojukeji.know.streaming.km.common.exception.VCHandlerNotExistException; +import com.xiaojukeji.know.streaming.km.common.jmx.JmxConnectorWrap; +import com.xiaojukeji.know.streaming.km.common.utils.BeanUtil; +import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil; +import com.xiaojukeji.know.streaming.km.common.utils.Tuple; +import com.xiaojukeji.know.streaming.km.common.utils.ValidateUtils; +import com.xiaojukeji.know.streaming.km.core.cache.CollectedMetricsLocalCache; +import com.xiaojukeji.know.streaming.km.core.service.connect.connector.ConnectorMetricService; +import com.xiaojukeji.know.streaming.km.core.service.connect.connector.ConnectorService; +import 
com.xiaojukeji.know.streaming.km.core.service.connect.worker.WorkerConnectorService; +import com.xiaojukeji.know.streaming.km.core.service.connect.worker.WorkerService; +import com.xiaojukeji.know.streaming.km.core.service.health.state.HealthStateService; +import com.xiaojukeji.know.streaming.km.core.service.version.BaseConnectorMetricService; +import com.xiaojukeji.know.streaming.km.persistence.connect.ConnectJMXClient; +import com.xiaojukeji.know.streaming.km.persistence.es.dao.connect.ConnectorMetricESDAO; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; +import org.springframework.util.CollectionUtils; + +import javax.management.InstanceNotFoundException; +import javax.management.ObjectName; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +import static com.xiaojukeji.know.streaming.km.common.bean.entity.result.ResultStatus.*; + +/** + * @author didi + */ +@Service +public class ConnectorMetricServiceImpl extends BaseConnectorMetricService implements ConnectorMetricService { + protected static final ILog LOGGER = LogFactory.getLog(ConnectorMetricServiceImpl.class); + + public static final String CONNECTOR_METHOD_DO_NOTHING = "getConnectWorkerMetricSum"; + + public static final String CONNECTOR_METHOD_GET_CONNECT_WORKER_METRIC_SUM = "getConnectWorkerMetricSum"; + + public static final String CONNECTOR_METHOD_GET_CONNECTOR_TASK_METRICS_AVG = "getConnectorTaskMetricsAvg"; + + public static final String CONNECTOR_METHOD_GET_CONNECTOR_TASK_METRICS_MAX = "getConnectorTaskMetricsMax"; + + public static final String CONNECTOR_METHOD_GET_CONNECTOR_TASK_METRICS_SUM = "getConnectorTaskMetricsSum"; + + public static final String CONNECTOR_METHOD_GET_METRIC_HEALTH_SCORE = "getMetricHealthScore"; + + @Autowired + private ConnectorMetricESDAO connectorMetricESDAO; + + @Autowired + private ConnectJMXClient connectJMXClient; + + @Autowired + private 
WorkerService workerService;

    @Autowired
    private ConnectorService connectorService;

    @Autowired
    private WorkerConnectorService workerConnectorService;

    @Autowired
    private HealthStateService healthStateService;

    @Override
    protected VersionItemTypeEnum getVersionItemType() {
        return VersionItemTypeEnum.METRIC_CONNECT_CONNECTOR;
    }

    /**
     * Field names of the connector metric PO, used to recognize metric fields.
     * Bugfix: previously listed BrokerMetricPO fields (copy-paste from the broker
     * metric service), which do not match connector metric documents.
     */
    @Override
    protected List listMetricPOFields() {
        return BeanUtil.listBeanFields(ConnectorMetricPO.class);
    }

    @Override
    protected void initRegisterVCHandler() {
        // NOTE(review): CONNECTOR_METHOD_DO_NOTHING and CONNECTOR_METHOD_GET_CONNECT_WORKER_METRIC_SUM
        // currently hold the same string ("getConnectWorkerMetricSum"), so the second registration
        // overwrites the first and this::doNothing is never invoked — confirm the intended key value.
        registerVCHandler(CONNECTOR_METHOD_DO_NOTHING, this::doNothing);
        registerVCHandler(CONNECTOR_METHOD_GET_CONNECT_WORKER_METRIC_SUM, this::getConnectWorkerMetricSum);
        registerVCHandler(CONNECTOR_METHOD_GET_CONNECTOR_TASK_METRICS_AVG, this::getConnectorTaskMetricsAvg);
        registerVCHandler(CONNECTOR_METHOD_GET_CONNECTOR_TASK_METRICS_MAX, this::getConnectorTaskMetricsMax);
        registerVCHandler(CONNECTOR_METHOD_GET_CONNECTOR_TASK_METRICS_SUM, this::getConnectorTaskMetricsSum);
        registerVCHandler(CONNECTOR_METHOD_GET_METRIC_HEALTH_SCORE, this::getMetricHealthScore);
    }

    /**
     * Collect one connector metric, serving it from the local cache when present and falling
     * back to a live collection otherwise. Freshly collected values are written back to the
     * cache under the same composite (clusterId, connectorName, metricName) key used for lookup.
     */
    @Override
    public Result collectConnectClusterMetricsFromKafkaWithCacheFirst(Long connectClusterPhyId, String connectorName, String metric) {
        String connectorMetricKey = CollectedMetricsLocalCache.genConnectorMetricCacheKey(connectClusterPhyId, connectorName, metric);
        Float keyValue = CollectedMetricsLocalCache.getConnectorMetrics(connectorMetricKey);

        if (null != keyValue) {
            ConnectorMetrics connectorMetrics = ConnectorMetrics.initWithMetric(connectClusterPhyId, connectorName, metric, keyValue);
            return Result.buildSuc(connectorMetrics);
        }

        Result ret = this.collectConnectClusterMetricsFromKafka(connectClusterPhyId, connectorName, metric);
        if (ret == null || !ret.hasData()) {
            return ret;
        }

        Map metricMap = ret.getData().getMetrics();
        for (Map.Entry entry : metricMap.entrySet()) {
            // Bugfix: cache under the composite key used for lookups above. Caching under the
            // bare metric name (entry.getKey()) could never be read back by the lookup and
            // would collide across clusters/connectors.
            CollectedMetricsLocalCache.putConnectorMetrics(
                    CollectedMetricsLocalCache.genConnectorMetricCacheKey(connectClusterPhyId, connectorName, entry.getKey()),
                    entry.getValue());
        }
        return ret;
    }

    /**
     * Collect one metric via the registered version-control handler (connector type resolved lazily).
     */
    @Override
    public Result collectConnectClusterMetricsFromKafka(Long connectClusterPhyId, String connectorName, String metricName) {
        try {
            ConnectorMetricParam metricParam = new ConnectorMetricParam(connectClusterPhyId, connectorName, metricName, null);
            return (Result) doVCHandler(connectClusterPhyId, metricName, metricParam);
        } catch (VCHandlerNotExistException e) {
            return Result.buildFailure(VC_HANDLE_NOT_EXIST);
        }
    }

    /**
     * Collect one metric via the registered version-control handler, with the connector type known up front.
     */
    @Override
    public Result collectConnectClusterMetricsFromKafka(Long connectClusterPhyId, String connectorName, String metricName, ConnectorTypeEnum connectorType) {
        try {
            ConnectorMetricParam metricParam = new ConnectorMetricParam(connectClusterPhyId, connectorName, metricName, connectorType);
            return (Result) doVCHandler(connectClusterPhyId, metricName, metricParam);
        } catch (VCHandlerNotExistException e) {
            return Result.buildFailure(VC_HANDLE_NOT_EXIST);
        }
    }

    /**
     * Aggregated metric lines from ES for a time range; falls back to the cluster's
     * TopN connectors when no explicit connector list is given.
     */
    @Override
    public Result> listConnectClusterMetricsFromES(Long clusterPhyId, MetricsConnectorsDTO dto) {
        Long startTime = dto.getStartTime();
        Long endTime = dto.getEndTime();
        Integer topN = dto.getTopNu();
        String aggType = dto.getAggType();
        List metricNameList = dto.getMetricsNames();

        List> connectorList = new ArrayList<>();
        if (!CollectionUtils.isEmpty(dto.getConnectorNameList())) {
            connectorList = dto.getConnectorNameList().stream()
                    .map(c -> new Tuple<>(c.getConnectClusterId(), c.getConnectorName()))
                    .collect(Collectors.toList());
        }

        Table, List> retTable;
        if (ValidateUtils.isEmptyList(connectorList)) {
            // No explicit connector list: fetch by TopN
            List> defaultConnectorList = this.listTopNConnectorList(clusterPhyId, topN);

            retTable = connectorMetricESDAO.listMetricsByTopN(clusterPhyId, defaultConnectorList, metricNameList, aggType, topN, startTime, endTime);
        } else {
            // Fetch metrics for the explicitly requested connectors
            retTable = connectorMetricESDAO.listMetricsByConnectors(clusterPhyId, metricNameList, aggType, connectorList, startTime,
endTime); + } + + return Result.buildSuc(this.metricMap2VO(clusterPhyId, retTable.rowMap())); + } + + @Override + public Result> getLatestMetricsFromES(Long clusterPhyId, List connectorNameList, List metricsNames) { + List> connectClusterIdAndConnectorNameList = connectorNameList + .stream() + .map(elem -> new Tuple<>(elem.getConnectClusterId(), elem.getConnectorName())) + .collect(Collectors.toList()); + + List poList = + connectorMetricESDAO.getConnectorLatestMetric(clusterPhyId, connectClusterIdAndConnectorNameList, metricsNames); + + return Result.buildSuc(ConvertUtil.list2List(poList, ConnectorMetrics.class)); + } + + @Override + public Result getLatestMetricsFromES(Long connectClusterId, String connectorName, List metricsNames) { + ConnectorMetricPO connectorMetricPO = connectorMetricESDAO.getConnectorLatestMetric( + null, connectClusterId, connectorName, metricsNames); + return Result.buildSuc(ConvertUtil.obj2Obj(connectorMetricPO, ConnectorMetrics.class)); + } + + @Override + public boolean isMetricName(String str) { + return super.isMetricName(str); + } + + /**************************************************** private method ****************************************************/ + private Result doNothing(VersionItemParam metricParam){ + ConnectorMetricParam param = (ConnectorMetricParam) metricParam; + return Result.buildSuc(new ConnectorMetrics(param.getConnectClusterId(), param.getConnectorName())); + } + + private Result getMetricHealthScore(VersionItemParam metricParam) { + ConnectorMetricParam param = (ConnectorMetricParam) metricParam; + Long connectClusterId = param.getConnectClusterId(); + String connectorName = param.getConnectorName(); + + ConnectorMetrics metrics = healthStateService.calConnectorHealthMetrics(connectClusterId, connectorName); + return Result.buildSuc(metrics); + } + + private Result getConnectWorkerMetricSum(VersionItemParam metricParam) { + ConnectorMetricParam param = (ConnectorMetricParam) metricParam; + Long connectClusterId 
= param.getConnectClusterId(); + String connectorName = param.getConnectorName(); + String metric = param.getMetricName(); + ConnectorTypeEnum connectorType = param.getConnectorType(); + + float sum = 0; + boolean isCollected = false; + //根据connectClusterId获取connectMemberId列表 + List workerIdList = workerService.listFromDB(connectClusterId).stream().map(elem -> elem.getWorkerId()).collect(Collectors.toList()); + for (String workerId : workerIdList) { + Result ret = this.getConnectorMetric(connectClusterId, workerId, connectorName, metric, connectorType); + + if (ret == null || !ret.hasData() || ret.getData().getMetric(metric) == null) { + continue; + } + + isCollected = true; + sum += ret.getData().getMetric(metric); + } + if (!isCollected) { + return Result.buildFailure(NOT_EXIST); + } + return Result.buildSuc(ConnectorMetrics.initWithMetric(connectClusterId, connectorName, metric, sum)); + } + + //kafka.connect:type=connect-worker-metrics,connector="{connector}" 指标 + private Result getConnectorMetric(Long connectClusterId, String workerId, String connectorName, String metric, ConnectorTypeEnum connectorType) { + VersionConnectJmxInfo jmxInfo = getJMXInfo(connectClusterId, metric); + + if (jmxInfo.getType() != null) { + if (connectorType == null) { + connectorType = connectorService.getConnectorType(connectClusterId, connectorName); + } + + if (connectorType != jmxInfo.getType()) { + return Result.buildFailure(VC_JMX_INSTANCE_NOT_FOUND); + } + } + + if (null == jmxInfo) { + return Result.buildFailure(VC_ITEM_JMX_NOT_EXIST); + } + String jmxObjectName = String.format(jmxInfo.getJmxObjectName(), connectorName); + + JmxConnectorWrap jmxConnectorWrap = connectJMXClient.getClientWithCheck(connectClusterId, workerId); + if (ValidateUtils.isNull(jmxConnectorWrap)) { + return Result.buildFailure(VC_JMX_INIT_ERROR); + } + + try { + //2、获取jmx指标 + String value = jmxConnectorWrap.getAttribute(new ObjectName(jmxObjectName), jmxInfo.getJmxAttribute()).toString(); + 
ConnectorMetrics connectorMetrics = ConnectorMetrics.initWithMetric(connectClusterId, connectorName, metric, Float.valueOf(value)); + return Result.buildSuc(connectorMetrics); + } catch (InstanceNotFoundException e) { + // 忽略该错误,该错误出现的原因是该指标在JMX中不存在 + return Result.buildSuc(new ConnectorMetrics(connectClusterId, connectorName)); + } catch (Exception e) { + LOGGER.error("method=getConnectorMetric||connectClusterId={}||workerId={}||connectorName={}||metrics={}||jmx={}||msg={}", + connectClusterId, workerId, connectorName, metric, jmxObjectName, e.getClass().getName()); + return Result.buildFailure(VC_JMX_CONNECT_ERROR); + } + } + + + private Result getConnectorTaskMetricsAvg(VersionItemParam metricParam){ + ConnectorMetricParam param = (ConnectorMetricParam) metricParam; + Long connectClusterId = param.getConnectClusterId(); + String connectorName = param.getConnectorName(); + String metric = param.getMetricName(); + ConnectorTypeEnum connectorType = param.getConnectorType(); + + Result> ret = this.getConnectorTaskMetricList(connectClusterId, connectorName, metric, connectorType); + if (ret == null || !ret.hasData() || ret.getData().isEmpty()) { + return Result.buildFailure(NOT_EXIST); + } + + Float sum = ret.getData().stream().map(elem -> elem.getMetric(metric)).reduce(Float::sum).get(); + ConnectorMetrics connectorMetrics = ConnectorMetrics.initWithMetric(connectClusterId, connectorName, metric, sum / ret.getData().size()); + return Result.buildSuc(connectorMetrics); + } + + private Result getConnectorTaskMetricsMax(VersionItemParam metricParam){ + ConnectorMetricParam param = (ConnectorMetricParam) metricParam; + Long connectClusterId = param.getConnectClusterId(); + String connectorName = param.getConnectorName(); + String metric = param.getMetricName(); + ConnectorTypeEnum connectorType = param.getConnectorType(); + + Result> ret = this.getConnectorTaskMetricList(connectClusterId, connectorName, metric, connectorType); + if (ret == null || !ret.hasData() || 
ret.getData().isEmpty()) { + return Result.buildFailure(NOT_EXIST); + } + + Float sum = ret.getData().stream().max((a, b) -> a.getMetric(metric).compareTo(b.getMetric(metric))).get().getMetric(metric); + ConnectorMetrics connectorMetrics = ConnectorMetrics.initWithMetric(connectClusterId, connectorName, metric, sum / ret.getData().size()); + return Result.buildSuc(connectorMetrics); + } + + private Result getConnectorTaskMetricsSum(VersionItemParam metricParam){ + ConnectorMetricParam param = (ConnectorMetricParam) metricParam; + Long connectClusterId = param.getConnectClusterId(); + String connectorName = param.getConnectorName(); + String metric = param.getMetricName(); + ConnectorTypeEnum connectorType = param.getConnectorType(); + + Result> ret = this.getConnectorTaskMetricList(connectClusterId, connectorName, metric, connectorType); + if (ret == null || !ret.hasData() || ret.getData().isEmpty()) { + return Result.buildFailure(NOT_EXIST); + } + + Float sum = ret.getData().stream().map(elem -> elem.getMetric(metric)).reduce(Float::sum).get(); + ConnectorMetrics connectorMetrics = ConnectorMetrics.initWithMetric(connectClusterId, connectorName, metric, sum); + return Result.buildSuc(connectorMetrics); + } + + + private Result> getConnectorTaskMetricList(Long connectClusterId, String connectorName, String metricName, ConnectorTypeEnum connectorType) { + List connectorTaskMetricsList = new ArrayList<>(); + List workerConnectorList = workerConnectorService.listFromDB(connectClusterId).stream().filter(elem -> elem.getConnectorName().equals(connectorName)).collect(Collectors.toList()); + + if (workerConnectorList.isEmpty()) { + return Result.buildFailure(NOT_EXIST); + } + + for (WorkerConnector workerConnector : workerConnectorList) { + Result ret = getConnectorTaskMetric(connectClusterId, workerConnector.getWorkerId(), connectorName, workerConnector.getTaskId(), metricName, connectorType); + + if (ret == null || !ret.hasData() || ret.getData().getMetric(metricName) 
== null) { + continue; + } + + connectorTaskMetricsList.add(ret.getData()); + } + return Result.buildSuc(connectorTaskMetricsList); + } + + + private Result getConnectorTaskMetric(Long connectClusterId, String workerId, String connectorName, Integer taskId, String metric, ConnectorTypeEnum connectorType) { + VersionConnectJmxInfo jmxInfo = getJMXInfo(connectClusterId, metric); + + if (jmxInfo.getType() != null) { + if (connectorType == null) { + connectorType = connectorService.getConnectorType(connectClusterId, connectorName); + } + + if (connectorType != jmxInfo.getType()) { + return Result.buildFailure(VC_JMX_INSTANCE_NOT_FOUND); + } + } + + if (null == jmxInfo) { + return Result.buildFailure(VC_ITEM_JMX_NOT_EXIST); + } + String jmxObjectName=String.format(jmxInfo.getJmxObjectName(), connectorName, taskId); + + JmxConnectorWrap jmxConnectorWrap = connectJMXClient.getClientWithCheck(connectClusterId, workerId); + if (ValidateUtils.isNull(jmxConnectorWrap)) { + return Result.buildFailure(VC_JMX_INIT_ERROR); + } + + try { + //2、获取jmx指标 + String value = jmxConnectorWrap.getAttribute(new ObjectName(jmxObjectName), jmxInfo.getJmxAttribute()).toString(); + ConnectorTaskMetrics connectorTaskMetrics = ConnectorTaskMetrics.initWithMetric(connectClusterId, connectorName, taskId, metric, Float.valueOf(value)); + return Result.buildSuc(connectorTaskMetrics); + } catch (Exception e) { + LOGGER.error("method=getConnectorTaskMetric||connectClusterId={}||workerId={}||connectorName={}||taskId={}||metrics={}||jmx={}||msg={}", + connectClusterId, workerId, connectorName, taskId, metric, jmxObjectName, e.getClass().getName()); + return Result.buildFailure(VC_JMX_CONNECT_ERROR); + } + } + + private List> listTopNConnectorList(Long clusterPhyId, Integer topN) { + List connectorPOS = connectorService.listByKafkaClusterIdFromDB(clusterPhyId); + + if (CollectionUtils.isEmpty(connectorPOS)) { + return new ArrayList<>(); + } + + return connectorPOS.subList(0, Math.min(topN, 
connectorPOS.size())) + .stream() + .map( c -> new Tuple<>(c.getId(), c.getConnectorName()) ) + .collect(Collectors.toList()); + } + + protected List metricMap2VO(Long connectClusterId, + Map, List>> map){ + List multiLinesVOS = new ArrayList<>(); + if (map == null || map.isEmpty()) { + // 如果为空,则直接返回 + return multiLinesVOS; + } + + for(String metric : map.keySet()){ + try { + MetricMultiLinesVO multiLinesVO = new MetricMultiLinesVO(); + multiLinesVO.setMetricName(metric); + + List metricLines = new ArrayList<>(); + + Map, List> metricPointMap = map.get(metric); + if(null == metricPointMap || metricPointMap.isEmpty()){continue;} + + for(Map.Entry, List> entry : metricPointMap.entrySet()){ + MetricLineVO metricLineVO = new MetricLineVO(); + metricLineVO.setName(entry.getKey().getV1() + "#" + entry.getKey().getV2()); + metricLineVO.setMetricName(metric); + metricLineVO.setMetricPoints(entry.getValue()); + + metricLines.add(metricLineVO); + } + + multiLinesVO.setMetricLines(metricLines); + multiLinesVOS.add(multiLinesVO); + }catch (Exception e){ + LOGGER.error("method=metricMap2VO||connectClusterId={}||msg=exception!", connectClusterId, e); + } + } + + return multiLinesVOS; + } +} diff --git a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/connect/connector/impl/ConnectorServiceImpl.java b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/connect/connector/impl/ConnectorServiceImpl.java new file mode 100644 index 00000000..9d2136a9 --- /dev/null +++ b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/connect/connector/impl/ConnectorServiceImpl.java @@ -0,0 +1,581 @@ +package com.xiaojukeji.know.streaming.km.core.service.connect.connector.impl; + +import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper; +import com.didiglobal.logi.log.ILog; +import com.didiglobal.logi.log.LogFactory; +import com.didiglobal.logi.security.common.dto.oplog.OplogDTO; +import 
com.xiaojukeji.know.streaming.km.common.bean.entity.connect.ConnectCluster; +import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.connector.KSConnector; +import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.connector.KSConnectorInfo; +import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.connector.KSConnectorStateInfo; +import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result; +import com.xiaojukeji.know.streaming.km.common.bean.entity.result.ResultStatus; +import com.xiaojukeji.know.streaming.km.common.bean.po.connect.ConnectorPO; +import com.xiaojukeji.know.streaming.km.common.component.RestTool; +import com.xiaojukeji.know.streaming.km.common.constant.MsgConstant; +import com.xiaojukeji.know.streaming.km.common.converter.ConnectConverter; +import com.xiaojukeji.know.streaming.km.common.enums.connect.ConnectorTypeEnum; +import com.xiaojukeji.know.streaming.km.common.enums.operaterecord.ModuleEnum; +import com.xiaojukeji.know.streaming.km.common.enums.operaterecord.OperationEnum; +import com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemTypeEnum; +import com.xiaojukeji.know.streaming.km.common.utils.BackoffUtils; +import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil; +import com.xiaojukeji.know.streaming.km.common.utils.ValidateUtils; +import com.xiaojukeji.know.streaming.km.core.service.connect.cluster.ConnectClusterService; +import com.xiaojukeji.know.streaming.km.core.service.connect.connector.ConnectorService; +import com.xiaojukeji.know.streaming.km.core.service.oprecord.OpLogWrapService; +import com.xiaojukeji.know.streaming.km.core.service.version.BaseVersionControlService; +import com.xiaojukeji.know.streaming.km.persistence.mysql.connect.ConnectorDAO; +import org.apache.kafka.connect.runtime.rest.entities.ActiveTopicsInfo; +import org.apache.kafka.connect.runtime.rest.entities.ConnectorInfo; +import org.springframework.beans.factory.annotation.Autowired; +import 
org.springframework.dao.DuplicateKeyException; +import org.springframework.stereotype.Service; + +import java.util.*; + +import static com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemTypeEnum.SERVICE_OP_CONNECT_CONNECTOR; + +@Service +public class ConnectorServiceImpl extends BaseVersionControlService implements ConnectorService { + private static final ILog LOGGER = LogFactory.getLog(ConnectorServiceImpl.class); + + @Autowired + private RestTool restTool; + + @Autowired + private ConnectorDAO connectorDAO; + + @Autowired + private ConnectClusterService connectClusterService; + + @Autowired + private OpLogWrapService opLogWrapService; + + private static final String LIST_CONNECTORS_URI = "/connectors"; + private static final String GET_CONNECTOR_INFO_PREFIX_URI = "/connectors"; + private static final String GET_CONNECTOR_TOPICS_URI = "/connectors/%s/topics"; + private static final String GET_CONNECTOR_STATUS_URI = "/connectors/%s/status"; + + private static final String CREATE_CONNECTOR_URI = "/connectors"; + private static final String RESUME_CONNECTOR_URI = "/connectors/%s/resume"; + private static final String RESTART_CONNECTOR_URI = "/connectors/%s/restart"; + private static final String PAUSE_CONNECTOR_URI = "/connectors/%s/pause"; + private static final String DELETE_CONNECTOR_URI = "/connectors/%s"; + private static final String UPDATE_CONNECTOR_CONFIG_URI = "/connectors/%s/config"; + + @Override + protected VersionItemTypeEnum getVersionItemType() { + return SERVICE_OP_CONNECT_CONNECTOR; + } + + @Override + public Result createConnector(Long connectClusterId, String connectorName, Properties configs, String operator) { + try { + ConnectCluster connectCluster = connectClusterService.getById(connectClusterId); + if (ValidateUtils.isNull(connectCluster)) { + return Result.buildFromRSAndMsg(ResultStatus.NOT_EXIST, MsgConstant.getConnectClusterNotExist(connectClusterId)); + } + + // 构造参数 + Properties props = new Properties(); + props.put("name", 
connectorName); + props.put("config", configs); + + ConnectorInfo connectorInfo = restTool.postObjectWithJsonContent( + connectCluster.getClusterUrl() + CREATE_CONNECTOR_URI, + props, + ConnectorInfo.class + ); + + opLogWrapService.saveOplogAndIgnoreException(new OplogDTO( + operator, + OperationEnum.ADD.getDesc(), + ModuleEnum.KAFKA_CONNECT_CONNECTOR.getDesc(), + MsgConstant.getConnectorBizStr(connectClusterId, connectorName), + ConvertUtil.obj2Json(configs) + )); + + KSConnectorInfo connector = new KSConnectorInfo(); + connector.setConnectClusterId(connectClusterId); + connector.setConfig(connectorInfo.config()); + connector.setName(connectorInfo.name()); + connector.setTasks(connectorInfo.tasks()); + connector.setType(connectorInfo.type()); + + return Result.buildSuc(connector); + } catch (Exception e) { + LOGGER.error( + "method=createConnector||connectClusterId={}||connectorName={}||configs={}||operator={}||errMsg=exception", + connectClusterId, connectorName, configs, operator, e + ); + + return Result.buildFromRSAndMsg(ResultStatus.KAFKA_CONNECTOR_READ_FAILED, e.getMessage()); + } + } + + @Override + public Result> listConnectorsFromCluster(Long connectClusterId) { + try { + ConnectCluster connectCluster = connectClusterService.getById(connectClusterId); + if (ValidateUtils.isNull(connectCluster)) { + return Result.buildFromRSAndMsg(ResultStatus.NOT_EXIST, MsgConstant.getConnectClusterNotExist(connectClusterId)); + } + + List nameList = restTool.getArrayObjectWithJsonContent( + connectCluster.getClusterUrl() + LIST_CONNECTORS_URI, + new HashMap<>(), + String.class + ); + + return Result.buildSuc(nameList); + } catch (Exception e) { + LOGGER.error( + "method=listConnectorsFromCluster||connectClusterId={}||errMsg=exception", + connectClusterId, e + ); + + return Result.buildFromRSAndMsg(ResultStatus.KAFKA_CONNECTOR_READ_FAILED, e.getMessage()); + } + } + + @Override + public Result getConnectorInfoFromCluster(Long connectClusterId, String connectorName) { + 
ConnectCluster connectCluster = connectClusterService.getById(connectClusterId); + if (ValidateUtils.isNull(connectCluster)) { + return Result.buildFromRSAndMsg(ResultStatus.NOT_EXIST, MsgConstant.getConnectClusterNotExist(connectClusterId)); + } + + return this.getConnectorInfoFromCluster(connectCluster, connectorName); + } + + @Override + public Result> getConnectorTopicsFromCluster(Long connectClusterId, String connectorName) { + ConnectCluster connectCluster = connectClusterService.getById(connectClusterId); + if (ValidateUtils.isNull(connectCluster)) { + return Result.buildFromRSAndMsg(ResultStatus.NOT_EXIST, MsgConstant.getConnectClusterNotExist(connectClusterId)); + } + + return this.getConnectorTopicsFromCluster(connectCluster, connectorName); + } + + @Override + public Result getConnectorStateInfoFromCluster(Long connectClusterId, String connectorName) { + ConnectCluster connectCluster = connectClusterService.getById(connectClusterId); + if (ValidateUtils.isNull(connectCluster)) { + return Result.buildFromRSAndMsg(ResultStatus.NOT_EXIST, MsgConstant.getConnectClusterNotExist(connectClusterId)); + } + + return this.getConnectorStateInfoFromCluster(connectCluster, connectorName); + } + + @Override + public Result getAllConnectorInfoFromCluster(Long connectClusterId, String connectorName) { + ConnectCluster connectCluster = connectClusterService.getById(connectClusterId); + if (ValidateUtils.isNull(connectCluster)) { + return Result.buildFromRSAndMsg(ResultStatus.NOT_EXIST, MsgConstant.getConnectClusterNotExist(connectClusterId)); + } + + Result connectorResult = this.getConnectorInfoFromCluster(connectCluster, connectorName); + if (connectorResult.failed()) { + LOGGER.error( + "method=getAllConnectorInfoFromCluster||connectClusterId={}||connectorName={}||result={}", + connectClusterId, connectorName, connectorResult + ); + + return Result.buildFromIgnoreData(connectorResult); + } + + Result> topicNameListResult = 
this.getConnectorTopicsFromCluster(connectCluster, connectorName); + if (topicNameListResult.failed()) { + LOGGER.error( + "method=getAllConnectorInfoFromCluster||connectClusterId={}||connectorName={}||result={}", + connectClusterId, connectorName, connectorResult + ); + } + + Result stateInfoResult = this.getConnectorStateInfoFromCluster(connectCluster, connectorName); + if (stateInfoResult.failed()) { + LOGGER.error( + "method=getAllConnectorInfoFromCluster||connectClusterId={}||connectorName={}||result={}", + connectClusterId, connectorName, connectorResult + ); + } + + return Result.buildSuc(ConnectConverter.convert2KSConnector( + connectCluster.getKafkaClusterPhyId(), + connectCluster.getId(), + connectorResult.getData(), + stateInfoResult.getData(), + topicNameListResult.getData() + )); + } + + @Override + public Result resumeConnector(Long connectClusterId, String connectorName, String operator) { + try { + ConnectCluster connectCluster = connectClusterService.getById(connectClusterId); + if (ValidateUtils.isNull(connectCluster)) { + return Result.buildFromRSAndMsg(ResultStatus.NOT_EXIST, MsgConstant.getConnectClusterNotExist(connectClusterId)); + } + + restTool.putJsonForObject( + connectCluster.getClusterUrl() + String.format(RESUME_CONNECTOR_URI, connectorName), + new HashMap<>(), + String.class + ); + + this.updateStatus(connectCluster, connectClusterId, connectorName); + + opLogWrapService.saveOplogAndIgnoreException(new OplogDTO( + operator, + OperationEnum.ENABLE.getDesc(), + ModuleEnum.KAFKA_CONNECT_CONNECTOR.getDesc(), + MsgConstant.getConnectorBizStr(connectClusterId, connectorName), + "" + )); + + return Result.buildSuc(); + } catch (Exception e) { + LOGGER.error( + "class=ConnectorServiceImpl||method=resumeConnector||connectClusterId={}||errMsg=exception", + connectClusterId, e + ); + + return Result.buildFromRSAndMsg(ResultStatus.KAFKA_CONNECTOR_READ_FAILED, e.getMessage()); + } + } + + @Override + public Result restartConnector(Long 
connectClusterId, String connectorName, String operator) { + try { + ConnectCluster connectCluster = connectClusterService.getById(connectClusterId); + if (ValidateUtils.isNull(connectCluster)) { + return Result.buildFromRSAndMsg(ResultStatus.NOT_EXIST, MsgConstant.getConnectClusterNotExist(connectClusterId)); + } + + restTool.postObjectWithJsonContent( + connectCluster.getClusterUrl() + String.format(RESTART_CONNECTOR_URI, connectorName), + new HashMap<>(), + String.class + ); + + this.updateStatus(connectCluster, connectClusterId, connectorName); + + opLogWrapService.saveOplogAndIgnoreException(new OplogDTO( + operator, + OperationEnum.RESTART.getDesc(), + ModuleEnum.KAFKA_CONNECT_CONNECTOR.getDesc(), + MsgConstant.getConnectorBizStr(connectClusterId, connectorName), + "" + )); + + return Result.buildSuc(); + } catch (Exception e) { + LOGGER.error( + "method=restartConnector||connectClusterId={}||errMsg=exception", + connectClusterId, e + ); + + return Result.buildFromRSAndMsg(ResultStatus.KAFKA_CONNECTOR_READ_FAILED, e.getMessage()); + } + } + + @Override + public Result stopConnector(Long connectClusterId, String connectorName, String operator) { + try { + ConnectCluster connectCluster = connectClusterService.getById(connectClusterId); + if (ValidateUtils.isNull(connectCluster)) { + return Result.buildFromRSAndMsg(ResultStatus.NOT_EXIST, MsgConstant.getConnectClusterNotExist(connectClusterId)); + } + + restTool.putJsonForObject( + connectCluster.getClusterUrl() + String.format(PAUSE_CONNECTOR_URI, connectorName), + new HashMap<>(), + String.class + ); + + this.updateStatus(connectCluster, connectClusterId, connectorName); + + opLogWrapService.saveOplogAndIgnoreException(new OplogDTO( + operator, + OperationEnum.DISABLE.getDesc(), + ModuleEnum.KAFKA_CONNECT_CONNECTOR.getDesc(), + MsgConstant.getConnectorBizStr(connectClusterId, connectorName), + "" + )); + + return Result.buildSuc(); + } catch (Exception e) { + LOGGER.error( + 
"method=stopConnector||connectClusterId={}||errMsg=exception", + connectClusterId, e + ); + + return Result.buildFromRSAndMsg(ResultStatus.KAFKA_CONNECTOR_READ_FAILED, e.getMessage()); + } + } + + @Override + public Result deleteConnector(Long connectClusterId, String connectorName, String operator) { + try { + ConnectCluster connectCluster = connectClusterService.getById(connectClusterId); + if (ValidateUtils.isNull(connectCluster)) { + return Result.buildFromRSAndMsg(ResultStatus.NOT_EXIST, MsgConstant.getConnectClusterNotExist(connectClusterId)); + } + + restTool.deleteWithParamsAndHeader( + connectCluster.getClusterUrl() + String.format(DELETE_CONNECTOR_URI, connectorName), + new HashMap<>(), + new HashMap<>(), + String.class + ); + + opLogWrapService.saveOplogAndIgnoreException(new OplogDTO( + operator, + OperationEnum.DELETE.getDesc(), + ModuleEnum.KAFKA_CONNECT_CONNECTOR.getDesc(), + MsgConstant.getConnectorBizStr(connectClusterId, connectorName), + "" + )); + + this.deleteConnectorInDB(connectClusterId, connectorName); + + return Result.buildSuc(); + } catch (Exception e) { + LOGGER.error( + "method=deleteConnector||connectClusterId={}||errMsg=exception", + connectClusterId, e + ); + + return Result.buildFromRSAndMsg(ResultStatus.KAFKA_CONNECTOR_READ_FAILED, e.getMessage()); + } + } + + @Override + public Result updateConnectorConfig(Long connectClusterId, String connectorName, Properties configs, String operator) { + try { + ConnectCluster connectCluster = connectClusterService.getById(connectClusterId); + if (ValidateUtils.isNull(connectCluster)) { + return Result.buildFromRSAndMsg(ResultStatus.NOT_EXIST, MsgConstant.getConnectClusterNotExist(connectClusterId)); + } + + ConnectorInfo connectorInfo = restTool.putJsonForObject( + connectCluster.getClusterUrl() + String.format(UPDATE_CONNECTOR_CONFIG_URI, connectorName), + configs, + org.apache.kafka.connect.runtime.rest.entities.ConnectorInfo.class + ); + + this.updateStatus(connectCluster, 
connectClusterId, connectorName); + + opLogWrapService.saveOplogAndIgnoreException(new OplogDTO( + operator, + OperationEnum.EDIT.getDesc(), + ModuleEnum.KAFKA_CONNECT_CONNECTOR.getDesc(), + MsgConstant.getConnectorBizStr(connectClusterId, connectorName), + ConvertUtil.obj2Json(configs) + )); + + return Result.buildSuc(); + } catch (Exception e) { + LOGGER.error( + "method=updateConnectorConfig||connectClusterId={}||errMsg=exception", + connectClusterId, e + ); + + return Result.buildFromRSAndMsg(ResultStatus.KAFKA_CONNECTOR_READ_FAILED, e.getMessage()); + } + } + + @Override + public void batchReplace(Long kafkaClusterPhyId, Long connectClusterId, List connectorList, Set allConnectorNameSet) { + List poList = this.listByConnectClusterIdFromDB(connectClusterId); + + Map oldPOMap = new HashMap<>(); + poList.forEach(elem -> oldPOMap.put(elem.getConnectorName(), elem)); + + for (KSConnector connector: connectorList) { + try { + ConnectorPO oldPO = oldPOMap.remove(connector.getConnectorName()); + if (oldPO == null) { + oldPO = ConvertUtil.obj2Obj(connector, ConnectorPO.class); + connectorDAO.insert(oldPO); + } else { + ConnectorPO newPO = ConvertUtil.obj2Obj(connector, ConnectorPO.class); + newPO.setId(oldPO.getId()); + connectorDAO.updateById(newPO); + } + } catch (DuplicateKeyException dke) { + // ignore + } + } + + try { + oldPOMap.values().forEach(elem -> { + if (allConnectorNameSet.contains(elem.getConnectorName())) { + // 当前connector还存在 + return; + } + + // 当前connector不存在了,则进行删除 + connectorDAO.deleteById(elem.getId()); + }); + } catch (Exception e) { + // ignore + } + } + + @Override + public void addNewToDB(KSConnector connector) { + try { + connectorDAO.insert(ConvertUtil.obj2Obj(connector, ConnectorPO.class)); + } catch (DuplicateKeyException dke) { + // ignore + } + } + + @Override + public List listByKafkaClusterIdFromDB(Long kafkaClusterPhyId) { + LambdaQueryWrapper lambdaQueryWrapper = new LambdaQueryWrapper<>(); + 
lambdaQueryWrapper.eq(ConnectorPO::getKafkaClusterPhyId, kafkaClusterPhyId); + + return connectorDAO.selectList(lambdaQueryWrapper); + } + + @Override + public List listByConnectClusterIdFromDB(Long connectClusterId) { + LambdaQueryWrapper lambdaQueryWrapper = new LambdaQueryWrapper<>(); + lambdaQueryWrapper.eq(ConnectorPO::getConnectClusterId, connectClusterId); + + return connectorDAO.selectList(lambdaQueryWrapper); + } + + @Override + public int countByConnectClusterIdFromDB(Long connectClusterId) { + LambdaQueryWrapper lambdaQueryWrapper = new LambdaQueryWrapper<>(); + lambdaQueryWrapper.eq(ConnectorPO::getConnectClusterId, connectClusterId); + + return connectorDAO.selectCount(lambdaQueryWrapper); + } + + @Override + public ConnectorPO getConnectorFromDB(Long connectClusterId, String connectorName) { + LambdaQueryWrapper lambdaQueryWrapper = new LambdaQueryWrapper<>(); + lambdaQueryWrapper.eq(ConnectorPO::getConnectClusterId, connectClusterId); + lambdaQueryWrapper.eq(ConnectorPO::getConnectorName, connectorName); + + return connectorDAO.selectOne(lambdaQueryWrapper); + } + + @Override + public ConnectorTypeEnum getConnectorType(Long connectClusterId, String connectorName) { + ConnectorTypeEnum connectorType = ConnectorTypeEnum.UNKNOWN; + ConnectorPO connector = this.getConnectorFromDB(connectClusterId, connectorName); + if (connector != null) { + connectorType = ConnectorTypeEnum.getByName(connector.getConnectorType()); + } + return connectorType; + } + + /**************************************************** private method ****************************************************/ + private int deleteConnectorInDB(Long connectClusterId, String connectorName) { + LambdaQueryWrapper lambdaQueryWrapper = new LambdaQueryWrapper<>(); + lambdaQueryWrapper.eq(ConnectorPO::getConnectClusterId, connectClusterId); + lambdaQueryWrapper.eq(ConnectorPO::getConnectorName, connectorName); + + return connectorDAO.delete(lambdaQueryWrapper); + } + + private Result 
getConnectorInfoFromCluster(ConnectCluster connectCluster, String connectorName) { + try { + ConnectorInfo connectorInfo = restTool.getForObject( + connectCluster.getClusterUrl() + GET_CONNECTOR_INFO_PREFIX_URI + "/" + connectorName, + new HashMap<>(), + ConnectorInfo.class + ); + + KSConnectorInfo connector = new KSConnectorInfo(); + connector.setConnectClusterId(connectCluster.getId()); + connector.setConfig(connectorInfo.config()); + connector.setName(connectorInfo.name()); + connector.setTasks(connectorInfo.tasks()); + connector.setType(connectorInfo.type()); + + return Result.buildSuc(connector); + } catch (Exception e) { + LOGGER.error( + "method=getConnectorInfoFromCluster||connectClusterId={}||connectorName={}||errMsg=exception", + connectCluster.getId(), connectorName, e + ); + + return Result.buildFromRSAndMsg(ResultStatus.KAFKA_CONNECTOR_READ_FAILED, e.getMessage()); + } + } + + private Result> getConnectorTopicsFromCluster(ConnectCluster connectCluster, String connectorName) { + try { + Properties properties = restTool.getForObject( + connectCluster.getClusterUrl() + String.format(GET_CONNECTOR_TOPICS_URI, connectorName), + new HashMap<>(), + Properties.class + ); + + ActiveTopicsInfo activeTopicsInfo = ConvertUtil.toObj(ConvertUtil.obj2Json(properties.get(connectorName)), ActiveTopicsInfo.class); + return Result.buildSuc(new ArrayList<>(activeTopicsInfo.topics())); + } catch (Exception e) { + LOGGER.error( + "method=getConnectorTopicsFromCluster||connectClusterId={}||connectorName={}||errMsg=exception", + connectCluster.getId(), connectorName, e + ); + + return Result.buildFromRSAndMsg(ResultStatus.KAFKA_CONNECTOR_READ_FAILED, e.getMessage()); + } + } + + private Result getConnectorStateInfoFromCluster(ConnectCluster connectCluster, String connectorName) { + try { + KSConnectorStateInfo connectorStateInfo = restTool.getForObject( + connectCluster.getClusterUrl() + String.format(GET_CONNECTOR_STATUS_URI, connectorName), + new HashMap<>(), + 
KSConnectorStateInfo.class + ); + + return Result.buildSuc(connectorStateInfo); + } catch (Exception e) { + LOGGER.error( + "method=getConnectorStateInfoFromCluster||connectClusterId={}||connectorName={}||errMsg=exception", + connectCluster.getId(), connectorName, e + ); + + return Result.buildFromRSAndMsg(ResultStatus.KAFKA_CONNECTOR_READ_FAILED, e.getMessage()); + } + } + + private void updateStatus(ConnectCluster connectCluster, Long connectClusterId, String connectorName) { + try { + // 延迟3秒 + BackoffUtils.backoff(2000); + + Result stateInfoResult = this.getConnectorStateInfoFromCluster(connectCluster, connectorName); + if (stateInfoResult.failed()) { + return; + } + + ConnectorPO po = new ConnectorPO(); + po.setConnectClusterId(connectClusterId); + po.setConnectorName(connectorName); + po.setState(stateInfoResult.getData().getConnector().getState()); + + LambdaQueryWrapper lambdaQueryWrapper = new LambdaQueryWrapper<>(); + lambdaQueryWrapper.eq(ConnectorPO::getConnectClusterId, connectClusterId); + lambdaQueryWrapper.eq(ConnectorPO::getConnectorName, connectorName); + + connectorDAO.update(po, lambdaQueryWrapper); + } catch (Exception e) { + LOGGER.error( + "method=updateStatus||connectClusterId={}||connectorName={}||errMsg=exception", + connectClusterId, connectorName, e + ); + } + } +} diff --git a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/connect/plugin/PluginService.java b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/connect/plugin/PluginService.java new file mode 100644 index 00000000..d842d940 --- /dev/null +++ b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/connect/plugin/PluginService.java @@ -0,0 +1,20 @@ +package com.xiaojukeji.know.streaming.km.core.service.connect.plugin; + +import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.config.ConnectConfigInfos; +import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.plugin.ConnectPluginBasic; +import 
com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result; + +import java.util.List; +import java.util.Properties; + + +/** + * 查看Connector + */ +public interface PluginService { + Result getConfig(Long connectClusterId, String pluginName); + + Result validateConfig(Long connectClusterId, Properties props); + + Result> listPluginsFromCluster(Long connectClusterId); +} diff --git a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/connect/plugin/impl/PluginServiceImpl.java b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/connect/plugin/impl/PluginServiceImpl.java new file mode 100644 index 00000000..fa6f1394 --- /dev/null +++ b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/connect/plugin/impl/PluginServiceImpl.java @@ -0,0 +1,112 @@ +package com.xiaojukeji.know.streaming.km.core.service.connect.plugin.impl; + +import com.didiglobal.logi.log.ILog; +import com.didiglobal.logi.log.LogFactory; +import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.ConnectCluster; +import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.config.ConnectConfigInfos; +import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.plugin.ConnectPluginBasic; +import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result; +import com.xiaojukeji.know.streaming.km.common.bean.entity.result.ResultStatus; +import com.xiaojukeji.know.streaming.km.common.component.RestTool; +import com.xiaojukeji.know.streaming.km.common.constant.MsgConstant; +import com.xiaojukeji.know.streaming.km.common.constant.connect.KafkaConnectConstant; +import com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemTypeEnum; +import com.xiaojukeji.know.streaming.km.common.utils.ValidateUtils; +import com.xiaojukeji.know.streaming.km.core.service.connect.cluster.ConnectClusterService; +import com.xiaojukeji.know.streaming.km.core.service.connect.plugin.PluginService; +import 
com.xiaojukeji.know.streaming.km.core.service.version.BaseVersionControlService; +import org.apache.kafka.connect.runtime.rest.entities.ConfigInfos; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import java.util.HashMap; +import java.util.List; +import java.util.Properties; + +import static com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemTypeEnum.SERVICE_OP_CONNECT_PLUGIN; + +@Service +public class PluginServiceImpl extends BaseVersionControlService implements PluginService { + private static final ILog LOGGER = LogFactory.getLog(PluginServiceImpl.class); + + @Autowired + private RestTool restTool; + + @Autowired + private ConnectClusterService connectClusterService; + + private static final String GET_PLUGIN_CONFIG_DESC_URI = "/connector-plugins/%s/config/validate"; + private static final String GET_ALL_PLUGINS_URI = "/connector-plugins"; + + @Override + protected VersionItemTypeEnum getVersionItemType() { + return SERVICE_OP_CONNECT_PLUGIN; + } + + @Override + public Result getConfig(Long connectClusterId, String pluginName) { + Properties props = new Properties(); + props.put(KafkaConnectConstant.CONNECTOR_CLASS_FILED_NAME, pluginName); + props.put(KafkaConnectConstant.CONNECTOR_TOPICS_FILED_NAME, KafkaConnectConstant.CONNECTOR_TOPICS_FILED_ERROR_VALUE); + + return this.validateConfig(connectClusterId, props); + } + + @Override + public Result validateConfig(Long connectClusterId, Properties props) { + try { + if (ValidateUtils.isBlank(props.getProperty(KafkaConnectConstant.CONNECTOR_CLASS_FILED_NAME))) { + return Result.buildFromRSAndMsg(ResultStatus.PARAM_ILLEGAL, "参数错误, connector.class字段数据不允许不存在或者为空"); + } + + ConnectCluster connectCluster = connectClusterService.getById(connectClusterId); + if (ValidateUtils.isNull(connectCluster)) { + return Result.buildFromRSAndMsg(ResultStatus.NOT_EXIST, MsgConstant.getConnectClusterNotExist(connectClusterId)); + } + + // 通过参数检查接口,获取插件配置 
+ ConfigInfos configInfos = restTool.putJsonForObject( + connectCluster.getClusterUrl() + String.format(GET_PLUGIN_CONFIG_DESC_URI, props.getProperty(KafkaConnectConstant.CONNECTOR_CLASS_FILED_NAME)), + props, + ConfigInfos.class + ); + + return Result.buildSuc(new ConnectConfigInfos(configInfos)); + } catch (Exception e) { + LOGGER.error( + "method=validateConfig||connectClusterId={}||pluginName={}||errMsg=exception", + connectClusterId, + props.getProperty(KafkaConnectConstant.CONNECTOR_CLASS_FILED_NAME), + e + ); + + return Result.buildFromRSAndMsg(ResultStatus.KAFKA_CONNECTOR_READ_FAILED, e.getMessage()); + } + } + + @Override + public Result> listPluginsFromCluster(Long connectClusterId) { + try { + ConnectCluster connectCluster = connectClusterService.getById(connectClusterId); + if (ValidateUtils.isNull(connectCluster)) { + return Result.buildFromRSAndMsg(ResultStatus.NOT_EXIST, MsgConstant.getConnectClusterNotExist(connectClusterId)); + } + + // 通过参数检查接口,获取插件配置 + List pluginList = restTool.getArrayObjectWithJsonContent( + connectCluster.getClusterUrl() + GET_ALL_PLUGINS_URI, + new HashMap<>(), + ConnectPluginBasic.class + ); + + return Result.buildSuc(pluginList); + } catch (Exception e) { + LOGGER.error( + "method=listPluginsFromCluster||connectClusterId={}||errMsg=exception", + connectClusterId, e + ); + + return Result.buildFromRSAndMsg(ResultStatus.KAFKA_CONNECTOR_READ_FAILED, e.getMessage()); + } + } +} diff --git a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/connect/worker/WorkerConnectorService.java b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/connect/worker/WorkerConnectorService.java new file mode 100644 index 00000000..f5a5b598 --- /dev/null +++ b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/connect/worker/WorkerConnectorService.java @@ -0,0 +1,23 @@ +package com.xiaojukeji.know.streaming.km.core.service.connect.worker; + +import 
com.xiaojukeji.know.streaming.km.common.bean.dto.connect.task.TaskActionDTO; +import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.ConnectCluster; +import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.WorkerConnector; +import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result; + +import java.util.List; + +/** + * Worker + */ +public interface WorkerConnectorService { + void batchReplaceInDB(Long connectClusterId, List workerList); + + List listFromDB(Long connectClusterId); + + List listByKafkaClusterIdFromDB(Long kafkaClusterPhyId); + + Result actionTask(TaskActionDTO dto); + + List getWorkerConnectorListFromCluster(ConnectCluster connectCluster, String connectorName); +} diff --git a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/connect/worker/WorkerService.java b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/connect/worker/WorkerService.java new file mode 100644 index 00000000..067a33d8 --- /dev/null +++ b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/connect/worker/WorkerService.java @@ -0,0 +1,38 @@ +package com.xiaojukeji.know.streaming.km.core.service.connect.worker; + +import com.xiaojukeji.know.streaming.km.common.bean.dto.pagination.PaginationBaseDTO; +import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.ConnectWorker; +import com.xiaojukeji.know.streaming.km.common.bean.entity.result.PaginationResult; +import com.xiaojukeji.know.streaming.km.common.bean.vo.cluster.connector.ClusterWorkerOverviewVO; + +import java.util.List; + +/** + * Worker + * @author didi + */ +public interface WorkerService { + /** + * 批量插入数据库 + * @param connectClusterId + * @param workerList + */ + void batchReplaceInDB(Long connectClusterId, List workerList); + + /** + * 从数据库中获取 + * @param connectClusterId + * @return + */ + List listFromDB(Long connectClusterId); + + /** + * 分页获取 + * @param kafkaClusterPhyId + * @param dto + * @return + */ + PaginationResult 
pageWorkByKafkaClusterPhy(Long kafkaClusterPhyId, PaginationBaseDTO dto); + + List listByKafkaClusterIdFromDB(Long kafkaClusterPhyId); +} diff --git a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/connect/worker/impl/WorkerConnectorServiceImpl.java b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/connect/worker/impl/WorkerConnectorServiceImpl.java new file mode 100644 index 00000000..99fb9ba2 --- /dev/null +++ b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/connect/worker/impl/WorkerConnectorServiceImpl.java @@ -0,0 +1,143 @@ +package com.xiaojukeji.know.streaming.km.core.service.connect.worker.impl; + +import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper; +import com.didiglobal.logi.log.ILog; +import com.didiglobal.logi.log.LogFactory; +import com.xiaojukeji.know.streaming.km.common.bean.dto.connect.task.TaskActionDTO; +import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.ConnectCluster; +import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.ConnectWorker; +import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.WorkerConnector; +import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.connector.KSConnectorStateInfo; +import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.connector.KSTaskState; +import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result; +import com.xiaojukeji.know.streaming.km.common.bean.entity.result.ResultStatus; +import com.xiaojukeji.know.streaming.km.common.bean.po.connect.WorkerConnectorPO; +import com.xiaojukeji.know.streaming.km.common.component.RestTool; +import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil; +import com.xiaojukeji.know.streaming.km.core.service.connect.connector.ConnectorService; +import com.xiaojukeji.know.streaming.km.core.service.connect.worker.WorkerConnectorService; +import 
com.xiaojukeji.know.streaming.km.core.service.connect.worker.WorkerService; +import com.xiaojukeji.know.streaming.km.persistence.connect.cache.LoadedConnectClusterCache; +import com.xiaojukeji.know.streaming.km.persistence.mysql.connect.WorkerConnectorDAO; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.dao.DuplicateKeyException; +import org.springframework.stereotype.Service; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.function.Function; +import java.util.stream.Collectors; + +import static com.xiaojukeji.know.streaming.km.common.enums.connect.ConnectActionEnum.RESTART; + +@Service +public class WorkerConnectorServiceImpl implements WorkerConnectorService { + + protected static final ILog LOGGER = LogFactory.getLog(WorkerConnectorServiceImpl.class); + @Autowired + private WorkerConnectorDAO workerConnectorDAO; + + @Autowired + private RestTool restTool; + + @Autowired + private ConnectorService connectorService; + + @Autowired + private WorkerService workerService; + + + private static final String RESTART_TASK_URI = "%s/connectors/%s/tasks/%d/restart"; + + @Override + public void batchReplaceInDB(Long connectClusterId, List workerList) { + Map oldMap = new HashMap<>(); + for (WorkerConnectorPO oldPO : this.listPOSFromDB(connectClusterId)) { + oldMap.put(oldPO.getConnectorName() + oldPO.getWorkerId() + oldPO.getTaskId() + oldPO.getState(), oldPO); + } + + for (WorkerConnector workerConnector : workerList) { + try { + String key = workerConnector.getConnectorName() + workerConnector.getWorkerId() + workerConnector.getTaskId() + workerConnector.getState(); + + WorkerConnectorPO oldPO = oldMap.remove(key); + if (oldPO == null) { + workerConnectorDAO.insert(ConvertUtil.obj2Obj(workerConnector, WorkerConnectorPO.class)); + } else { + // 如果该数据已经存在,则不需要进行操作 + } + } catch (DuplicateKeyException dke) { + // ignore + } + } + + try { + 
oldMap.values().forEach(elem -> workerConnectorDAO.deleteById(elem.getId())); + } catch (Exception e) { + // ignore + } + } + + @Override + public List listFromDB(Long connectClusterId) { + return ConvertUtil.list2List(this.listPOSFromDB(connectClusterId), WorkerConnector.class); + } + + @Override + public List listByKafkaClusterIdFromDB(Long kafkaClusterPhyId) { + LambdaQueryWrapper lambdaQueryWrapper = new LambdaQueryWrapper<>(); + lambdaQueryWrapper.eq(WorkerConnectorPO::getKafkaClusterPhyId, kafkaClusterPhyId); + return ConvertUtil.list2List(workerConnectorDAO.selectList(lambdaQueryWrapper), WorkerConnector.class); + } + + + @Override + public Result actionTask(TaskActionDTO dto) { + if (!dto.getAction().equals(RESTART.getValue())) { + return Result.buildFailure(ResultStatus.OPERATION_FORBIDDEN); + } + + ConnectCluster connectCluster = LoadedConnectClusterCache.getByPhyId(dto.getConnectClusterId()); + + if (connectCluster == null) { + return Result.buildFailure(ResultStatus.NOT_EXIST); + } + + String url = String.format(RESTART_TASK_URI, connectCluster.getClusterUrl(), dto.getConnectorName(), dto.getTaskId()); + try { + restTool.postObjectWithJsonContent(url, null, String.class); + } catch (Exception e) { + LOGGER.error("method=actionTask||connectClusterId={}||connectorName={}||taskId={}||restart failed||msg=exception", + dto.getConnectClusterId(), dto.getConnectorName(), dto.getTaskId(), e); + } + return Result.buildSuc(); + } + + @Override + public List getWorkerConnectorListFromCluster(ConnectCluster connectCluster, String connectorName) { + Map workerMap = workerService.listFromDB(connectCluster.getId()).stream().collect(Collectors.toMap(elem -> elem.getWorkerId(), Function.identity())); + List workerConnectorList = new ArrayList<>(); + Result ret = connectorService.getConnectorStateInfoFromCluster(connectCluster.getId(), connectorName); + if (!ret.hasData()) { + return workerConnectorList; + } + + KSConnectorStateInfo ksConnectorStateInfo = ret.getData(); 
+ for (KSTaskState task : ksConnectorStateInfo.getTasks()) { + WorkerConnector workerConnector = new WorkerConnector(connectCluster.getKafkaClusterPhyId(), connectCluster.getId(), ksConnectorStateInfo.getName(), workerMap.get(task.getWorkerId()).getMemberId(), task.getId(), task.getState(), task.getWorkerId(), task.getTrace()); + workerConnectorList.add(workerConnector); + } + return workerConnectorList; + } + + + + private List listPOSFromDB(Long connectClusterId) { + LambdaQueryWrapper lambdaQueryWrapper = new LambdaQueryWrapper<>(); + lambdaQueryWrapper.eq(WorkerConnectorPO::getConnectClusterId, connectClusterId); + + return workerConnectorDAO.selectList(lambdaQueryWrapper); + } +} diff --git a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/connect/worker/impl/WorkerServiceImpl.java b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/connect/worker/impl/WorkerServiceImpl.java new file mode 100644 index 00000000..c52998f1 --- /dev/null +++ b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/connect/worker/impl/WorkerServiceImpl.java @@ -0,0 +1,114 @@ +package com.xiaojukeji.know.streaming.km.core.service.connect.worker.impl; + +import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import com.xiaojukeji.know.streaming.km.common.bean.dto.pagination.PaginationBaseDTO; +import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.ConnectWorker; +import com.xiaojukeji.know.streaming.km.common.bean.entity.result.PaginationResult; +import com.xiaojukeji.know.streaming.km.common.bean.po.connect.ConnectWorkerPO; +import com.xiaojukeji.know.streaming.km.common.bean.po.group.GroupMemberPO; +import com.xiaojukeji.know.streaming.km.common.bean.vo.cluster.connector.ClusterWorkerOverviewVO; +import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil; +import 
com.xiaojukeji.know.streaming.km.common.utils.ValidateUtils; +import com.xiaojukeji.know.streaming.km.core.service.connect.cluster.ConnectClusterService; +import com.xiaojukeji.know.streaming.km.core.service.connect.connector.ConnectorService; +import com.xiaojukeji.know.streaming.km.core.service.connect.worker.WorkerService; +import com.xiaojukeji.know.streaming.km.persistence.mysql.connect.ConnectWorkerDAO; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.dao.DuplicateKeyException; +import org.springframework.stereotype.Service; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +@Service +public class WorkerServiceImpl implements WorkerService { + @Autowired + private ConnectWorkerDAO connectWorkerDAO; + + @Autowired + private ConnectorService connectorService; + + @Autowired + private ConnectClusterService connectClusterService; + + @Override + public void batchReplaceInDB(Long connectClusterId, List workerList) { + Map oldMap = new HashMap<>(); + for (ConnectWorkerPO oldPO: this.listPOSFromDB(connectClusterId)) { + oldMap.put(oldPO.getMemberId(), oldPO); + } + + for (ConnectWorker worker: workerList) { + try { + ConnectWorkerPO newPO = ConvertUtil.obj2Obj(worker, ConnectWorkerPO.class); + ConnectWorkerPO oldPO = oldMap.remove(newPO.getMemberId()); + if (oldPO == null) { + connectWorkerDAO.insert(newPO); + } else { + newPO.setId(oldPO.getId()); + connectWorkerDAO.updateById(newPO); + } + } catch (DuplicateKeyException dke) { + // ignore + } + } + + try { + oldMap.values().forEach(elem -> connectWorkerDAO.deleteById(elem.getId())); + } catch (Exception e) { + // ignore + } + } + + @Override + public List listFromDB(Long connectClusterId) { + return ConvertUtil.list2List(this.listPOSFromDB(connectClusterId), ConnectWorker.class); + } + + @Override + public PaginationResult pageWorkByKafkaClusterPhy(Long kafkaClusterPhyId, PaginationBaseDTO dto) { + IPage pageInfo = 
new Page<>(dto.getPageNo(), dto.getPageSize()); + + LambdaQueryWrapper lambdaQueryWrapper = new LambdaQueryWrapper<>(); + lambdaQueryWrapper.eq(ConnectWorkerPO::getKafkaClusterPhyId, kafkaClusterPhyId); + lambdaQueryWrapper.like(!ValidateUtils.isBlank(dto.getSearchKeywords()), ConnectWorkerPO::getHost, dto.getSearchKeywords()); + connectWorkerDAO.selectPage(pageInfo, lambdaQueryWrapper); + + List connectWorkerPOS = pageInfo.getRecords(); + List clusterWorkerOverviewVOS = new ArrayList<>(); + + for(ConnectWorkerPO connectWorkerPO : connectWorkerPOS){ + Long connectClusterId = connectWorkerPO.getConnectClusterId(); + + ClusterWorkerOverviewVO clusterWorkerOverviewVO = new ClusterWorkerOverviewVO(); + clusterWorkerOverviewVO.setConnectClusterId(connectClusterId); + clusterWorkerOverviewVO.setWorkerHost(connectWorkerPO.getHost()); + clusterWorkerOverviewVO.setConnectorCount(connectorService.countByConnectClusterIdFromDB(connectClusterId)); + clusterWorkerOverviewVO.setConnectClusterName(connectClusterService.getClusterName(connectClusterId)); + clusterWorkerOverviewVO.setTaskCount(1); + + clusterWorkerOverviewVOS.add(clusterWorkerOverviewVO); + } + + return PaginationResult.buildSuc(clusterWorkerOverviewVOS, pageInfo); + } + + @Override + public List listByKafkaClusterIdFromDB(Long kafkaClusterPhyId) { + LambdaQueryWrapper lambdaQueryWrapper = new LambdaQueryWrapper<>(); + lambdaQueryWrapper.eq(ConnectWorkerPO::getKafkaClusterPhyId, kafkaClusterPhyId); + return ConvertUtil.list2List(connectWorkerDAO.selectList(lambdaQueryWrapper), ConnectWorker.class); + } + + /**************************************************** private method ****************************************************/ + private List listPOSFromDB(Long connectClusterId) { + LambdaQueryWrapper lambdaQueryWrapper = new LambdaQueryWrapper<>(); + lambdaQueryWrapper.eq(ConnectWorkerPO::getConnectClusterId, connectClusterId); + + return connectWorkerDAO.selectList(lambdaQueryWrapper); + } +} diff --git 
a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/group/GroupService.java b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/group/GroupService.java index 8dc1c535..3f56a0b3 100644 --- a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/group/GroupService.java +++ b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/group/GroupService.java @@ -1,14 +1,15 @@ package com.xiaojukeji.know.streaming.km.core.service.group; import com.xiaojukeji.know.streaming.km.common.bean.dto.pagination.PaginationBaseDTO; +import com.xiaojukeji.know.streaming.km.common.bean.entity.cluster.ClusterPhy; import com.xiaojukeji.know.streaming.km.common.bean.entity.group.Group; +import com.xiaojukeji.know.streaming.km.common.bean.entity.kafka.KSGroupDescription; import com.xiaojukeji.know.streaming.km.common.bean.entity.result.PaginationResult; import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result; import com.xiaojukeji.know.streaming.km.common.bean.po.group.GroupMemberPO; import com.xiaojukeji.know.streaming.km.common.enums.group.GroupStateEnum; import com.xiaojukeji.know.streaming.km.common.exception.AdminOperateException; import com.xiaojukeji.know.streaming.km.common.exception.NotExistException; -import org.apache.kafka.clients.admin.ConsumerGroupDescription; import org.apache.kafka.common.TopicPartition; import java.util.Date; @@ -19,16 +20,16 @@ public interface GroupService { /** * 从Kafka中获取消费组名称列表 */ - List listGroupsFromKafka(Long clusterPhyId) throws NotExistException, AdminOperateException; + List listGroupsFromKafka(ClusterPhy clusterPhy) throws AdminOperateException; /** * 从Kafka中获取消费组详细信息 */ - Group getGroupFromKafka(Long clusterPhyId, String groupName) throws NotExistException, AdminOperateException; + Group getGroupFromKafka(ClusterPhy clusterPhy, String groupName) throws NotExistException, AdminOperateException; Map getGroupOffsetFromKafka(Long clusterPhyId, String groupName) throws 
NotExistException, AdminOperateException; - ConsumerGroupDescription getGroupDescriptionFromKafka(Long clusterPhyId, String groupName) throws NotExistException, AdminOperateException; + KSGroupDescription getGroupDescriptionFromKafka(ClusterPhy clusterPhy, String groupName) throws AdminOperateException; Result resetGroupOffsets(Long clusterPhyId, String groupName, Map offsetMap, String operator) throws NotExistException, AdminOperateException; diff --git a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/group/impl/GroupServiceImpl.java b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/group/impl/GroupServiceImpl.java index 1a923f21..c80652ee 100644 --- a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/group/impl/GroupServiceImpl.java +++ b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/group/impl/GroupServiceImpl.java @@ -7,8 +7,10 @@ import com.didiglobal.logi.log.ILog; import com.didiglobal.logi.log.LogFactory; import com.didiglobal.logi.security.common.dto.oplog.OplogDTO; import com.xiaojukeji.know.streaming.km.common.bean.dto.pagination.PaginationBaseDTO; +import com.xiaojukeji.know.streaming.km.common.bean.entity.cluster.ClusterPhy; import com.xiaojukeji.know.streaming.km.common.bean.entity.group.Group; import com.xiaojukeji.know.streaming.km.common.bean.entity.group.GroupTopicMember; +import com.xiaojukeji.know.streaming.km.common.bean.entity.kafka.*; import com.xiaojukeji.know.streaming.km.common.bean.entity.result.PaginationResult; import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result; import com.xiaojukeji.know.streaming.km.common.bean.entity.result.ResultStatus; @@ -17,6 +19,7 @@ import com.xiaojukeji.know.streaming.km.common.bean.po.group.GroupPO; import com.xiaojukeji.know.streaming.km.common.constant.KafkaConstant; import com.xiaojukeji.know.streaming.km.common.converter.GroupConverter; import 
com.xiaojukeji.know.streaming.km.common.enums.group.GroupStateEnum; +import com.xiaojukeji.know.streaming.km.common.enums.group.GroupTypeEnum; import com.xiaojukeji.know.streaming.km.common.enums.operaterecord.ModuleEnum; import com.xiaojukeji.know.streaming.km.common.enums.operaterecord.OperationEnum; import com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemTypeEnum; @@ -24,9 +27,10 @@ import com.xiaojukeji.know.streaming.km.common.exception.AdminOperateException; import com.xiaojukeji.know.streaming.km.common.exception.NotExistException; import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil; import com.xiaojukeji.know.streaming.km.common.utils.ValidateUtils; +import com.xiaojukeji.know.streaming.km.common.utils.kafka.KSPartialKafkaAdminClient; import com.xiaojukeji.know.streaming.km.core.service.group.GroupService; import com.xiaojukeji.know.streaming.km.core.service.oprecord.OpLogWrapService; -import com.xiaojukeji.know.streaming.km.core.service.version.BaseVersionControlService; +import com.xiaojukeji.know.streaming.km.core.service.version.BaseKafkaVersionControlService; import com.xiaojukeji.know.streaming.km.persistence.kafka.KafkaAdminClient; import com.xiaojukeji.know.streaming.km.persistence.mysql.group.GroupDAO; import com.xiaojukeji.know.streaming.km.persistence.mysql.group.GroupMemberDAO; @@ -36,6 +40,7 @@ import org.apache.kafka.common.TopicPartition; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; +import java.time.Duration; import java.util.*; import java.util.function.Function; import java.util.stream.Collectors; @@ -43,7 +48,7 @@ import java.util.stream.Collectors; import static com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemTypeEnum.SERVICE_SEARCH_GROUP; @Service -public class GroupServiceImpl extends BaseVersionControlService implements GroupService { +public class GroupServiceImpl extends BaseKafkaVersionControlService implements 
GroupService { private static final ILog log = LogFactory.getLog(GroupServiceImpl.class); @Autowired @@ -64,11 +69,18 @@ public class GroupServiceImpl extends BaseVersionControlService implements Group } @Override - public List listGroupsFromKafka(Long clusterPhyId) throws NotExistException, AdminOperateException { - AdminClient adminClient = kafkaAdminClient.getClient(clusterPhyId); - + public List listGroupsFromKafka(ClusterPhy clusterPhy) throws AdminOperateException { + KSPartialKafkaAdminClient adminClient = null; try { - ListConsumerGroupsResult listConsumerGroupsResult = adminClient.listConsumerGroups( + Properties props = ConvertUtil.str2ObjByJson(clusterPhy.getClientProperties(), Properties.class); + if (props == null) { + props = new Properties(); + } + + props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, clusterPhy.getBootstrapServers()); + + adminClient = KSPartialKafkaAdminClient.create(props); + KSListGroupsResult listConsumerGroupsResult = adminClient.listConsumerGroups( new ListConsumerGroupsOptions() .timeoutMs(KafkaConstant.ADMIN_CLIENT_REQUEST_TIME_OUT_UNIT_MS) ); @@ -80,33 +92,46 @@ public class GroupServiceImpl extends BaseVersionControlService implements Group return groupNameList; } catch (Exception e) { - log.error("method=getGroupsFromKafka||clusterPhyId={}||errMsg=exception!", clusterPhyId, e); + log.error("method=listGroupsFromKafka||clusterPhyId={}||errMsg=exception!", clusterPhy.getId(), e); throw new AdminOperateException(e.getMessage(), e, ResultStatus.KAFKA_OPERATE_FAILED); + } finally { + if (adminClient != null) { + try { + adminClient.close(Duration.ofSeconds(10)); + } catch (Exception e) { + // ignore + } + } } } @Override - public Group getGroupFromKafka(Long clusterPhyId, String groupName) throws NotExistException, AdminOperateException { + public Group getGroupFromKafka(ClusterPhy clusterPhy, String groupName) throws NotExistException, AdminOperateException { // 获取消费组的详细信息 - ConsumerGroupDescription groupDescription = 
this.getGroupDescriptionFromKafka(clusterPhyId, groupName); + KSGroupDescription groupDescription = this.getGroupDescriptionFromKafka(clusterPhy, groupName); if (groupDescription == null) { return null; } - Group group = new Group(clusterPhyId, groupName, groupDescription); + Group group = new Group(clusterPhy.getId(), groupName, groupDescription); // 获取消费组消费过哪些Topic Map memberMap = new HashMap<>(); - for (TopicPartition tp : this.getGroupOffsetFromKafka(clusterPhyId, groupName).keySet()) { + for (TopicPartition tp : this.getGroupOffsetFromKafka(clusterPhy.getId(), groupName).keySet()) { memberMap.putIfAbsent(tp.topic(), new GroupTopicMember(tp.topic(), 0)); } // 记录成员信息 - for (MemberDescription memberDescription : groupDescription.members()) { + for (KSMemberDescription memberDescription : groupDescription.members()) { + if (group.getType() == GroupTypeEnum.CONNECT_CLUSTER) { + continue; + } Set partitionList = new HashSet<>(); - if (!ValidateUtils.isNull(memberDescription.assignment().topicPartitions())) { - partitionList = memberDescription.assignment().topicPartitions(); + + KSMemberConsumerAssignment assignment = (KSMemberConsumerAssignment) memberDescription.assignment(); + if (!ValidateUtils.isNull(assignment.topicPartitions())) { + partitionList = assignment.topicPartitions(); } Set topicNameSet = partitionList.stream().map(elem -> elem.topic()).collect(Collectors.toSet()); @@ -143,20 +168,36 @@ public class GroupServiceImpl extends BaseVersionControlService implements Group } @Override - public ConsumerGroupDescription getGroupDescriptionFromKafka(Long clusterPhyId, String groupName) throws NotExistException, AdminOperateException { - AdminClient adminClient = kafkaAdminClient.getClient(clusterPhyId); - + public KSGroupDescription getGroupDescriptionFromKafka(ClusterPhy clusterPhy, String groupName) throws AdminOperateException { + KSPartialKafkaAdminClient adminClient = null; try { - DescribeConsumerGroupsResult describeConsumerGroupsResult = 
adminClient.describeConsumerGroups( + Properties props = ConvertUtil.str2ObjByJson(clusterPhy.getClientProperties(), Properties.class); + if (props == null) { + props = new Properties(); + } + + props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, clusterPhy.getBootstrapServers()); + + adminClient = KSPartialKafkaAdminClient.create(props); + + KSDescribeGroupsResult describeGroupsResult = adminClient.describeConsumerGroups( Arrays.asList(groupName), new DescribeConsumerGroupsOptions().timeoutMs(KafkaConstant.ADMIN_CLIENT_REQUEST_TIME_OUT_UNIT_MS).includeAuthorizedOperations(false) ); - return describeConsumerGroupsResult.all().get().get(groupName); + return describeGroupsResult.all().get().get(groupName); } catch(Exception e){ - log.error("method=getGroupDescription||clusterPhyId={}|groupName={}||errMsg=exception!", clusterPhyId, groupName, e); + log.error("method=getGroupDescription||clusterPhyId={}|groupName={}||errMsg=exception!", clusterPhy.getId(), groupName, e); throw new AdminOperateException(e.getMessage(), e, ResultStatus.KAFKA_OPERATE_FAILED); + } finally { + if (adminClient != null) { + try { + adminClient.close(Duration.ofSeconds(10)); + } catch (Exception e) { + // ignore + } + } } } diff --git a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/health/checker/AbstractHealthCheckService.java b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/health/checker/AbstractHealthCheckService.java index c6b2cf3f..b01b8832 100644 --- a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/health/checker/AbstractHealthCheckService.java +++ b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/health/checker/AbstractHealthCheckService.java @@ -24,7 +24,7 @@ public abstract class AbstractHealthCheckService { Function, HealthCheckResult> > functionMap = new ConcurrentHashMap<>(); - public abstract List getResList(Long clusterPhyId); + public abstract List getResList(Long clusterId); public abstract 
HealthCheckDimensionEnum getHealthCheckDimensionEnum(); diff --git a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/health/checker/connect/HealthCheckConnectClusterService.java b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/health/checker/connect/HealthCheckConnectClusterService.java new file mode 100644 index 00000000..566399b6 --- /dev/null +++ b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/health/checker/connect/HealthCheckConnectClusterService.java @@ -0,0 +1,95 @@ +package com.xiaojukeji.know.streaming.km.core.service.health.checker.connect; + +import com.didiglobal.logi.log.ILog; +import com.didiglobal.logi.log.LogFactory; +import com.xiaojukeji.know.streaming.km.common.bean.entity.config.healthcheck.BaseClusterHealthConfig; +import com.xiaojukeji.know.streaming.km.common.bean.entity.config.healthcheck.HealthCompareValueConfig; +import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.ConnectCluster; +import com.xiaojukeji.know.streaming.km.common.bean.entity.health.HealthCheckResult; +import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.connect.ConnectClusterMetrics; +import com.xiaojukeji.know.streaming.km.common.bean.entity.param.cluster.ClusterParam; +import com.xiaojukeji.know.streaming.km.common.bean.entity.param.cluster.ConnectClusterParam; +import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result; +import com.xiaojukeji.know.streaming.km.common.constant.Constant; +import com.xiaojukeji.know.streaming.km.common.enums.health.HealthCheckDimensionEnum; +import com.xiaojukeji.know.streaming.km.common.enums.health.HealthCheckNameEnum; +import com.xiaojukeji.know.streaming.km.common.utils.Tuple; +import com.xiaojukeji.know.streaming.km.core.service.connect.cluster.ConnectClusterMetricService; +import com.xiaojukeji.know.streaming.km.core.service.health.checker.AbstractHealthCheckService; +import 
com.xiaojukeji.know.streaming.km.persistence.connect.cache.LoadedConnectClusterCache; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import javax.annotation.PostConstruct; +import java.util.ArrayList; +import java.util.List; + +import static com.xiaojukeji.know.streaming.km.core.service.version.metrics.connect.ConnectClusterMetricVersionItems.CONNECT_CLUSTER_METRIC_CONNECTOR_STARTUP_FAILURE_PERCENTAGE; + +/** + * @author wyb + * @date 2022/11/9 + */ +@Service +public class HealthCheckConnectClusterService extends AbstractHealthCheckService { + + private static final ILog log = LogFactory.getLog(HealthCheckConnectClusterService.class); + + @Autowired + private ConnectClusterMetricService connectClusterMetricService; + + @PostConstruct + private void init() { + functionMap.putIfAbsent(HealthCheckNameEnum.CONNECT_CLUSTER_TASK_STARTUP_FAILURE_PERCENTAGE.getConfigName(), this::checkStartupFailurePercentage); + } + + @Override + public List getResList(Long connectClusterId) { + List paramList = new ArrayList<>(); + if (LoadedConnectClusterCache.containsByPhyId(connectClusterId)) { + paramList.add(new ConnectClusterParam(connectClusterId)); + } + return paramList; + } + + @Override + public HealthCheckDimensionEnum getHealthCheckDimensionEnum() { + return HealthCheckDimensionEnum.CONNECT_CLUSTER; + } + + private HealthCheckResult checkStartupFailurePercentage(Tuple paramTuple) { + ConnectClusterParam param = (ConnectClusterParam) paramTuple.getV1(); + HealthCompareValueConfig compareConfig = (HealthCompareValueConfig) paramTuple.getV2(); + + Long connectClusterId = param.getConnectClusterId(); + String metricName = CONNECT_CLUSTER_METRIC_CONNECTOR_STARTUP_FAILURE_PERCENTAGE; + + Result ret = connectClusterMetricService.collectConnectClusterMetricsFromKafka(connectClusterId, metricName); + + if (!ret.hasData()) { + 
log.error("method=checkStartupFailurePercentage||connectClusterId={}||metricName={}||errMsg=get metrics failed", + param.getConnectClusterId(), metricName); + return null; + } + + Float value = ret.getData().getMetric(metricName); + + if (value == null) { + log.error("method=checkStartupFailurePercentage||connectClusterId={}||metricName={}||errMsg=get metrics failed", + param.getConnectClusterId(), metricName); + return null; + } + + ConnectCluster connectCluster = LoadedConnectClusterCache.getByPhyId(connectClusterId); + + HealthCheckResult checkResult = new HealthCheckResult( + HealthCheckDimensionEnum.CONNECT_CLUSTER.getDimension(), + HealthCheckNameEnum.CONNECT_CLUSTER_TASK_STARTUP_FAILURE_PERCENTAGE.getConfigName(), + connectCluster.getKafkaClusterPhyId(), + String.valueOf(connectClusterId) + ); + checkResult.setPassed(value <= compareConfig.getValue() ? Constant.YES : Constant.NO); + return checkResult; + + } +} diff --git a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/health/checker/connect/HealthCheckConnectorService.java b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/health/checker/connect/HealthCheckConnectorService.java new file mode 100644 index 00000000..8f30ade8 --- /dev/null +++ b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/health/checker/connect/HealthCheckConnectorService.java @@ -0,0 +1,122 @@ +package com.xiaojukeji.know.streaming.km.core.service.health.checker.connect; + +import com.didiglobal.logi.log.ILog; +import com.didiglobal.logi.log.LogFactory; +import com.xiaojukeji.know.streaming.km.common.bean.entity.config.healthcheck.BaseClusterHealthConfig; +import com.xiaojukeji.know.streaming.km.common.bean.entity.config.healthcheck.HealthCompareValueConfig; +import com.xiaojukeji.know.streaming.km.common.bean.entity.health.HealthCheckResult; +import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.connect.ConnectorMetrics; +import 
com.xiaojukeji.know.streaming.km.common.bean.entity.param.cluster.ClusterParam; +import com.xiaojukeji.know.streaming.km.common.bean.entity.param.connect.ConnectorParam; +import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result; +import com.xiaojukeji.know.streaming.km.common.constant.Constant; +import com.xiaojukeji.know.streaming.km.common.enums.health.HealthCheckDimensionEnum; +import com.xiaojukeji.know.streaming.km.common.enums.health.HealthCheckNameEnum; +import com.xiaojukeji.know.streaming.km.common.utils.Tuple; +import com.xiaojukeji.know.streaming.km.core.service.connect.connector.ConnectorMetricService; +import com.xiaojukeji.know.streaming.km.core.service.connect.connector.ConnectorService; +import com.xiaojukeji.know.streaming.km.core.service.health.checker.AbstractHealthCheckService; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import javax.annotation.PostConstruct; +import java.util.ArrayList; +import java.util.List; + +import static com.xiaojukeji.know.streaming.km.core.service.version.metrics.connect.ConnectorMetricVersionItems.CONNECTOR_METRIC_CONNECTOR_FAILED_TASK_COUNT; +import static com.xiaojukeji.know.streaming.km.core.service.version.metrics.connect.ConnectorMetricVersionItems.CONNECTOR_METRIC_CONNECTOR_UNASSIGNED_TASK_COUNT; + +/** + * @author wyb + * @date 2022/11/8 + */ +@Service +public class HealthCheckConnectorService extends AbstractHealthCheckService { + + private static final ILog log = LogFactory.getLog(HealthCheckConnectorService.class); + @Autowired + private ConnectorService connectorService; + + @Autowired + private ConnectorMetricService connectorMetricService; + + @PostConstruct + private void init() { + functionMap.putIfAbsent(HealthCheckNameEnum.CONNECTOR_FAILED_TASK_COUNT.getConfigName(), this::checkFailedTaskCount); + functionMap.putIfAbsent(HealthCheckNameEnum.CONNECTOR_UNASSIGNED_TASK_COUNT.getConfigName(), 
this::checkUnassignedTaskCount); + } + + @Override + public List getResList(Long connectClusterId) { + List paramList = new ArrayList<>(); + Result> ret = connectorService.listConnectorsFromCluster(connectClusterId); + if (!ret.hasData()) { + return paramList; + } + + for (String connectorName : ret.getData()) { + paramList.add(new ConnectorParam(connectClusterId, connectorName)); + } + + return paramList; + } + + @Override + public HealthCheckDimensionEnum getHealthCheckDimensionEnum() { + return HealthCheckDimensionEnum.CONNECTOR; + } + + private HealthCheckResult checkFailedTaskCount(Tuple paramTuple) { + ConnectorParam param = (ConnectorParam) paramTuple.getV1(); + HealthCompareValueConfig compareConfig = (HealthCompareValueConfig) paramTuple.getV2(); + + Long connectClusterId = param.getConnectClusterId(); + String connectorName = param.getConnectorName(); + Double compareValue = compareConfig.getValue(); + + return this.getHealthCompareResult(connectClusterId, connectorName, CONNECTOR_METRIC_CONNECTOR_FAILED_TASK_COUNT, HealthCheckNameEnum.CONNECTOR_FAILED_TASK_COUNT, compareValue); + } + + private HealthCheckResult checkUnassignedTaskCount(Tuple paramTuple) { + ConnectorParam param = (ConnectorParam) paramTuple.getV1(); + HealthCompareValueConfig compareConfig = (HealthCompareValueConfig) paramTuple.getV2(); + + Long connectClusterId = param.getConnectClusterId(); + String connectorName = param.getConnectorName(); + Double compareValue = compareConfig.getValue(); + + return this.getHealthCompareResult(connectClusterId, connectorName, CONNECTOR_METRIC_CONNECTOR_UNASSIGNED_TASK_COUNT, HealthCheckNameEnum.CONNECTOR_UNASSIGNED_TASK_COUNT, compareValue); + + } + + private HealthCheckResult getHealthCompareResult(Long connectClusterId, String connectorName, String metricName, HealthCheckNameEnum healthCheckNameEnum, Double compareValue) { + + Result ret = connectorMetricService.collectConnectClusterMetricsFromKafka(connectClusterId, connectorName, metricName); + + 
if (!ret.hasData()) { + log.error("method=getHealthCompareResult||connectClusterId={}||connectorName={}||metricName={}||errMsg=get metrics failed", + connectClusterId, connectorName, metricName); + return null; + } + + Float value = ret.getData().getMetric(metricName); + + if (value == null) { + log.error("method=getHealthCompareResult||connectClusterId={}||connectorName={}||metricName={}||errMsg=get metrics failed", + connectClusterId, connectorName, metricName); + return null; + } + + HealthCheckResult checkResult = new HealthCheckResult( + HealthCheckDimensionEnum.CONNECTOR.getDimension(), + healthCheckNameEnum.getConfigName(), + connectClusterId, + connectorName + ); + checkResult.setPassed(compareValue >= value ? Constant.YES : Constant.NO); + return checkResult; + + } + + +} diff --git a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/health/checker/group/HealthCheckGroupService.java b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/health/checker/group/HealthCheckGroupService.java index d5022cfc..62cf8d13 100644 --- a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/health/checker/group/HealthCheckGroupService.java +++ b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/health/checker/group/HealthCheckGroupService.java @@ -9,6 +9,7 @@ import com.xiaojukeji.know.streaming.km.common.bean.entity.param.cluster.Cluster import com.xiaojukeji.know.streaming.km.common.bean.entity.param.group.GroupParam; import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result; import com.xiaojukeji.know.streaming.km.common.bean.entity.search.SearchTerm; +import com.xiaojukeji.know.streaming.km.common.constant.Constant; import com.xiaojukeji.know.streaming.km.common.enums.group.GroupStateEnum; import com.xiaojukeji.know.streaming.km.common.enums.health.HealthCheckNameEnum; import com.xiaojukeji.know.streaming.km.common.enums.health.HealthCheckDimensionEnum; @@ -78,7 +79,7 @@ public class 
HealthCheckGroupService extends AbstractHealthCheckService { return null; } - checkResult.setPassed(countResult.getData() >= singleConfig.getDetectedTimes()? 0: 1); + checkResult.setPassed(countResult.getData() >= singleConfig.getDetectedTimes() ? Constant.NO : Constant.YES); return checkResult; } diff --git a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/health/checker/topic/HealthCheckTopicService.java b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/health/checker/topic/HealthCheckTopicService.java index 2557fd5b..57537bf4 100644 --- a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/health/checker/topic/HealthCheckTopicService.java +++ b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/health/checker/topic/HealthCheckTopicService.java @@ -109,7 +109,7 @@ public class HealthCheckTopicService extends AbstractHealthCheckService { param.getTopicName() ); - checkResult.setPassed(partitionList.stream().filter(elem -> elem.getLeaderBrokerId().equals(Constant.INVALID_CODE)).count() >= valueConfig.getValue()? 0: 1); + checkResult.setPassed(partitionList.stream().filter(elem -> elem.getLeaderBrokerId().equals(Constant.INVALID_CODE)).count() >= valueConfig.getValue() ? 
Constant.NO : Constant.YES); return checkResult; } diff --git a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/health/checkresult/HealthCheckResultService.java b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/health/checkresult/HealthCheckResultService.java index 66a48904..05346d0d 100644 --- a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/health/checkresult/HealthCheckResultService.java +++ b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/health/checkresult/HealthCheckResultService.java @@ -24,4 +24,6 @@ public interface HealthCheckResultService { Map getClusterHealthConfig(Long clusterPhyId); void batchReplace(Long clusterPhyId, Integer dimension, List healthCheckResults); + + List getConnectorHealthCheckResult(Long clusterPhyId); } diff --git a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/health/checkresult/impl/HealthCheckResultServiceImpl.java b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/health/checkresult/impl/HealthCheckResultServiceImpl.java index 2689c50c..4f0640c2 100644 --- a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/health/checkresult/impl/HealthCheckResultServiceImpl.java +++ b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/health/checkresult/impl/HealthCheckResultServiceImpl.java @@ -7,20 +7,26 @@ import com.xiaojukeji.know.streaming.km.common.bean.entity.config.healthcheck.Ba import com.xiaojukeji.know.streaming.km.common.bean.entity.health.HealthCheckAggResult; import com.xiaojukeji.know.streaming.km.common.bean.entity.health.HealthCheckResult; import com.xiaojukeji.know.streaming.km.common.bean.po.config.PlatformClusterConfigPO; +import com.xiaojukeji.know.streaming.km.common.bean.po.connect.ConnectClusterPO; import com.xiaojukeji.know.streaming.km.common.bean.po.health.HealthCheckResultPO; import com.xiaojukeji.know.streaming.km.common.enums.config.ConfigGroupEnum; 
import com.xiaojukeji.know.streaming.km.common.enums.health.HealthCheckDimensionEnum; import com.xiaojukeji.know.streaming.km.common.enums.health.HealthCheckNameEnum; import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil; import com.xiaojukeji.know.streaming.km.core.cache.DataBaseDataLocalCache; +import com.xiaojukeji.know.streaming.km.common.utils.ValidateUtils; import com.xiaojukeji.know.streaming.km.core.service.config.PlatformClusterConfigService; import com.xiaojukeji.know.streaming.km.core.service.health.checkresult.HealthCheckResultService; +import com.xiaojukeji.know.streaming.km.persistence.mysql.connect.ConnectClusterDAO; import com.xiaojukeji.know.streaming.km.persistence.mysql.health.HealthCheckResultDAO; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.dao.DuplicateKeyException; import org.springframework.stereotype.Service; import java.util.*; +import java.util.stream.Collectors; + +import static com.xiaojukeji.know.streaming.km.common.enums.health.HealthCheckDimensionEnum.CONNECTOR; @Service public class HealthCheckResultServiceImpl implements HealthCheckResultService { @@ -29,6 +35,9 @@ public class HealthCheckResultServiceImpl implements HealthCheckResultService { @Autowired private HealthCheckResultDAO healthCheckResultDAO; + @Autowired + private ConnectClusterDAO connectClusterDAO; + @Autowired private PlatformClusterConfigService platformClusterConfigService; @@ -122,6 +131,25 @@ public class HealthCheckResultServiceImpl implements HealthCheckResultService { return configMap; } + @Override + public List getConnectorHealthCheckResult(Long clusterPhyId) { + List resultPOList = new ArrayList<>(); + + //查找connect集群 + LambdaQueryWrapper lambdaQueryWrapper = new LambdaQueryWrapper<>(); + lambdaQueryWrapper.eq(ConnectClusterPO::getKafkaClusterPhyId, clusterPhyId); + List connectClusterIdList = connectClusterDAO.selectList(lambdaQueryWrapper).stream().map(elem -> 
elem.getId()).collect(Collectors.toList()); + if (ValidateUtils.isEmptyList(connectClusterIdList)) { + return resultPOList; + } + + LambdaQueryWrapper wrapper = new LambdaQueryWrapper<>(); + wrapper.eq(HealthCheckResultPO::getDimension, CONNECTOR.getDimension()); + wrapper.in(HealthCheckResultPO::getClusterPhyId, connectClusterIdList); + resultPOList.addAll(healthCheckResultDAO.selectList(wrapper)); + return resultPOList; + } + @Override public void batchReplace(Long clusterPhyId, Integer dimension, List healthCheckResults) { List inDBList = this.listCheckResult(clusterPhyId, dimension); diff --git a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/health/state/HealthStateService.java b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/health/state/HealthStateService.java index 35692cb8..bfe9a1bb 100644 --- a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/health/state/HealthStateService.java +++ b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/health/state/HealthStateService.java @@ -2,6 +2,7 @@ package com.xiaojukeji.know.streaming.km.core.service.health.state; import com.xiaojukeji.know.streaming.km.common.bean.entity.health.HealthScoreResult; import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.*; +import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.connect.ConnectorMetrics; import com.xiaojukeji.know.streaming.km.common.enums.health.HealthCheckDimensionEnum; import java.util.List; @@ -16,11 +17,13 @@ public interface HealthStateService { TopicMetrics calTopicHealthMetrics(Long clusterPhyId, String topicName); GroupMetrics calGroupHealthMetrics(Long clusterPhyId, String groupName); ZookeeperMetrics calZookeeperHealthMetrics(Long clusterPhyId); + ConnectorMetrics calConnectorHealthMetrics(Long connectClusterId, String connectorName); /** * 获取集群健康检查结果 */ List getClusterHealthResult(Long clusterPhyId); List getDimensionHealthResult(Long clusterPhyId, 
HealthCheckDimensionEnum dimensionEnum); - List getResHealthResult(Long clusterPhyId, Integer dimension, String resNme); + List getDimensionHealthResult(Long clusterPhyId, List dimensionCodeList); + List getResHealthResult(Long clusterPhyId, Long clusterId, Integer dimension, String resNme); } diff --git a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/health/state/impl/HealthStateServiceImpl.java b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/health/state/impl/HealthStateServiceImpl.java index 5669f300..8193af86 100644 --- a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/health/state/impl/HealthStateServiceImpl.java +++ b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/health/state/impl/HealthStateServiceImpl.java @@ -2,24 +2,31 @@ package com.xiaojukeji.know.streaming.km.core.service.health.state.impl; import com.xiaojukeji.know.streaming.km.common.bean.entity.broker.Broker; import com.xiaojukeji.know.streaming.km.common.bean.entity.config.healthcheck.BaseClusterHealthConfig; +import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.ConnectCluster; import com.xiaojukeji.know.streaming.km.common.bean.entity.health.HealthCheckAggResult; import com.xiaojukeji.know.streaming.km.common.bean.entity.health.HealthScoreResult; import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.*; +import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.connect.ConnectorMetrics; import com.xiaojukeji.know.streaming.km.common.bean.po.health.HealthCheckResultPO; import com.xiaojukeji.know.streaming.km.common.enums.health.HealthCheckDimensionEnum; import com.xiaojukeji.know.streaming.km.common.enums.health.HealthCheckNameEnum; import com.xiaojukeji.know.streaming.km.common.enums.health.HealthStateEnum; import com.xiaojukeji.know.streaming.km.common.utils.ValidateUtils; import com.xiaojukeji.know.streaming.km.core.service.broker.BrokerService; +import 
com.xiaojukeji.know.streaming.km.core.service.connect.cluster.ConnectClusterService; import com.xiaojukeji.know.streaming.km.core.service.health.checkresult.HealthCheckResultService; import com.xiaojukeji.know.streaming.km.core.service.health.state.HealthStateService; import com.xiaojukeji.know.streaming.km.core.service.zookeeper.ZookeeperService; +import com.xiaojukeji.know.streaming.km.persistence.connect.cache.LoadedConnectClusterCache; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import java.util.*; import java.util.stream.Collectors; +import java.util.List; +import static com.xiaojukeji.know.streaming.km.common.enums.health.HealthCheckDimensionEnum.*; +import static com.xiaojukeji.know.streaming.km.core.service.version.metrics.connect.ConnectorMetricVersionItems.*; import static com.xiaojukeji.know.streaming.km.core.service.version.metrics.kafka.BrokerMetricVersionItems.*; import static com.xiaojukeji.know.streaming.km.core.service.version.metrics.kafka.ClusterMetricVersionItems.*; import static com.xiaojukeji.know.streaming.km.core.service.version.metrics.kafka.GroupMetricVersionItems.*; @@ -38,6 +45,9 @@ public class HealthStateServiceImpl implements HealthStateService { @Autowired private BrokerService brokerService; + @Autowired + private ConnectClusterService connectClusterService; + @Override public ClusterMetrics calClusterHealthMetrics(Long clusterPhyId) { ClusterMetrics metrics = new ClusterMetrics(clusterPhyId); @@ -59,6 +69,7 @@ public class HealthStateServiceImpl implements HealthStateService { metrics.putMetric(this.calClusterTopicsHealthMetrics(clusterPhyId).getMetrics()); metrics.putMetric(this.calClusterGroupsHealthMetrics(clusterPhyId).getMetrics()); metrics.putMetric(this.calZookeeperHealthMetrics(clusterPhyId).getMetrics()); + metrics.putMetric(this.calClusterConnectsHealthMetrics(clusterPhyId).getMetrics()); // 统计最终结果 Float passed = 0.0f; @@ -67,6 +78,7 @@ public class 
HealthStateServiceImpl implements HealthStateService { passed += metrics.getMetric(CLUSTER_METRIC_HEALTH_CHECK_PASSED_BROKERS); passed += metrics.getMetric(CLUSTER_METRIC_HEALTH_CHECK_PASSED_GROUPS); passed += metrics.getMetric(CLUSTER_METRIC_HEALTH_CHECK_PASSED_CLUSTER); + passed += metrics.getMetric(CLUSTER_METRIC_HEALTH_CHECK_PASSED_CONNECTOR); Float total = 0.0f; total += metrics.getMetric(ZOOKEEPER_METRIC_HEALTH_CHECK_TOTAL); @@ -74,6 +86,7 @@ public class HealthStateServiceImpl implements HealthStateService { total += metrics.getMetric(CLUSTER_METRIC_HEALTH_CHECK_TOTAL_BROKERS); total += metrics.getMetric(CLUSTER_METRIC_HEALTH_CHECK_TOTAL_GROUPS); total += metrics.getMetric(CLUSTER_METRIC_HEALTH_CHECK_TOTAL_CLUSTER); + total += metrics.getMetric(CLUSTER_METRIC_HEALTH_CHECK_TOTAL_CONNECTOR); // 状态 Float state = 0.0f; @@ -82,6 +95,7 @@ public class HealthStateServiceImpl implements HealthStateService { state = Math.max(state, metrics.getMetric(CLUSTER_METRIC_HEALTH_STATE_BROKERS)); state = Math.max(state, metrics.getMetric(CLUSTER_METRIC_HEALTH_STATE_GROUPS)); state = Math.max(state, metrics.getMetric(CLUSTER_METRIC_HEALTH_STATE_CLUSTER)); + state = Math.max(state, metrics.getMetric(CLUSTER_METRIC_HEALTH_STATE_CONNECTOR)); metrics.getMetrics().put(CLUSTER_METRIC_HEALTH_CHECK_PASSED, passed); metrics.getMetrics().put(CLUSTER_METRIC_HEALTH_CHECK_TOTAL, total); @@ -184,6 +198,31 @@ public class HealthStateServiceImpl implements HealthStateService { return metrics; } + @Override + public ConnectorMetrics calConnectorHealthMetrics(Long connectClusterId, String connectorName) { + ConnectCluster connectCluster = LoadedConnectClusterCache.getByPhyId(connectClusterId); + ConnectorMetrics metrics = new ConnectorMetrics(connectClusterId, connectorName); + + // 找不到connect集群 + if (connectCluster == null) { + metrics.putMetric(CONNECTOR_METRIC_HEALTH_STATE, (float) HealthStateEnum.DEAD.getDimension()); + return metrics; + } + + List resultList = 
healthCheckResultService.getHealthCheckAggResult(connectClusterId, HealthCheckDimensionEnum.CONNECTOR, connectorName); + + if (ValidateUtils.isEmptyList(resultList)) { + metrics.getMetrics().put(CONNECTOR_METRIC_HEALTH_CHECK_PASSED, 0.0f); + metrics.getMetrics().put(CONNECTOR_METRIC_HEALTH_CHECK_TOTAL, 0.0f); + } else { + metrics.getMetrics().put(CONNECTOR_METRIC_HEALTH_CHECK_PASSED, this.getHealthCheckPassed(resultList)); + metrics.getMetrics().put(CONNECTOR_METRIC_HEALTH_CHECK_TOTAL, (float) resultList.size()); + } + + metrics.putMetric(CONNECTOR_METRIC_HEALTH_STATE, (float) this.calHealthState(resultList).getDimension()); + return metrics; + } + @Override public List getClusterHealthResult(Long clusterPhyId) { List poList = healthCheckResultService.listCheckResult(clusterPhyId); @@ -199,8 +238,36 @@ public class HealthStateServiceImpl implements HealthStateService { } @Override - public List getResHealthResult(Long clusterPhyId, Integer dimension, String resNme) { - List poList = healthCheckResultService.listCheckResult(clusterPhyId, dimension, resNme); + public List getDimensionHealthResult(Long clusterPhyId, List dimensionCodeList) { + //查找健康巡查结果 + List poList = new ArrayList<>(); + for (Integer dimensionCode : dimensionCodeList) { + HealthCheckDimensionEnum dimensionEnum = HealthCheckDimensionEnum.getByCode(dimensionCode); + + if (dimensionEnum.equals(HealthCheckDimensionEnum.UNKNOWN)) { + continue; + } + + if (dimensionEnum.equals(HealthCheckDimensionEnum.CONNECTOR)) { + poList.addAll(healthCheckResultService.getConnectorHealthCheckResult(clusterPhyId)); + } else { + poList.addAll(healthCheckResultService.listCheckResult(clusterPhyId, dimensionEnum.getDimension())); + } + } + + List resultList = this.getResHealthResult(clusterPhyId, dimensionCodeList, poList); + return resultList; + + } + + @Override + public List getResHealthResult(Long clusterPhyId, Long clusterId, Integer dimension, String resNme) { + List poList = 
healthCheckResultService.listCheckResult(clusterId, dimension, resNme); + Map> checkResultMap = new HashMap<>(); + for (HealthCheckResultPO po: poList) { + checkResultMap.putIfAbsent(po.getConfigName(), new ArrayList<>()); + checkResultMap.get(po.getConfigName()).add(po); + } return this.convert2HealthScoreResultList(clusterPhyId, poList, dimension); } @@ -272,6 +339,36 @@ public class HealthStateServiceImpl implements HealthStateService { return metrics; } + private ClusterMetrics calClusterConnectsHealthMetrics(Long clusterPhyId) { + //获取健康巡检结果 + List connectHealthCheckResult = healthCheckResultService.getConnectorHealthCheckResult(clusterPhyId); + + connectHealthCheckResult.addAll(healthCheckResultService.listCheckResult(clusterPhyId, CONNECT_CLUSTER.getDimension())); + + List dimensionCodeList = Arrays.asList(CONNECTOR.getDimension(), CONNECT_CLUSTER.getDimension()); + + List resultList = this.getDimensionHealthCheckAggResult(connectHealthCheckResult, dimensionCodeList); + + ClusterMetrics metrics = new ClusterMetrics(clusterPhyId); + + if (ValidateUtils.isEmptyList(resultList)) { + metrics.getMetrics().put(CLUSTER_METRIC_HEALTH_CHECK_PASSED_CONNECTOR, 0.0f); + metrics.getMetrics().put(CLUSTER_METRIC_HEALTH_CHECK_TOTAL_CONNECTOR, 0.0f); + } else { + metrics.getMetrics().put(CLUSTER_METRIC_HEALTH_CHECK_PASSED_CONNECTOR, this.getHealthCheckPassed(resultList)); + metrics.getMetrics().put(CLUSTER_METRIC_HEALTH_CHECK_TOTAL_CONNECTOR, (float) resultList.size()); + } + + // 先根据connect集群状态判断 + if (connectClusterService.existConnectClusterDown(clusterPhyId)) { + metrics.putMetric(CLUSTER_METRIC_HEALTH_STATE_CONNECTOR, (float) HealthStateEnum.POOR.getDimension()); + return metrics; + } + + metrics.putMetric(CLUSTER_METRIC_HEALTH_STATE_CONNECTOR, (float) this.calHealthState(resultList).getDimension()); + return metrics; + } + /**************************************************** 聚合数据 ****************************************************/ @@ -305,6 +402,61 @@ public class 
HealthStateServiceImpl implements HealthStateService { /**************************************************** 计算指标 ****************************************************/ + private List getDimensionHealthCheckAggResult(List poList, List dimensionCodeList) { + Map /*检查结果列表*/> checkResultMap = new HashMap<>(); + + for (HealthCheckResultPO po : poList) { + checkResultMap.putIfAbsent(po.getConfigName(), new ArrayList<>()); + checkResultMap.get(po.getConfigName()).add(po); + } + + List stateList = new ArrayList<>(); + for (Integer dimensionCode : dimensionCodeList) { + HealthCheckDimensionEnum dimensionEnum = HealthCheckDimensionEnum.getByCode(dimensionCode); + + if (dimensionEnum.equals(UNKNOWN)) { + continue; + } + + for (HealthCheckNameEnum nameEnum : HealthCheckNameEnum.getByDimension(dimensionEnum)) { + stateList.add(new HealthCheckAggResult(nameEnum, checkResultMap.getOrDefault(nameEnum.getConfigName(), new ArrayList<>()))); + } + } + return stateList; + } + + private List getResHealthResult(Long clusterPhyId, List dimensionCodeList, List poList) { + Map /*检查结果列表*/> checkResultMap = new HashMap<>(); + + for (HealthCheckResultPO po : poList) { + checkResultMap.putIfAbsent(po.getConfigName(), new ArrayList<>()); + checkResultMap.get(po.getConfigName()).add(po); + } + + Map configMap = healthCheckResultService.getClusterHealthConfig(clusterPhyId); + + List healthScoreResultList = new ArrayList<>(); + for (Integer dimensionCode : dimensionCodeList) { + HealthCheckDimensionEnum dimensionEnum = HealthCheckDimensionEnum.getByCode(dimensionCode); + + //该维度不存在,则跳过 + if (dimensionEnum.equals(HealthCheckDimensionEnum.UNKNOWN)){ + continue; + } + + for (HealthCheckNameEnum nameEnum : HealthCheckNameEnum.getByDimension(dimensionEnum)) { + BaseClusterHealthConfig baseConfig = configMap.get(nameEnum.getConfigName()); + if (baseConfig == null) { + continue; + } + + healthScoreResultList.add(new HealthScoreResult(nameEnum, baseConfig, 
checkResultMap.getOrDefault(nameEnum.getConfigName(), new ArrayList<>()))); + + } + } + return healthScoreResultList; + } + private float getHealthCheckPassed(List aggResultList){ if(ValidateUtils.isEmptyList(aggResultList)) { return 0f; diff --git a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/kafkauser/impl/KafkaUserServiceImpl.java b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/kafkauser/impl/KafkaUserServiceImpl.java index e939f00d..3916ac9d 100644 --- a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/kafkauser/impl/KafkaUserServiceImpl.java +++ b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/kafkauser/impl/KafkaUserServiceImpl.java @@ -27,6 +27,7 @@ import com.xiaojukeji.know.streaming.km.common.utils.ValidateUtils; import com.xiaojukeji.know.streaming.km.core.service.cluster.ClusterPhyService; import com.xiaojukeji.know.streaming.km.core.service.kafkauser.KafkaUserService; import com.xiaojukeji.know.streaming.km.core.service.oprecord.OpLogWrapService; +import com.xiaojukeji.know.streaming.km.core.service.version.BaseKafkaVersionControlService; import com.xiaojukeji.know.streaming.km.core.service.version.BaseVersionControlService; import com.xiaojukeji.know.streaming.km.persistence.kafka.KafkaAdminClient; import com.xiaojukeji.know.streaming.km.persistence.kafka.KafkaAdminZKClient; @@ -54,7 +55,7 @@ import static com.xiaojukeji.know.streaming.km.common.enums.version.VersionEnum. 
@Service -public class KafkaUserServiceImpl extends BaseVersionControlService implements KafkaUserService { +public class KafkaUserServiceImpl extends BaseKafkaVersionControlService implements KafkaUserService { private static final ILog log = LogFactory.getLog(KafkaUserServiceImpl.class); private static final String KAFKA_USER_REPLACE = "replaceKafkaUser"; diff --git a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/partition/impl/OpPartitionServiceImpl.java b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/partition/impl/OpPartitionServiceImpl.java index 6dbb5816..838ac594 100644 --- a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/partition/impl/OpPartitionServiceImpl.java +++ b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/partition/impl/OpPartitionServiceImpl.java @@ -10,7 +10,7 @@ import com.xiaojukeji.know.streaming.km.common.constant.KafkaConstant; import com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemTypeEnum; import com.xiaojukeji.know.streaming.km.common.exception.VCHandlerNotExistException; import com.xiaojukeji.know.streaming.km.core.service.partition.OpPartitionService; -import com.xiaojukeji.know.streaming.km.core.service.version.BaseVersionControlService; +import com.xiaojukeji.know.streaming.km.core.service.version.BaseKafkaVersionControlService; import com.xiaojukeji.know.streaming.km.persistence.kafka.KafkaAdminClient; import com.xiaojukeji.know.streaming.km.persistence.kafka.KafkaAdminZKClient; import kafka.zk.KafkaZkClient; @@ -36,7 +36,7 @@ import static com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemT * @author didi */ @Service -public class OpPartitionServiceImpl extends BaseVersionControlService implements OpPartitionService { +public class OpPartitionServiceImpl extends BaseKafkaVersionControlService implements OpPartitionService { private static final ILog LOGGER = LogFactory.getLog(OpPartitionServiceImpl.class); 
@Autowired diff --git a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/partition/impl/PartitionMetricServiceImpl.java b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/partition/impl/PartitionMetricServiceImpl.java index 63e9bc36..39a95c31 100644 --- a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/partition/impl/PartitionMetricServiceImpl.java +++ b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/partition/impl/PartitionMetricServiceImpl.java @@ -3,7 +3,6 @@ package com.xiaojukeji.know.streaming.km.core.service.partition.impl; import com.didiglobal.logi.log.ILog; import com.didiglobal.logi.log.LogFactory; import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.PartitionMetrics; -import com.xiaojukeji.know.streaming.km.common.bean.entity.offset.KSOffsetSpec; import com.xiaojukeji.know.streaming.km.common.bean.entity.param.VersionItemParam; import com.xiaojukeji.know.streaming.km.common.bean.entity.param.metric.TopicMetricParam; import com.xiaojukeji.know.streaming.km.common.bean.entity.partition.Partition; diff --git a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/partition/impl/PartitionServiceImpl.java b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/partition/impl/PartitionServiceImpl.java index a2094028..83222090 100644 --- a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/partition/impl/PartitionServiceImpl.java +++ b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/partition/impl/PartitionServiceImpl.java @@ -26,7 +26,7 @@ import com.xiaojukeji.know.streaming.km.core.cache.DataBaseDataLocalCache; import com.xiaojukeji.know.streaming.km.persistence.kafka.zookeeper.znode.brokers.PartitionMap; import com.xiaojukeji.know.streaming.km.persistence.kafka.zookeeper.znode.brokers.PartitionState; import com.xiaojukeji.know.streaming.km.core.service.partition.PartitionService; -import 
com.xiaojukeji.know.streaming.km.core.service.version.BaseVersionControlService; +import com.xiaojukeji.know.streaming.km.core.service.version.BaseKafkaVersionControlService; import com.xiaojukeji.know.streaming.km.persistence.kafka.KafkaAdminClient; import com.xiaojukeji.know.streaming.km.persistence.kafka.KafkaConsumerClient; import com.xiaojukeji.know.streaming.km.persistence.mysql.partition.PartitionDAO; @@ -57,7 +57,7 @@ import static com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemT * @author didi */ @Service -public class PartitionServiceImpl extends BaseVersionControlService implements PartitionService { +public class PartitionServiceImpl extends BaseKafkaVersionControlService implements PartitionService { private static final ILog log = LogFactory.getLog(PartitionServiceImpl.class); private static final String PARTITION_OFFSET_GET = "getPartitionOffset"; diff --git a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/reassign/impl/ReassignServiceImpl.java b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/reassign/impl/ReassignServiceImpl.java index 05ae8fb4..f406e10b 100644 --- a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/reassign/impl/ReassignServiceImpl.java +++ b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/reassign/impl/ReassignServiceImpl.java @@ -19,7 +19,7 @@ import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil; import com.xiaojukeji.know.streaming.km.common.utils.ValidateUtils; import com.xiaojukeji.know.streaming.km.core.service.reassign.ReassignService; import com.xiaojukeji.know.streaming.km.core.service.topic.TopicService; -import com.xiaojukeji.know.streaming.km.core.service.version.BaseVersionControlService; +import com.xiaojukeji.know.streaming.km.core.service.version.BaseKafkaVersionControlService; import com.xiaojukeji.know.streaming.km.persistence.kafka.KafkaAdminClient; import 
com.xiaojukeji.know.streaming.km.persistence.kafka.KafkaAdminZKClient; import kafka.admin.ReassignPartitionsCommand; @@ -42,7 +42,7 @@ import static com.xiaojukeji.know.streaming.km.common.enums.version.VersionEnum. import static com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemTypeEnum.SERVICE_OP_REASSIGNMENT; @Service -public class ReassignServiceImpl extends BaseVersionControlService implements ReassignService { +public class ReassignServiceImpl extends BaseKafkaVersionControlService implements ReassignService { private static final ILog log = LogFactory.getLog(ReassignServiceImpl.class); private static final String EXECUTE_TASK = "executeTask"; diff --git a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/topic/impl/OpTopicServiceImpl.java b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/topic/impl/OpTopicServiceImpl.java index 7cd017f4..5a7b0f76 100644 --- a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/topic/impl/OpTopicServiceImpl.java +++ b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/topic/impl/OpTopicServiceImpl.java @@ -20,7 +20,7 @@ import com.xiaojukeji.know.streaming.km.common.exception.VCHandlerNotExistExcept import com.xiaojukeji.know.streaming.km.core.service.oprecord.OpLogWrapService; import com.xiaojukeji.know.streaming.km.core.service.topic.OpTopicService; import com.xiaojukeji.know.streaming.km.core.service.topic.TopicService; -import com.xiaojukeji.know.streaming.km.core.service.version.BaseVersionControlService; +import com.xiaojukeji.know.streaming.km.core.service.version.BaseKafkaVersionControlService; import com.xiaojukeji.know.streaming.km.persistence.kafka.KafkaAdminClient; import com.xiaojukeji.know.streaming.km.persistence.kafka.KafkaAdminZKClient; import com.xiaojukeji.know.streaming.km.persistence.kafka.zookeeper.service.KafkaZKDAO; @@ -48,7 +48,7 @@ import static com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemT * 
@author didi */ @Service -public class OpTopicServiceImpl extends BaseVersionControlService implements OpTopicService { +public class OpTopicServiceImpl extends BaseKafkaVersionControlService implements OpTopicService { private static final ILog log = LogFactory.getLog(TopicConfigServiceImpl.class); private static final String TOPIC_CREATE = "createTopic"; diff --git a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/topic/impl/TopicConfigServiceImpl.java b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/topic/impl/TopicConfigServiceImpl.java index 0149b5d4..735487b2 100644 --- a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/topic/impl/TopicConfigServiceImpl.java +++ b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/topic/impl/TopicConfigServiceImpl.java @@ -26,7 +26,7 @@ import com.xiaojukeji.know.streaming.km.core.service.cluster.ClusterPhyService; import com.xiaojukeji.know.streaming.km.core.service.oprecord.OpLogWrapService; import com.xiaojukeji.know.streaming.km.core.service.topic.TopicConfigService; import com.xiaojukeji.know.streaming.km.core.service.topic.TopicService; -import com.xiaojukeji.know.streaming.km.core.service.version.BaseVersionControlService; +import com.xiaojukeji.know.streaming.km.core.service.version.BaseKafkaVersionControlService; import com.xiaojukeji.know.streaming.km.persistence.kafka.KafkaAdminClient; import com.xiaojukeji.know.streaming.km.persistence.kafka.KafkaAdminZKClient; import com.xiaojukeji.know.streaming.km.persistence.kafka.zookeeper.service.KafkaZKDAO; @@ -46,7 +46,7 @@ import static com.xiaojukeji.know.streaming.km.common.enums.version.VersionEnum. 
@Service -public class TopicConfigServiceImpl extends BaseVersionControlService implements TopicConfigService { +public class TopicConfigServiceImpl extends BaseKafkaVersionControlService implements TopicConfigService { private static final ILog log = LogFactory.getLog(TopicConfigServiceImpl.class); private static final String GET_TOPIC_CONFIG = "getTopicConfig"; diff --git a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/version/BaseConnectorMetricService.java b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/version/BaseConnectorMetricService.java new file mode 100644 index 00000000..5efc4438 --- /dev/null +++ b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/version/BaseConnectorMetricService.java @@ -0,0 +1,74 @@ +package com.xiaojukeji.know.streaming.km.core.service.version; + +import com.didiglobal.logi.log.ILog; +import com.didiglobal.logi.log.LogFactory; +import com.xiaojukeji.know.streaming.km.common.bean.entity.search.SearchQuery; +import com.xiaojukeji.know.streaming.km.common.bean.vo.metrics.line.MetricLineVO; +import com.xiaojukeji.know.streaming.km.common.bean.vo.metrics.line.MetricMultiLinesVO; +import com.xiaojukeji.know.streaming.km.common.bean.vo.metrics.point.MetricPointVO; +import org.springframework.util.CollectionUtils; + +import javax.annotation.PostConstruct; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +/** + * @author wyb + * @date 2022/11/9 + */ +public abstract class BaseConnectorMetricService extends BaseConnectorVersionControlService{ + private static final ILog LOGGER = LogFactory.getLog(BaseMetricService.class); + + private List metricNames = new ArrayList<>(); + private List metricFields = new ArrayList<>(); + + @PostConstruct + public void init(){ + initMetricFieldAndNameList(); + initRegisterVCHandler(); + } + + protected void initMetricFieldAndNameList(){ + metricNames = 
listVersionControlItems().stream().map(v -> v.getName()).collect(Collectors.toList()); + metricFields = listMetricPOFields(); + } + + protected abstract List listMetricPOFields(); + + protected abstract void initRegisterVCHandler(); + + /** + * 检查 str 是不是一个 metricName + * @param str + */ + protected boolean isMetricName(String str){ + return metricNames.contains(str); + } + + /** + * 检查 str 是不是一个 fieldName + * @param str + */ + protected boolean isMetricField(String str){ + return metricFields.contains(str); + } + + protected void setQueryMetricFlag(SearchQuery query){ + if(null == query){return;} + + String fieldName = query.getQueryName(); + + query.setMetric(isMetricName(fieldName)); + query.setField(isMetricField(fieldName)); + } + + protected void setQueryMetricFlag(List matches){ + if(CollectionUtils.isEmpty(matches)){return;} + + for (SearchQuery match : matches){ + setQueryMetricFlag(match); + } + } +} diff --git a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/version/BaseConnectorVersionControlService.java b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/version/BaseConnectorVersionControlService.java new file mode 100644 index 00000000..ced858ff --- /dev/null +++ b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/version/BaseConnectorVersionControlService.java @@ -0,0 +1,55 @@ +package com.xiaojukeji.know.streaming.km.core.service.version; + +import com.xiaojukeji.know.streaming.km.common.bean.entity.param.VersionItemParam; +import com.xiaojukeji.know.streaming.km.common.bean.entity.version.VersionConnectJmxInfo; +import com.xiaojukeji.know.streaming.km.common.exception.VCHandlerNotExistException; +import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil; +import com.xiaojukeji.know.streaming.km.common.utils.Tuple; +import com.xiaojukeji.know.streaming.km.core.service.connect.cluster.ConnectClusterService; +import org.springframework.beans.factory.annotation.Autowired; + +import 
javax.annotation.Nullable; + +/** + * @author wyb + * @date 2022/11/8 + */ +public abstract class BaseConnectorVersionControlService extends BaseVersionControlService { + + @Autowired + ConnectClusterService connectClusterService; + + @Nullable + protected Object doVCHandler(Long connectClusterId, String action, VersionItemParam param) throws VCHandlerNotExistException { + String versionStr = connectClusterService.getClusterVersion(connectClusterId); + + LOGGER.debug( + "method=doVCHandler||connectClusterId={}||action={}||type={}||param={}", + connectClusterId, action, getVersionItemType().getMessage(), ConvertUtil.obj2Json(param) + ); + + Tuple ret = doVCHandler(versionStr, action, param); + + LOGGER.debug( + "method=doVCHandler||clusterId={}||action={}||methodName={}||type={}||param={}||ret={}!", + connectClusterId, action, ret != null ?ret.getV2(): "", getVersionItemType().getMessage(), ConvertUtil.obj2Json(param), ConvertUtil.obj2Json(ret) + ); + + return ret == null? null: ret.getV1(); + } + + @Nullable + protected String getMethodName(Long connectClusterId, String action) { + String versionStr = connectClusterService.getClusterVersion(connectClusterId); + + return getMethodName(versionStr, action); + } + + @Nullable + protected VersionConnectJmxInfo getJMXInfo(Long connectClusterId, String action) { + String versionStr = connectClusterService.getClusterVersion(connectClusterId); + + return (VersionConnectJmxInfo) getJMXInfo(versionStr, action); + } + +} diff --git a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/version/BaseKafkaVersionControlService.java b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/version/BaseKafkaVersionControlService.java new file mode 100644 index 00000000..45db0c18 --- /dev/null +++ b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/version/BaseKafkaVersionControlService.java @@ -0,0 +1,52 @@ +package com.xiaojukeji.know.streaming.km.core.service.version; + +import 
com.xiaojukeji.know.streaming.km.common.bean.entity.param.VersionItemParam; +import com.xiaojukeji.know.streaming.km.common.bean.entity.version.VersionJmxInfo; +import com.xiaojukeji.know.streaming.km.common.exception.VCHandlerNotExistException; +import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil; +import com.xiaojukeji.know.streaming.km.common.utils.Tuple; +import com.xiaojukeji.know.streaming.km.core.service.cluster.ClusterPhyService; +import org.springframework.beans.factory.annotation.Autowired; + +import javax.annotation.Nullable; + +/** + * @author didi + */ +public abstract class BaseKafkaVersionControlService extends BaseVersionControlService{ + @Autowired + private ClusterPhyService clusterPhyService; + + @Nullable + protected Object doVCHandler(Long clusterPhyId, String action, VersionItemParam param) throws VCHandlerNotExistException { + String versionStr = clusterPhyService.getVersionFromCacheFirst(clusterPhyId); + + LOGGER.info( + "method=doVCHandler||clusterId={}||action={}||type={}||param={}", + clusterPhyId, action, getVersionItemType().getMessage(), ConvertUtil.obj2Json(param) + ); + + Tuple ret = doVCHandler(versionStr, action, param); + + LOGGER.debug( + "method=doVCHandler||clusterId={}||action={}||methodName={}||type={}||param={}||ret={}!", + clusterPhyId, action, ret != null ?ret.getV2(): "", getVersionItemType().getMessage(), ConvertUtil.obj2Json(param), ConvertUtil.obj2Json(ret) + ); + + return ret == null? 
null: ret.getV1(); + } + + @Nullable + protected String getMethodName(Long clusterPhyId, String action) { + String versionStr = clusterPhyService.getVersionFromCacheFirst(clusterPhyId); + + return getMethodName(versionStr, action); + } + + @Nullable + protected VersionJmxInfo getJMXInfo(Long clusterPhyId, String action){ + String versionStr = clusterPhyService.getVersionFromCacheFirst(clusterPhyId); + + return getJMXInfo(versionStr, action); + } +} diff --git a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/version/BaseMetricService.java b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/version/BaseMetricService.java index d97a5243..82f841e1 100644 --- a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/version/BaseMetricService.java +++ b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/version/BaseMetricService.java @@ -17,7 +17,7 @@ import java.util.stream.Collectors; /** * @author didi */ -public abstract class BaseMetricService extends BaseVersionControlService { +public abstract class BaseMetricService extends BaseKafkaVersionControlService { private static final ILog LOGGER = LogFactory.getLog(BaseMetricService.class); private List metricNames = new ArrayList<>(); diff --git a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/version/BaseVersionControlService.java b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/version/BaseVersionControlService.java index cb35befd..06c06286 100644 --- a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/version/BaseVersionControlService.java +++ b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/version/BaseVersionControlService.java @@ -1,6 +1,5 @@ package com.xiaojukeji.know.streaming.km.core.service.version; -import com.alibaba.fastjson.JSON; import com.didiglobal.logi.log.ILog; import com.didiglobal.logi.log.LogFactory; import 
com.xiaojukeji.know.streaming.km.common.bean.entity.param.VersionItemParam; @@ -10,6 +9,7 @@ import com.xiaojukeji.know.streaming.km.common.bean.entity.version.VersionMethod import com.xiaojukeji.know.streaming.km.common.enums.version.VersionEnum; import com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemTypeEnum; import com.xiaojukeji.know.streaming.km.common.exception.VCHandlerNotExistException; +import com.xiaojukeji.know.streaming.km.common.utils.Tuple; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.DependsOn; import org.springframework.util.CollectionUtils; @@ -56,20 +56,14 @@ public abstract class BaseVersionControlService { } @Nullable - protected Object doVCHandler(Long clusterPhyId, String action, VersionItemParam param) throws VCHandlerNotExistException { - String methodName = getMethodName(clusterPhyId, action); - Object ret = versionControlService.doHandler(getVersionItemType(), methodName, param); + protected Tuple doVCHandler(String version, String action, VersionItemParam param) throws VCHandlerNotExistException { + String methodName = getMethodName(version, action); - LOGGER.debug( - "method=doVCHandler||clusterId={}||action={}||methodName={}||type={}param={}||ret={}!", - clusterPhyId, action, methodName, getVersionItemType().getMessage(), JSON.toJSONString(param), JSON.toJSONString(ret) - ); - - return ret; + return new Tuple<>(versionControlService.doHandler(getVersionItemType(), methodName, param), methodName); } - protected String getMethodName(Long clusterId, String action) { - VersionControlItem item = versionControlService.getVersionControlItem(clusterId, getVersionItemType().getCode(), action); + protected String getMethodName(String version, String action) { + VersionControlItem item = versionControlService.getVersionControlItem(version, getVersionItemType().getCode(), action); if (null == item) { return ""; } @@ -81,8 +75,8 @@ public abstract class 
BaseVersionControlService { return ""; } - protected VersionJmxInfo getJMXInfo(Long clusterId, String action){ - VersionControlItem item = versionControlService.getVersionControlItem(clusterId, getVersionItemType().getCode(), action); + protected VersionJmxInfo getJMXInfo(String version, String action){ + VersionControlItem item = versionControlService.getVersionControlItem(version, getVersionItemType().getCode(), action); if (null == item) { return null; } diff --git a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/version/VersionControlService.java b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/version/VersionControlService.java index 093722a2..f46399f4 100644 --- a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/version/VersionControlService.java +++ b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/version/VersionControlService.java @@ -6,7 +6,6 @@ import com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemTypeEnum import com.xiaojukeji.know.streaming.km.common.exception.VCHandlerNotExistException; import java.util.List; -import java.util.Map; import java.util.function.Function; /** @@ -45,11 +44,11 @@ public interface VersionControlService { /** * 获取对应集群的版本兼容项 - * @param clusterId + * @param version * @param type * @return */ - List listVersionControlItem(Long clusterId, Integer type); + List listVersionControlItem(String version, Integer type); /** * 获取对应type所有的的版本兼容项 @@ -68,27 +67,18 @@ public interface VersionControlService { /** * 查询对应指标的版本兼容项 - * @param clusterId + * @param version * @param type * @param itemName * @return */ - VersionControlItem getVersionControlItem(Long clusterId, Integer type, String itemName); + VersionControlItem getVersionControlItem(String version, Integer type, String itemName); /** * 判断 item 是否被 clusterId 对应的版本支持 - * @param clusterId + * @param version * @param item * @return */ - boolean isClusterSupport(Long clusterId, 
VersionControlItem item); - - /** - * 查询对应指标的版本兼容项 - * @param clusterId - * @param type - * @param itemNames - * @return - */ - Map getVersionControlItems(Long clusterId, Integer type, List itemNames); + boolean isClusterSupport(String version, VersionControlItem item); } diff --git a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/version/impl/VersionControlServiceImpl.java b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/version/impl/VersionControlServiceImpl.java index de563b03..b70c5ee0 100644 --- a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/version/impl/VersionControlServiceImpl.java +++ b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/version/impl/VersionControlServiceImpl.java @@ -7,11 +7,8 @@ import com.xiaojukeji.know.streaming.km.common.component.SpringTool; import com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemTypeEnum; import com.xiaojukeji.know.streaming.km.common.exception.VCHandlerNotExistException; import com.xiaojukeji.know.streaming.km.common.utils.VersionUtil; -import com.xiaojukeji.know.streaming.km.core.service.cluster.ClusterPhyService; import com.xiaojukeji.know.streaming.km.core.service.version.VersionControlMetricService; import com.xiaojukeji.know.streaming.km.core.service.version.VersionControlService; -import lombok.extern.slf4j.Slf4j; -import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.DependsOn; import org.springframework.stereotype.Service; import org.springframework.util.CollectionUtils; @@ -26,18 +23,24 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CopyOnWriteArrayList; import java.util.function.Function; -@Slf4j @DependsOn("springTool") @Service("versionControlService") public class VersionControlServiceImpl implements VersionControlService { + /** + * key:versionItemType + */ + private final Map> versionItemMap = new ConcurrentHashMap<>(); 
- @Autowired - private ClusterPhyService clusterPhyService; + /** + * key:versionItemType + * key1:metricName + */ + private final Map>> versionItemMetricNameMap = new ConcurrentHashMap<>(); - private final Map> versionItemMap = new ConcurrentHashMap<>(); - private final Map>> versionItemMetricNameMap = new ConcurrentHashMap<>(); - - private final Map> functionMap = new ConcurrentHashMap<>(); + /** + * key : VersionItemTypeEnum.code@methodName + */ + private final Map> functionMap = new ConcurrentHashMap<>(); @PostConstruct public void init(){ @@ -51,7 +54,7 @@ public class VersionControlServiceImpl implements VersionControlService { @Override public void registerHandler(VersionItemTypeEnum typeEnum, String methodName, Function func){ - functionMap.put(typeEnum.getCode() + "@" + methodName , func); + functionMap.put(versionFunctionKey(typeEnum.getCode(), methodName), func); } @Override @@ -76,24 +79,23 @@ public class VersionControlServiceImpl implements VersionControlService { itemMap.put(action, controlItems); versionItemMetricNameMap.put(typeCode, itemMap); - functionMap.put(typeCode + "@" + methodName , func); + functionMap.put(versionFunctionKey(typeCode, methodName), func); } @Nullable @Override public Object doHandler(VersionItemTypeEnum typeEnum, String methodName, VersionItemParam param) throws VCHandlerNotExistException { - Function func = functionMap.get(typeEnum.getCode() + "@" + methodName); + Function func = functionMap.get(versionFunctionKey(typeEnum.getCode(), methodName)); if(null == func) { - throw new VCHandlerNotExistException(typeEnum.getCode() + "@" + methodName); + throw new VCHandlerNotExistException(versionFunctionKey(typeEnum.getCode(), methodName)); } return func.apply(param); } @Override - public List listVersionControlItem(Long clusterId, Integer type) { - String versionStr = clusterPhyService.getVersionFromCacheFirst(clusterId); - long versionLong = VersionUtil.normailze(versionStr); + public List listVersionControlItem(String version, 
Integer type) { + long versionLong = VersionUtil.normailze(version); List items = versionItemMap.get(type); if(CollectionUtils.isEmpty(items)) { @@ -122,8 +124,8 @@ public class VersionControlServiceImpl implements VersionControlService { } @Override - public VersionControlItem getVersionControlItem(Long clusterId, Integer type, String itemName) { - List items = listVersionControlItem(clusterId, type); + public VersionControlItem getVersionControlItem(String version, Integer type, String itemName) { + List items = listVersionControlItem(version, type); for(VersionControlItem item : items){ if(itemName.equals(item.getName())){ @@ -135,24 +137,13 @@ public class VersionControlServiceImpl implements VersionControlService { } @Override - public boolean isClusterSupport(Long clusterId, VersionControlItem item){ - String versionStr = clusterPhyService.getVersionFromCacheFirst(clusterId); - long versionLong = VersionUtil.normailze(versionStr); - + public boolean isClusterSupport(String version, VersionControlItem item) { + long versionLong = VersionUtil.normailze(version); return item.getMinVersion() <= versionLong && versionLong < item.getMaxVersion(); } - @Override - public Map getVersionControlItems(Long clusterId, Integer type, List itemNames){ - Map versionControlItemMap = new HashMap<>(); - - for(String itemName : itemNames){ - VersionControlItem item = getVersionControlItem(clusterId, type, itemName); - if(null != item){ - versionControlItemMap.put(itemName, item); - } - } - - return versionControlItemMap; + /**************************************************** private method ****************************************************/ + private String versionFunctionKey(int typeCode, String methodName){ + return typeCode + "@" + methodName; } } diff --git a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/version/metrics/BaseMetricVersionMetric.java 
b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/version/metrics/BaseMetricVersionMetric.java index 41e702c7..7484fa2d 100644 --- a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/version/metrics/BaseMetricVersionMetric.java +++ b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/version/metrics/BaseMetricVersionMetric.java @@ -1,8 +1,10 @@ package com.xiaojukeji.know.streaming.km.core.service.version.metrics; +import com.xiaojukeji.know.streaming.km.common.bean.entity.version.VersionConnectJmxInfo; import com.xiaojukeji.know.streaming.km.common.bean.entity.version.VersionMetricControlItem; import com.xiaojukeji.know.streaming.km.common.bean.entity.version.VersionMethodInfo; import com.xiaojukeji.know.streaming.km.common.bean.entity.version.VersionJmxInfo; +import com.xiaojukeji.know.streaming.km.common.enums.connect.ConnectorTypeEnum; import com.xiaojukeji.know.streaming.km.core.service.version.VersionControlMetricService; import static com.xiaojukeji.know.streaming.km.common.enums.version.VersionEnum.V_0_10_0_0; @@ -58,4 +60,17 @@ public abstract class BaseMetricVersionMetric implements VersionControlMetricSer jmxExtendInfo.setMethodName(methodName); return jmxExtendInfo; } + + protected VersionConnectJmxInfo buildConnectJMXMethodExtend(String methodName) { + VersionConnectJmxInfo connectorJmxInfo = new VersionConnectJmxInfo(); + connectorJmxInfo.setMethodName(methodName); + return connectorJmxInfo; + } + + protected VersionConnectJmxInfo buildConnectJMXMethodExtend(String methodName, ConnectorTypeEnum type) { + VersionConnectJmxInfo connectorJmxInfo = new VersionConnectJmxInfo(); + connectorJmxInfo.setMethodName(methodName); + connectorJmxInfo.setType(type); + return connectorJmxInfo; + } } diff --git a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/version/metrics/connect/ConnectClusterMetricVersionItems.java 
b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/version/metrics/connect/ConnectClusterMetricVersionItems.java new file mode 100644 index 00000000..09b7483d --- /dev/null +++ b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/version/metrics/connect/ConnectClusterMetricVersionItems.java @@ -0,0 +1,110 @@ +package com.xiaojukeji.know.streaming.km.core.service.version.metrics.connect; + +import com.xiaojukeji.know.streaming.km.common.bean.entity.version.VersionMetricControlItem; +import com.xiaojukeji.know.streaming.km.common.constant.Constant; +import com.xiaojukeji.know.streaming.km.core.service.version.metrics.BaseMetricVersionMetric; +import org.springframework.stereotype.Component; + +import java.util.ArrayList; +import java.util.List; + +import static com.xiaojukeji.know.streaming.km.common.bean.entity.version.VersionMetricControlItem.CATEGORY_CLUSTER; +import static com.xiaojukeji.know.streaming.km.common.bean.entity.version.VersionMetricControlItem.CATEGORY_PERFORMANCE; +import static com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemTypeEnum.METRIC_CONNECT_CLUSTER; +import static com.xiaojukeji.know.streaming.km.common.jmx.JmxAttribute.*; +import static com.xiaojukeji.know.streaming.km.common.jmx.JmxName.JMX_CONNECT_WORKER_METRIC; +import static com.xiaojukeji.know.streaming.km.core.service.connect.cluster.impl.ConnectClusterMetricServiceImpl.*; + + +@Component +public class ConnectClusterMetricVersionItems extends BaseMetricVersionMetric { + public static final String CONNECT_CLUSTER_METRIC_CONNECTOR_COUNT = "ConnectorCount"; + public static final String CONNECT_CLUSTER_METRIC_TASK_COUNT = "TaskCount"; + + public static final String CONNECT_CLUSTER_METRIC_CONNECTOR_STARTUP_ATTEMPTS_TOTAL = "ConnectorStartupAttemptsTotal"; + + public static final String CONNECT_CLUSTER_METRIC_CONNECTOR_STARTUP_FAILURE_PERCENTAGE = "ConnectorStartupFailurePercentage"; + + public static final String 
CONNECT_CLUSTER_METRIC_CONNECTOR_STARTUP_FAILURE_TOTAL = "ConnectorStartupFailureTotal"; + + public static final String CONNECT_CLUSTER_METRIC_CONNECTOR_STARTUP_SUCCESS_PERCENTAGE = "ConnectorStartupSuccessPercentage"; + + public static final String CONNECT_CLUSTER_METRIC_CONNECTOR_STARTUP_SUCCESS_TOTAL = "ConnectorStartupSuccessTotal"; + + public static final String CONNECT_CLUSTER_METRIC_TASK_STARTUP_ATTEMPTS_TOTAL = "TaskStartupAttemptsTotal"; + + public static final String CONNECT_CLUSTER_METRIC_TASK_STARTUP_FAILURE_PERCENTAGE = "TaskStartupFailurePercentage"; + + public static final String CONNECT_CLUSTER_METRIC_TASK_STARTUP_FAILURE_TOTAL = "TaskStartupFailureTotal"; + + public static final String CONNECT_CLUSTER_METRIC_TASK_STARTUP_SUCCESS_PERCENTAGE = "TaskStartupSuccessPercentage"; + + public static final String CONNECT_CLUSTER_METRIC_TASK_STARTUP_SUCCESS_TOTAL = "TaskStartupSuccessTotal"; + + public static final String CONNECT_CLUSTER_METRIC_COLLECT_COST_TIME = Constant.COLLECT_METRICS_COST_TIME_METRICS_NAME; + + + @Override + public int versionItemType() { + return METRIC_CONNECT_CLUSTER.getCode(); + } + + @Override + public List init() { + List items = new ArrayList<>(); + items.add(buildAllVersionsItem() + .name(CONNECT_CLUSTER_METRIC_CONNECTOR_COUNT).unit("个").desc("连接器数量").category(CATEGORY_CLUSTER) + .extend(buildConnectJMXMethodExtend(CONNECT_CLUSTER_METHOD_GET_WORKER_METRIC_SUM) + .jmxObjectName(JMX_CONNECT_WORKER_METRIC).jmxAttribute(TASK_COUNT))); + items.add(buildAllVersionsItem() + .name(CONNECT_CLUSTER_METRIC_TASK_COUNT).unit("个").desc("任务数量").category(CATEGORY_CLUSTER) + .extend(buildConnectJMXMethodExtend(CONNECT_CLUSTER_METHOD_GET_WORKER_METRIC_SUM) + .jmxObjectName(JMX_CONNECT_WORKER_METRIC).jmxAttribute(TASK_COUNT))); + items.add(buildAllVersionsItem() + .name(CONNECT_CLUSTER_METRIC_CONNECTOR_STARTUP_ATTEMPTS_TOTAL).unit("次").desc("连接器启动次数").category(CATEGORY_CLUSTER) + 
.extend(buildConnectJMXMethodExtend(CONNECT_CLUSTER_METHOD_GET_WORKER_METRIC_SUM) + .jmxObjectName(JMX_CONNECT_WORKER_METRIC).jmxAttribute(CONNECTOR_STARTUP_ATTEMPTS_TOTAL))); + items.add(buildAllVersionsItem() + .name(CONNECT_CLUSTER_METRIC_CONNECTOR_STARTUP_FAILURE_PERCENTAGE).unit("%").desc("连接器启动失败概率").category(CATEGORY_CLUSTER) + .extend(buildConnectJMXMethodExtend(CONNECT_CLUSTER_METHOD_GET_WORKER_METRIC_AVG) + .jmxObjectName(JMX_CONNECT_WORKER_METRIC).jmxAttribute(CONNECTOR_STARTUP_FAILURE_PERCENTAGE))); + items.add(buildAllVersionsItem() + .name(CONNECT_CLUSTER_METRIC_CONNECTOR_STARTUP_FAILURE_TOTAL).unit("次").desc("连接器启动失败次数").category(CATEGORY_CLUSTER) + .extend(buildConnectJMXMethodExtend(CONNECT_CLUSTER_METHOD_GET_WORKER_METRIC_SUM) + .jmxObjectName(JMX_CONNECT_WORKER_METRIC).jmxAttribute(CONNECTOR_STARTUP_FAILURE_TOTAL))); + items.add(buildAllVersionsItem() + .name(CONNECT_CLUSTER_METRIC_CONNECTOR_STARTUP_SUCCESS_PERCENTAGE).unit("%").desc("连接器启动成功概率").category(CATEGORY_CLUSTER) + .extend(buildConnectJMXMethodExtend(CONNECT_CLUSTER_METHOD_GET_WORKER_METRIC_AVG) + .jmxObjectName(JMX_CONNECT_WORKER_METRIC).jmxAttribute(CONNECTOR_STARTUP_SUCCESS_PERCENTAGE))); + items.add(buildAllVersionsItem() + .name(CONNECT_CLUSTER_METRIC_CONNECTOR_STARTUP_SUCCESS_TOTAL).unit("次").desc("连接器启动成功次数").category(CATEGORY_CLUSTER) + .extend(buildConnectJMXMethodExtend(CONNECT_CLUSTER_METHOD_GET_WORKER_METRIC_SUM) + .jmxObjectName(JMX_CONNECT_WORKER_METRIC).jmxAttribute(CONNECTOR_STARTUP_SUCCESS_TOTAL))); + items.add(buildAllVersionsItem() + .name(CONNECT_CLUSTER_METRIC_TASK_STARTUP_ATTEMPTS_TOTAL).unit("次").desc("任务启动次数").category(CATEGORY_CLUSTER) + .extend(buildConnectJMXMethodExtend(CONNECT_CLUSTER_METHOD_GET_WORKER_METRIC_SUM) + .jmxObjectName(JMX_CONNECT_WORKER_METRIC).jmxAttribute(TASK_STARTUP_ATTEMPTS_TOTAL))); + items.add(buildAllVersionsItem() + .name(CONNECT_CLUSTER_METRIC_TASK_STARTUP_FAILURE_PERCENTAGE).unit("%").desc("任务启动失败概率").category(CATEGORY_CLUSTER) + 
.extend(buildConnectJMXMethodExtend(CONNECT_CLUSTER_METHOD_GET_WORKER_METRIC_AVG) + .jmxObjectName(JMX_CONNECT_WORKER_METRIC).jmxAttribute(TASK_STARTUP_FAILURE_PERCENTAGE))); + items.add(buildAllVersionsItem() + .name(CONNECT_CLUSTER_METRIC_TASK_STARTUP_FAILURE_TOTAL).unit("次").desc("任务启动失败次数").category(CATEGORY_CLUSTER) + .extend(buildConnectJMXMethodExtend(CONNECT_CLUSTER_METHOD_GET_WORKER_METRIC_SUM) + .jmxObjectName(JMX_CONNECT_WORKER_METRIC).jmxAttribute(TASK_STARTUP_FAILURE_TOTAL))); + items.add(buildAllVersionsItem() + .name(CONNECT_CLUSTER_METRIC_TASK_STARTUP_SUCCESS_PERCENTAGE).unit("%").desc("任务启动成功概率").category(CATEGORY_CLUSTER) + .extend(buildConnectJMXMethodExtend(CONNECT_CLUSTER_METHOD_GET_WORKER_METRIC_AVG) + .jmxObjectName(JMX_CONNECT_WORKER_METRIC).jmxAttribute(TASK_STARTUP_SUCCESS_PERCENTAGE))); + items.add(buildAllVersionsItem() + .name(CONNECT_CLUSTER_METRIC_TASK_STARTUP_SUCCESS_TOTAL).unit("次").desc("任务启动成功次数").category(CATEGORY_CLUSTER) + .extend(buildConnectJMXMethodExtend(CONNECT_CLUSTER_METHOD_GET_WORKER_METRIC_SUM) + .jmxObjectName(JMX_CONNECT_WORKER_METRIC).jmxAttribute(TASK_STARTUP_SUCCESS_TOTAL))); + items.add(buildAllVersionsItem() + .name(CONNECT_CLUSTER_METRIC_COLLECT_COST_TIME).unit("秒").desc("采集connect集群指标耗时").category(CATEGORY_PERFORMANCE) + .extendMethod(CONNECT_CLUSTER_METHOD_DO_NOTHING)); + return items; + } + +} + diff --git a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/version/metrics/connect/ConnectorMetricVersionItems.java b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/version/metrics/connect/ConnectorMetricVersionItems.java new file mode 100644 index 00000000..cb9ddd07 --- /dev/null +++ b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/version/metrics/connect/ConnectorMetricVersionItems.java @@ -0,0 +1,310 @@ +package com.xiaojukeji.know.streaming.km.core.service.version.metrics.connect; + +import 
com.xiaojukeji.know.streaming.km.common.bean.entity.version.VersionMetricControlItem; +import com.xiaojukeji.know.streaming.km.common.constant.Constant; +import com.xiaojukeji.know.streaming.km.core.service.version.metrics.BaseMetricVersionMetric; +import org.springframework.stereotype.Component; + +import java.util.ArrayList; +import java.util.List; + +import static com.xiaojukeji.know.streaming.km.common.bean.entity.version.VersionMetricControlItem.*; +import static com.xiaojukeji.know.streaming.km.common.enums.connect.ConnectorTypeEnum.SINK; +import static com.xiaojukeji.know.streaming.km.common.enums.connect.ConnectorTypeEnum.SOURCE; +import static com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemTypeEnum.METRIC_CONNECT_CONNECTOR; +import static com.xiaojukeji.know.streaming.km.common.jmx.JmxAttribute.*; +import static com.xiaojukeji.know.streaming.km.common.jmx.JmxName.*; +import static com.xiaojukeji.know.streaming.km.core.service.broker.impl.BrokerMetricServiceImpl.BROKER_METHOD_DO_NOTHING; +import static com.xiaojukeji.know.streaming.km.core.service.connect.connector.impl.ConnectorMetricServiceImpl.*; + + +@Component +public class ConnectorMetricVersionItems extends BaseMetricVersionMetric { + + public static final String CONNECTOR_METRIC_COLLECT_COST_TIME = Constant.COLLECT_METRICS_COST_TIME_METRICS_NAME; + + public static final String CONNECTOR_METRIC_HEALTH_STATE = "HealthState"; + + public static final String CONNECTOR_METRIC_CONNECTOR_TOTAL_TASK_COUNT = "ConnectorTotalTaskCount"; + + public static final String CONNECTOR_METRIC_HEALTH_CHECK_PASSED = "HealthCheckPassed"; + + public static final String CONNECTOR_METRIC_HEALTH_CHECK_TOTAL = "HealthCheckTotal"; + + public static final String CONNECTOR_METRIC_CONNECTOR_RUNNING_TASK_COUNT = "ConnectorRunningTaskCount"; + + public static final String CONNECTOR_METRIC_CONNECTOR_PAUSED_TASK_COUNT = "ConnectorPausedTaskCount"; + + public static final String 
CONNECTOR_METRIC_CONNECTOR_FAILED_TASK_COUNT = "ConnectorFailedTaskCount"; + + public static final String CONNECTOR_METRIC_CONNECTOR_UNASSIGNED_TASK_COUNT = "ConnectorUnassignedTaskCount"; + + public static final String CONNECTOR_METRIC_BATCH_SIZE_AVG = "BatchSizeAvg"; + + public static final String CONNECTOR_METRIC_BATCH_SIZE_MAX = "BatchSizeMax"; + + public static final String CONNECTOR_METRIC_OFFSET_COMMIT_AVG_TIME_MS = "OffsetCommitAvgTimeMs"; + + public static final String CONNECTOR_METRIC_OFFSET_COMMIT_MAX_TIME_MS = "OffsetCommitMaxTimeMs"; + + public static final String CONNECTOR_METRIC_OFFSET_COMMIT_FAILURE_PERCENTAGE = "OffsetCommitFailurePercentage"; + + public static final String CONNECTOR_METRIC_OFFSET_COMMIT_SUCCESS_PERCENTAGE = "OffsetCommitSuccessPercentage"; + + public static final String CONNECTOR_METRIC_POLL_BATCH_AVG_TIME_MS = "PollBatchAvgTimeMs"; + + public static final String CONNECTOR_METRIC_POLL_BATCH_MAX_TIME_MS = "PollBatchMaxTimeMs"; + + public static final String CONNECTOR_METRIC_SOURCE_RECORD_ACTIVE_COUNT = "SourceRecordActiveCount"; + + public static final String CONNECTOR_METRIC_SOURCE_RECORD_ACTIVE_COUNT_AVG = "SourceRecordActiveCountAvg"; + + public static final String CONNECTOR_METRIC_SOURCE_RECORD_ACTIVE_COUNT_MAX = "SourceRecordActiveCountMax"; + + public static final String CONNECTOR_METRIC_SOURCE_RECORD_POLL_RATE = "SourceRecordPollRate"; + + public static final String CONNECTOR_METRIC_SOURCE_RECORD_POLL_TOTAL = "SourceRecordPollTotal"; + + public static final String CONNECTOR_METRIC_SOURCE_RECORD_WRITE_RATE = "SourceRecordWriteRate"; + + public static final String CONNECTOR_METRIC_SOURCE_RECORD_WRITE_TOTAL = "SourceRecordWriteTotal"; + + public static final String CONNECTOR_METRIC_OFFSET_COMMIT_COMPLETION_RATE = "OffsetCommitCompletionRate"; + + public static final String CONNECTOR_METRIC_OFFSET_COMMIT_COMPLETION_TOTAL = "OffsetCommitCompletionTotal"; + + public static final String CONNECTOR_METRIC_OFFSET_COMMIT_SKIP_RATE = 
"OffsetCommitSkipRate"; + + public static final String CONNECTOR_METRIC_OFFSET_COMMIT_SKIP_TOTAL = "OffsetCommitSkipTotal"; + + public static final String CONNECTOR_METRIC_PARTITION_COUNT = "PartitionCount"; + + public static final String CONNECTOR_METRIC_PUT_BATCH_AVG_TIME_MS = "PutBatchAvgTimeMs"; + + public static final String CONNECTOR_METRIC_PUT_BATCH_MAX_TIME_MS = "PutBatchMaxTimeMs"; + + public static final String CONNECTOR_METRIC_SINK_RECORD_ACTIVE_COUNT = "SinkRecordActiveCount"; + + public static final String CONNECTOR_METRIC_SINK_RECORD_ACTIVE_COUNT_AVG = "SinkRecordActiveCountAvg"; + + public static final String CONNECTOR_METRIC_SINK_RECORD_ACTIVE_COUNT_MAX = "SinkRecordActiveCountMax"; + + public static final String CONNECTOR_METRIC_SINK_RECORD_LAG_MAX = "SinkRecordLagMax"; + + public static final String CONNECTOR_METRIC_SINK_RECORD_READ_RATE = "SinkRecordReadRate"; + + public static final String CONNECTOR_METRIC_SINK_RECORD_READ_TOTAL = "SinkRecordReadTotal"; + + public static final String CONNECTOR_METRIC_SINK_RECORD_SEND_RATE = "SinkRecordSendRate"; + + public static final String CONNECTOR_METRIC_SINK_RECORD_SEND_TOTAL = "SinkRecordSendTotal"; + + public static final String CONNECTOR_METRIC_DEADLETTERQUEUE_PRODUCE_FAILURES = "DeadletterqueueProduceFailures"; + + public static final String CONNECTOR_METRIC_DEADLETTERQUEUE_PRODUCE_REQUESTS = "DeadletterqueueProduceRequests"; + + public static final String CONNECTOR_METRIC_LAST_ERROR_TIMESTAMP = "LastErrorTimestamp"; + + public static final String CONNECTOR_METRIC_TOTAL_ERRORS_LOGGED = "TotalErrorsLogged"; + + public static final String CONNECTOR_METRIC_TOTAL_RECORD_ERRORS = "TotalRecordErrors"; + + public static final String CONNECTOR_METRIC_TOTAL_RECORD_FAILURES = "TotalRecordFailures"; + + public static final String CONNECTOR_METRIC_TOTAL_RECORDS_SKIPPED = "TotalRecordsSkipped"; + + public static final String CONNECTOR_METRIC_TOTAL_RETRIES = "TotalRetries"; + + @Override + public int 
versionItemType() { + return METRIC_CONNECT_CONNECTOR.getCode(); + } + + @Override + public List init() { + List items = new ArrayList<>(); + items.add(buildAllVersionsItem() + .name(CONNECTOR_METRIC_COLLECT_COST_TIME).unit("秒").desc("采集connector指标的耗时").category(CATEGORY_PERFORMANCE) + .extendMethod(CONNECTOR_METHOD_DO_NOTHING)); + items.add(buildAllVersionsItem() + .name(CONNECTOR_METRIC_HEALTH_STATE).unit("0:好 1:中 2:差 3:宕机").desc("健康状态(0:好 1:中 2:差 3:宕机)").category(CATEGORY_HEALTH) + .extendMethod(CONNECTOR_METHOD_GET_METRIC_HEALTH_SCORE)); + items.add(buildAllVersionsItem() + .name(CONNECTOR_METRIC_CONNECTOR_TOTAL_TASK_COUNT).unit("个").desc("所有任务数量").category(CATEGORY_PERFORMANCE) + .extend(buildConnectJMXMethodExtend(CONNECTOR_METHOD_GET_CONNECT_WORKER_METRIC_SUM) + .jmxObjectName(JMX_CONNECT_WORKER_CONNECTOR_METRIC).jmxAttribute(CONNECTOR_TOTAL_TASK_COUNT))); + items.add(buildAllVersionsItem() + .name(CONNECTOR_METRIC_CONNECTOR_RUNNING_TASK_COUNT).unit("个").desc("运行状态的任务数量").category(CATEGORY_PERFORMANCE) + .extend(buildConnectJMXMethodExtend(CONNECTOR_METHOD_GET_CONNECT_WORKER_METRIC_SUM) + .jmxObjectName(JMX_CONNECT_WORKER_CONNECTOR_METRIC).jmxAttribute(CONNECTOR_RUNNING_TASK_COUNT))); + items.add(buildAllVersionsItem() + .name(CONNECTOR_METRIC_CONNECTOR_PAUSED_TASK_COUNT).unit("个").desc("暂停状态的任务数量").category(CATEGORY_PERFORMANCE) + .extend(buildConnectJMXMethodExtend(CONNECTOR_METHOD_GET_CONNECT_WORKER_METRIC_SUM) + .jmxObjectName(JMX_CONNECT_WORKER_CONNECTOR_METRIC).jmxAttribute(CONNECTOR_PAUSED_TASK_COUNT))); + items.add(buildAllVersionsItem() + .name(CONNECTOR_METRIC_CONNECTOR_FAILED_TASK_COUNT).unit("个").desc("失败状态的任务数量").category(CATEGORY_PERFORMANCE) + .extend(buildConnectJMXMethodExtend(CONNECTOR_METHOD_GET_CONNECT_WORKER_METRIC_SUM) + .jmxObjectName(JMX_CONNECT_WORKER_CONNECTOR_METRIC).jmxAttribute(CONNECTOR_FAILED_TASK_COUNT))); + items.add(buildAllVersionsItem() + 
.name(CONNECTOR_METRIC_CONNECTOR_UNASSIGNED_TASK_COUNT).unit("个").desc("未被分配的任务数量").category(CATEGORY_PERFORMANCE) + .extend(buildConnectJMXMethodExtend(CONNECTOR_METHOD_GET_CONNECT_WORKER_METRIC_SUM) + .jmxObjectName(JMX_CONNECT_WORKER_CONNECTOR_METRIC).jmxAttribute(CONNECTOR_UNASSIGNED_TASK_COUNT))); + + + items.add(buildAllVersionsItem() + .name(CONNECTOR_METRIC_BATCH_SIZE_AVG).unit("条").desc("批次数量平均值").category(CATEGORY_PERFORMANCE) + .extend(buildConnectJMXMethodExtend(CONNECTOR_METHOD_GET_CONNECTOR_TASK_METRICS_AVG) + .jmxObjectName(JMX_CONNECTOR_TASK_CONNECTOR_METRIC).jmxAttribute(BATCH_SIZE_AVG))); + items.add(buildAllVersionsItem() + .name(CONNECTOR_METRIC_BATCH_SIZE_MAX).unit("条").desc("批次数量最大值").category(CATEGORY_PERFORMANCE) + .extend(buildConnectJMXMethodExtend(CONNECTOR_METHOD_GET_CONNECTOR_TASK_METRICS_MAX) + .jmxObjectName(JMX_CONNECTOR_TASK_CONNECTOR_METRIC).jmxAttribute(BATCH_SIZE_MAX))); + items.add(buildAllVersionsItem() + .name(CONNECTOR_METRIC_OFFSET_COMMIT_AVG_TIME_MS).unit("ms").desc("位点提交平均耗时").category(CATEGORY_PERFORMANCE) + .extend(buildConnectJMXMethodExtend(CONNECTOR_METHOD_GET_CONNECTOR_TASK_METRICS_AVG) + .jmxObjectName(JMX_CONNECTOR_TASK_CONNECTOR_METRIC).jmxAttribute(OFFSET_COMMIT_AVG_TIME_MS))); + items.add(buildAllVersionsItem() + .name(CONNECTOR_METRIC_OFFSET_COMMIT_MAX_TIME_MS).unit("ms").desc("位点提交最大耗时").category(CATEGORY_PERFORMANCE) + .extend(buildConnectJMXMethodExtend(CONNECTOR_METHOD_GET_CONNECTOR_TASK_METRICS_MAX) + .jmxObjectName(JMX_CONNECTOR_TASK_CONNECTOR_METRIC).jmxAttribute(OFFSET_COMMIT_MAX_TIME_MS))); + items.add(buildAllVersionsItem() + .name(CONNECTOR_METRIC_OFFSET_COMMIT_FAILURE_PERCENTAGE).unit("%").desc("位点提交失败概率").category(CATEGORY_PERFORMANCE) + .extend(buildConnectJMXMethodExtend(CONNECTOR_METHOD_GET_CONNECTOR_TASK_METRICS_AVG) + .jmxObjectName(JMX_CONNECTOR_TASK_CONNECTOR_METRIC).jmxAttribute(OFFSET_COMMIT_FAILURE_PERCENTAGE))); + items.add(buildAllVersionsItem() + 
.name(CONNECTOR_METRIC_OFFSET_COMMIT_SUCCESS_PERCENTAGE).unit("%").desc("位点提交成功概率").category(CATEGORY_PERFORMANCE) + .extend(buildConnectJMXMethodExtend(CONNECTOR_METHOD_GET_CONNECTOR_TASK_METRICS_AVG) + .jmxObjectName(JMX_CONNECTOR_TASK_CONNECTOR_METRIC).jmxAttribute(OFFSET_COMMIT_SUCCESS_PERCENTAGE))); + + + items.add(buildAllVersionsItem() + .name(CONNECTOR_METRIC_POLL_BATCH_AVG_TIME_MS).unit("ms").desc("POLL平均耗时").category(CATEGORY_PERFORMANCE) + .extend(buildConnectJMXMethodExtend(CONNECTOR_METHOD_GET_CONNECTOR_TASK_METRICS_AVG, SOURCE) + .jmxObjectName(JMX_CONNECTOR_SOURCE_TASK_METRICS).jmxAttribute(POLL_BATCH_AVG_TIME_MS))); + items.add(buildAllVersionsItem() + .name(CONNECTOR_METRIC_POLL_BATCH_MAX_TIME_MS).unit("ms").desc("POLL最大耗时").category(CATEGORY_PERFORMANCE) + .extend(buildConnectJMXMethodExtend(CONNECTOR_METHOD_GET_CONNECTOR_TASK_METRICS_MAX, SOURCE) + .jmxObjectName(JMX_CONNECTOR_SOURCE_TASK_METRICS).jmxAttribute(POLL_BATCH_MAX_TIME_MS))); + items.add(buildAllVersionsItem() + .name(CONNECTOR_METRIC_SOURCE_RECORD_ACTIVE_COUNT).unit("条").desc("pending状态消息数量").category(CATEGORY_PERFORMANCE) + .extend(buildConnectJMXMethodExtend(CONNECTOR_METHOD_GET_CONNECTOR_TASK_METRICS_SUM, SOURCE) + .jmxObjectName(JMX_CONNECTOR_SOURCE_TASK_METRICS).jmxAttribute(SOURCE_RECORD_ACTIVE_COUNT))); + items.add(buildAllVersionsItem() + .name(CONNECTOR_METRIC_SOURCE_RECORD_ACTIVE_COUNT_AVG).unit("条").desc("pending状态平均消息数量").category(CATEGORY_PERFORMANCE) + .extend(buildConnectJMXMethodExtend(CONNECTOR_METHOD_GET_CONNECTOR_TASK_METRICS_SUM, SOURCE) + .jmxObjectName(JMX_CONNECTOR_SOURCE_TASK_METRICS).jmxAttribute(SOURCE_RECORD_ACTIVE_COUNT_AVG))); + items.add(buildAllVersionsItem() + .name(CONNECTOR_METRIC_SOURCE_RECORD_ACTIVE_COUNT_MAX).unit("条").desc("pending状态最大消息数量").category(CATEGORY_PERFORMANCE) + .extend(buildConnectJMXMethodExtend(CONNECTOR_METHOD_GET_CONNECTOR_TASK_METRICS_SUM, SOURCE) + 
.jmxObjectName(JMX_CONNECTOR_SOURCE_TASK_METRICS).jmxAttribute(SOURCE_RECORD_ACTIVE_COUNT_MAX))); + items.add(buildAllVersionsItem() + .name(CONNECTOR_METRIC_SOURCE_RECORD_POLL_RATE).unit(BYTE_PER_SEC).desc("消息读取速率").category(CATEGORY_FLOW) + .extend(buildConnectJMXMethodExtend(CONNECTOR_METHOD_GET_CONNECTOR_TASK_METRICS_SUM, SOURCE) + .jmxObjectName(JMX_CONNECTOR_SOURCE_TASK_METRICS).jmxAttribute(SOURCE_RECORD_POLL_RATE))); + items.add(buildAllVersionsItem() + .name(CONNECTOR_METRIC_SOURCE_RECORD_POLL_TOTAL).unit("条").desc("消息读取总数量").category(CATEGORY_PERFORMANCE) + .extend(buildConnectJMXMethodExtend(CONNECTOR_METHOD_GET_CONNECTOR_TASK_METRICS_SUM, SOURCE) + .jmxObjectName(JMX_CONNECTOR_SOURCE_TASK_METRICS).jmxAttribute(SOURCE_RECORD_POLL_TOTAL))); + items.add(buildAllVersionsItem() + .name(CONNECTOR_METRIC_SOURCE_RECORD_WRITE_RATE).unit(BYTE_PER_SEC).desc("消息写入速率").category(CATEGORY_FLOW) + .extend(buildConnectJMXMethodExtend(CONNECTOR_METHOD_GET_CONNECTOR_TASK_METRICS_SUM, SOURCE) + .jmxObjectName(JMX_CONNECTOR_SOURCE_TASK_METRICS).jmxAttribute(SOURCE_RECORD_WRITE_RATE))); + items.add(buildAllVersionsItem() + .name(CONNECTOR_METRIC_SOURCE_RECORD_WRITE_TOTAL).unit("条").desc("消息写入总数量").category(CATEGORY_PERFORMANCE) + .extend(buildConnectJMXMethodExtend(CONNECTOR_METHOD_GET_CONNECTOR_TASK_METRICS_SUM, SOURCE) + .jmxObjectName(JMX_CONNECTOR_SOURCE_TASK_METRICS).jmxAttribute(SOURCE_RECORD_WRITE_TOTAL))); + + items.add(buildAllVersionsItem() + .name(CONNECTOR_METRIC_OFFSET_COMMIT_COMPLETION_RATE).unit(BYTE_PER_SEC).desc("成功的位点提交速率").category(CATEGORY_PERFORMANCE) + .extend(buildConnectJMXMethodExtend(CONNECTOR_METHOD_GET_CONNECTOR_TASK_METRICS_SUM, SINK) + .jmxObjectName(JMX_CONNECTOR_SINK_TASK_METRICS).jmxAttribute(OFFSET_COMMIT_COMPLETION_RATE))); + items.add(buildAllVersionsItem() + .name(CONNECTOR_METRIC_OFFSET_COMMIT_COMPLETION_TOTAL).unit("个").desc("成功的位点提交总数").category(CATEGORY_PERFORMANCE) + 
.extend(buildConnectJMXMethodExtend(CONNECTOR_METHOD_GET_CONNECTOR_TASK_METRICS_SUM, SINK) + .jmxObjectName(JMX_CONNECTOR_SINK_TASK_METRICS).jmxAttribute(OFFSET_COMMIT_COMPLETION_TOTAL))); + items.add(buildAllVersionsItem() + .name(CONNECTOR_METRIC_OFFSET_COMMIT_SKIP_RATE).unit("").desc("被跳过的位点提交速率").category(CATEGORY_PERFORMANCE) + .extend(buildConnectJMXMethodExtend(CONNECTOR_METHOD_GET_CONNECTOR_TASK_METRICS_SUM, SINK) + .jmxObjectName(JMX_CONNECTOR_SINK_TASK_METRICS).jmxAttribute(OFFSET_COMMIT_SKIP_RATE))); + items.add(buildAllVersionsItem() + .name(CONNECTOR_METRIC_OFFSET_COMMIT_SKIP_TOTAL).unit("").desc("被跳过的位点提交总数").category(CATEGORY_PERFORMANCE) + .extend(buildConnectJMXMethodExtend(CONNECTOR_METHOD_GET_CONNECTOR_TASK_METRICS_SUM, SINK) + .jmxObjectName(JMX_CONNECTOR_SINK_TASK_METRICS).jmxAttribute(OFFSET_COMMIT_SKIP_TOTAL))); + items.add(buildAllVersionsItem() + .name(CONNECTOR_METRIC_PARTITION_COUNT).unit("个").desc("被分配到的分区数").category(CATEGORY_PERFORMANCE) + .extend(buildConnectJMXMethodExtend(CONNECTOR_METHOD_GET_CONNECTOR_TASK_METRICS_SUM, SINK) + .jmxObjectName(JMX_CONNECTOR_SINK_TASK_METRICS).jmxAttribute(PARTITION_COUNT))); + items.add(buildAllVersionsItem() + .name(CONNECTOR_METRIC_PUT_BATCH_AVG_TIME_MS).unit("ms").desc("PUT平均耗时").category(CATEGORY_PERFORMANCE) + .extend(buildConnectJMXMethodExtend(CONNECTOR_METHOD_GET_CONNECTOR_TASK_METRICS_AVG, SINK) + .jmxObjectName(JMX_CONNECTOR_SINK_TASK_METRICS).jmxAttribute(PUT_BATCH_AVG_TIME_MS))); + items.add(buildAllVersionsItem() + .name(CONNECTOR_METRIC_PUT_BATCH_MAX_TIME_MS).unit("ms").desc("PUT最大耗时").category(CATEGORY_PERFORMANCE) + .extend(buildConnectJMXMethodExtend(CONNECTOR_METHOD_GET_CONNECTOR_TASK_METRICS_MAX, SINK) + .jmxObjectName(JMX_CONNECTOR_SINK_TASK_METRICS).jmxAttribute(PUT_BATCH_MAX_TIME_MS))); + items.add(buildAllVersionsItem() + .name(CONNECTOR_METRIC_SINK_RECORD_ACTIVE_COUNT).unit("条").desc("pending状态消息数量").category(CATEGORY_PERFORMANCE) + 
.extend(buildConnectJMXMethodExtend(CONNECTOR_METHOD_GET_CONNECTOR_TASK_METRICS_SUM, SINK) + .jmxObjectName(JMX_CONNECTOR_SINK_TASK_METRICS).jmxAttribute(SINK_RECORD_ACTIVE_COUNT))); + items.add(buildAllVersionsItem() + .name(CONNECTOR_METRIC_SINK_RECORD_ACTIVE_COUNT_AVG).unit("条").desc("pending状态平均消息数量").category(CATEGORY_PERFORMANCE) + .extend(buildConnectJMXMethodExtend(CONNECTOR_METHOD_GET_CONNECTOR_TASK_METRICS_SUM, SINK) + .jmxObjectName(JMX_CONNECTOR_SINK_TASK_METRICS).jmxAttribute(SINK_RECORD_ACTIVE_COUNT_AVG))); + items.add(buildAllVersionsItem() + .name(CONNECTOR_METRIC_SINK_RECORD_ACTIVE_COUNT_MAX).unit("条").desc("pending状态最大消息数量").category(CATEGORY_PERFORMANCE) + .extend(buildConnectJMXMethodExtend(CONNECTOR_METHOD_GET_CONNECTOR_TASK_METRICS_SUM, SINK) + .jmxObjectName(JMX_CONNECTOR_SINK_TASK_METRICS).jmxAttribute(SINK_RECORD_ACTIVE_COUNT_MAX))); + items.add(buildAllVersionsItem() + .name(CONNECTOR_METRIC_SINK_RECORD_READ_RATE).unit(BYTE_PER_SEC).desc("消息读取速率").category(CATEGORY_FLOW) + .extend(buildConnectJMXMethodExtend(CONNECTOR_METHOD_GET_CONNECTOR_TASK_METRICS_SUM, SINK) + .jmxObjectName(JMX_CONNECTOR_SINK_TASK_METRICS).jmxAttribute(SINK_RECORD_READ_RATE))); + items.add(buildAllVersionsItem() + .name(CONNECTOR_METRIC_SINK_RECORD_READ_TOTAL).unit("条").desc("消息读取总数量").category(CATEGORY_PERFORMANCE) + .extend(buildConnectJMXMethodExtend(CONNECTOR_METHOD_GET_CONNECTOR_TASK_METRICS_SUM, SINK) + .jmxObjectName(JMX_CONNECTOR_SINK_TASK_METRICS).jmxAttribute(SINK_RECORD_READ_TOTAL))); + items.add(buildAllVersionsItem() + .name(CONNECTOR_METRIC_SINK_RECORD_SEND_RATE).unit(BYTE_PER_SEC).desc("消息写入速率").category(CATEGORY_FLOW) + .extend(buildConnectJMXMethodExtend(CONNECTOR_METHOD_GET_CONNECTOR_TASK_METRICS_SUM, SINK) + .jmxObjectName(JMX_CONNECTOR_SINK_TASK_METRICS).jmxAttribute(SINK_RECORD_SEND_RATE))); + items.add(buildAllVersionsItem() + .name(CONNECTOR_METRIC_SINK_RECORD_SEND_TOTAL).unit("条").desc("消息写入总数量").category(CATEGORY_PERFORMANCE) + 
.extend(buildConnectJMXMethodExtend(CONNECTOR_METHOD_GET_CONNECTOR_TASK_METRICS_SUM, SINK) + .jmxObjectName(JMX_CONNECTOR_SINK_TASK_METRICS).jmxAttribute(SINK_RECORD_SEND_TOTAL))); + + + items.add(buildAllVersionsItem() + .name(CONNECTOR_METRIC_DEADLETTERQUEUE_PRODUCE_FAILURES).unit("次").desc("死信队列写入失败数").category(CATEGORY_PERFORMANCE) + .extend(buildConnectJMXMethodExtend(CONNECTOR_METHOD_GET_CONNECTOR_TASK_METRICS_SUM) + .jmxObjectName(JMX_CONNECTOR_TASK_ERROR_METRICS).jmxAttribute(DEADLETTERQUEUE_PRODUCE_FAILURES))); + items.add(buildAllVersionsItem() + .name(CONNECTOR_METRIC_DEADLETTERQUEUE_PRODUCE_REQUESTS).unit("次").desc("死信队列写入数").category(CATEGORY_PERFORMANCE) + .extend(buildConnectJMXMethodExtend(CONNECTOR_METHOD_GET_CONNECTOR_TASK_METRICS_SUM) + .jmxObjectName(JMX_CONNECTOR_TASK_ERROR_METRICS).jmxAttribute(DEADLETTERQUEUE_PRODUCE_REQUESTS))); + items.add(buildAllVersionsItem() + .name(CONNECTOR_METRIC_LAST_ERROR_TIMESTAMP).unit("").desc("最后一次错误时间").category(CATEGORY_PERFORMANCE) + .extend(buildConnectJMXMethodExtend(CONNECTOR_METHOD_GET_CONNECTOR_TASK_METRICS_MAX) + .jmxObjectName(JMX_CONNECTOR_TASK_ERROR_METRICS).jmxAttribute(LAST_ERROR_TIMESTAMP))); + items.add(buildAllVersionsItem() + .name(CONNECTOR_METRIC_TOTAL_ERRORS_LOGGED).unit("条").desc("记录日志的错误消息数").category(CATEGORY_PERFORMANCE) + .extend(buildConnectJMXMethodExtend(CONNECTOR_METHOD_GET_CONNECTOR_TASK_METRICS_SUM) + .jmxObjectName(JMX_CONNECTOR_TASK_ERROR_METRICS).jmxAttribute(TOTAL_ERRORS_LOGGED))); + items.add(buildAllVersionsItem() + .name(CONNECTOR_METRIC_TOTAL_RECORD_ERRORS).unit("次").desc("消息处理错误的次数(异常消息数量)").category(CATEGORY_PERFORMANCE) + .extend(buildConnectJMXMethodExtend(CONNECTOR_METHOD_GET_CONNECTOR_TASK_METRICS_SUM) + .jmxObjectName(JMX_CONNECTOR_TASK_ERROR_METRICS).jmxAttribute(TOTAL_RECORD_ERRORS))); + items.add(buildAllVersionsItem() + .name(CONNECTOR_METRIC_TOTAL_RECORD_FAILURES).unit("次").desc("消息处理失败的次数(每次retry处理失败都会+1)").category(CATEGORY_PERFORMANCE) + 
.extend(buildConnectJMXMethodExtend(CONNECTOR_METHOD_GET_CONNECTOR_TASK_METRICS_SUM) + .jmxObjectName(JMX_CONNECTOR_TASK_ERROR_METRICS).jmxAttribute(TOTAL_RECORD_FAILURES))); + items.add(buildAllVersionsItem() + .name(CONNECTOR_METRIC_TOTAL_RECORDS_SKIPPED).unit("条").desc("因为失败导致跳过(未处理)的消息数").category(CATEGORY_PERFORMANCE) + .extend(buildConnectJMXMethodExtend(CONNECTOR_METHOD_GET_CONNECTOR_TASK_METRICS_SUM) + .jmxObjectName(JMX_CONNECTOR_TASK_ERROR_METRICS).jmxAttribute(TOTAL_RECORDS_SKIPPED))); + items.add(buildAllVersionsItem() + .name(CONNECTOR_METRIC_TOTAL_RETRIES).unit("次").desc("失败重试的次数").category(CATEGORY_PERFORMANCE) + .extend(buildConnectJMXMethodExtend(CONNECTOR_METHOD_GET_CONNECTOR_TASK_METRICS_SUM) + .jmxObjectName(JMX_CONNECTOR_TASK_ERROR_METRICS).jmxAttribute(TOTAL_RETRIES))); + return items; + } +} + diff --git a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/version/metrics/connect/MirrorMakerMetricVersionItems.java b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/version/metrics/connect/MirrorMakerMetricVersionItems.java new file mode 100644 index 00000000..b5256e31 --- /dev/null +++ b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/version/metrics/connect/MirrorMakerMetricVersionItems.java @@ -0,0 +1,27 @@ +package com.xiaojukeji.know.streaming.km.core.service.version.metrics.connect; + +import com.xiaojukeji.know.streaming.km.common.bean.entity.version.VersionMetricControlItem; +import com.xiaojukeji.know.streaming.km.core.service.version.metrics.BaseMetricVersionMetric; +import org.springframework.stereotype.Component; + +import java.util.ArrayList; +import java.util.List; + +import static com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemTypeEnum.METRIC_CONNECT_MIRROR_MAKER; + +@Component +public class MirrorMakerMetricVersionItems extends BaseMetricVersionMetric { + + @Override + public int versionItemType() { + return METRIC_CONNECT_MIRROR_MAKER.getCode(); + } + + 
@Override + public List init(){ + List items = new ArrayList<>(); + + return items; + } +} + diff --git a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/version/metrics/kafka/ClusterMetricVersionItems.java b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/version/metrics/kafka/ClusterMetricVersionItems.java index c19bebc6..5a72b38c 100644 --- a/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/version/metrics/kafka/ClusterMetricVersionItems.java +++ b/km-core/src/main/java/com/xiaojukeji/know/streaming/km/core/service/version/metrics/kafka/ClusterMetricVersionItems.java @@ -55,6 +55,13 @@ public class ClusterMetricVersionItems extends BaseMetricVersionMetric { public static final String CLUSTER_METRIC_HEALTH_CHECK_PASSED_CLUSTER = "HealthCheckPassed_Cluster"; public static final String CLUSTER_METRIC_HEALTH_CHECK_TOTAL_CLUSTER = "HealthCheckTotal_Cluster"; + /** + * connector健康指标 + */ + public static final String CLUSTER_METRIC_HEALTH_STATE_CONNECTOR = "HealthState_Connector"; + public static final String CLUSTER_METRIC_HEALTH_CHECK_PASSED_CONNECTOR = "HealthCheckPassed_Connector"; + public static final String CLUSTER_METRIC_HEALTH_CHECK_TOTAL_CONNECTOR = "HealthCheckTotal_Connector"; + public static final String CLUSTER_METRIC_TOTAL_REQ_QUEUE_SIZE = "TotalRequestQueueSize"; public static final String CLUSTER_METRIC_TOTAL_RES_QUEUE_SIZE = "TotalResponseQueueSize"; public static final String CLUSTER_METRIC_EVENT_QUEUE_SIZE = "EventQueueSize"; diff --git a/km-extends/km-monitor/src/main/java/com/xiaojukeji/know/streaming/km/monitor/common/AbstractMonitorSinkTag.java b/km-extends/km-monitor/src/main/java/com/xiaojukeji/know/streaming/km/monitor/common/AbstractMonitorSinkTag.java deleted file mode 100644 index dba9e12c..00000000 --- a/km-extends/km-monitor/src/main/java/com/xiaojukeji/know/streaming/km/monitor/common/AbstractMonitorSinkTag.java +++ /dev/null @@ -1,14 +0,0 @@ -package 
com.xiaojukeji.know.streaming.km.monitor.common; - -import java.util.Map; - -/** - * @author zengqiao - * @date 20/5/24 - */ -public abstract class AbstractMonitorSinkTag { - - public abstract String convert2Tags(); - - public abstract Map tagsMap(); -} \ No newline at end of file diff --git a/km-extends/km-monitor/src/main/java/com/xiaojukeji/know/streaming/km/monitor/component/AbstractMonitorSinkService.java b/km-extends/km-monitor/src/main/java/com/xiaojukeji/know/streaming/km/monitor/component/AbstractMonitorSinkService.java index 47288792..1fccaf90 100644 --- a/km-extends/km-monitor/src/main/java/com/xiaojukeji/know/streaming/km/monitor/component/AbstractMonitorSinkService.java +++ b/km-extends/km-monitor/src/main/java/com/xiaojukeji/know/streaming/km/monitor/component/AbstractMonitorSinkService.java @@ -4,7 +4,7 @@ import com.didiglobal.logi.log.ILog; import com.didiglobal.logi.log.LogFactory; import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.*; import com.xiaojukeji.know.streaming.km.common.bean.event.metric.*; -import com.xiaojukeji.know.streaming.km.common.utils.NamedThreadFactory; +import com.xiaojukeji.know.streaming.km.common.utils.FutureUtil; import com.xiaojukeji.know.streaming.km.monitor.common.MetricSinkPoint; import org.springframework.context.ApplicationListener; @@ -12,9 +12,6 @@ import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.concurrent.LinkedBlockingDeque; -import java.util.concurrent.ThreadPoolExecutor; -import java.util.concurrent.TimeUnit; import static com.xiaojukeji.know.streaming.km.monitor.common.MonitorSinkTagEnum.*; @@ -23,10 +20,12 @@ public abstract class AbstractMonitorSinkService implements ApplicationListener< private static final int STEP = 60; - private ThreadPoolExecutor executor = new ThreadPoolExecutor(5, 10, 6000, TimeUnit.MILLISECONDS, - new LinkedBlockingDeque<>(1000), - new NamedThreadFactory("KM-Monitor-Sink-" + monitorName()), - (r, e) 
-> LOGGER.warn("class=AbstractMonitorSinkService||msg=Deque is blocked, taskCount:{}" + e.getTaskCount())); + private FutureUtil sinkTP = FutureUtil.init( + "SinkMetricsTP", + 5, + 5, + 10000 + ); /** * monitor 服务的名称 @@ -36,7 +35,7 @@ public abstract class AbstractMonitorSinkService implements ApplicationListener< @Override public void onApplicationEvent(BaseMetricEvent event) { - executor.execute( () -> { + sinkTP.submitTask(() -> { if (event instanceof BrokerMetricEvent) { BrokerMetricEvent brokerMetricEvent = (BrokerMetricEvent)event; sinkMetrics(brokerMetric2SinkPoint(brokerMetricEvent.getBrokerMetrics())); @@ -194,10 +193,10 @@ public abstract class AbstractMonitorSinkService implements ApplicationListener< Map tagsMap) { List pointList = new ArrayList<>(); - for(String metricName : metrics.keySet()){ + for(Map.Entry entry: metrics.entrySet()){ MetricSinkPoint metricSinkPoint = new MetricSinkPoint(); - metricSinkPoint.setName(metricPre + "_" + metricName); - metricSinkPoint.setValue(metrics.get(metricName)); + metricSinkPoint.setName(metricPre + "_" + entry.getKey()); + metricSinkPoint.setValue(entry.getValue()); metricSinkPoint.setTimestamp(timeStamp); metricSinkPoint.setStep(STEP); metricSinkPoint.setTagsMap(tagsMap); diff --git a/km-persistence/src/main/java/com/xiaojukeji/know/streaming/km/persistence/connect/AbstractConnectClusterChangeHandler.java b/km-persistence/src/main/java/com/xiaojukeji/know/streaming/km/persistence/connect/AbstractConnectClusterChangeHandler.java new file mode 100644 index 00000000..056ac89c --- /dev/null +++ b/km-persistence/src/main/java/com/xiaojukeji/know/streaming/km/persistence/connect/AbstractConnectClusterChangeHandler.java @@ -0,0 +1,44 @@ +package com.xiaojukeji.know.streaming.km.persistence.connect; + +import com.didiglobal.logi.log.ILog; +import com.didiglobal.logi.log.LogFactory; +import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.ConnectCluster; +import 
com.xiaojukeji.know.streaming.km.common.bean.event.cluster.connect.ConnectClusterLoadChangedEvent; +import org.springframework.context.ApplicationListener; + +import java.util.concurrent.locks.ReentrantLock; + +/** + * @author wyb + * @date 2022/11/7 + */ +public abstract class AbstractConnectClusterChangeHandler implements ApplicationListener { + + private static final ILog log = LogFactory.getLog(AbstractConnectClusterChangeHandler.class); + + protected final ReentrantLock modifyClientMapLock = new ReentrantLock(); + + protected abstract void add(ConnectCluster connectCluster); + + protected abstract void modify(ConnectCluster newConnectCluster, ConnectCluster oldConnectCluster); + + protected abstract void remove(ConnectCluster connectCluster); + + + @Override + public void onApplicationEvent(ConnectClusterLoadChangedEvent event) { + switch (event.getOperationEnum()) { + case ADD: + this.add(event.getInDBConnectCluster()); + break; + case EDIT: + this.modify(event.getInDBConnectCluster(), event.getInCacheConnectCluster()); + break; + case DELETE: + this.remove(event.getInCacheConnectCluster()); + break; + default: + log.error("method=onApplicationEvent||event={}||msg=illegal event", event); + } + } +} diff --git a/km-persistence/src/main/java/com/xiaojukeji/know/streaming/km/persistence/connect/ConnectJMXClient.java b/km-persistence/src/main/java/com/xiaojukeji/know/streaming/km/persistence/connect/ConnectJMXClient.java new file mode 100644 index 00000000..727ad7f6 --- /dev/null +++ b/km-persistence/src/main/java/com/xiaojukeji/know/streaming/km/persistence/connect/ConnectJMXClient.java @@ -0,0 +1,146 @@ +package com.xiaojukeji.know.streaming.km.persistence.connect; + +import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper; +import com.didiglobal.logi.log.ILog; +import com.didiglobal.logi.log.LogFactory; +import com.xiaojukeji.know.streaming.km.common.bean.entity.config.JmxConfig; +import 
com.xiaojukeji.know.streaming.km.common.bean.entity.connect.ConnectCluster; +import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.ConnectWorker; +import com.xiaojukeji.know.streaming.km.common.bean.po.connect.ConnectWorkerPO; +import com.xiaojukeji.know.streaming.km.common.jmx.JmxConnectorWrap; +import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil; +import com.xiaojukeji.know.streaming.km.common.utils.ValidateUtils; +import com.xiaojukeji.know.streaming.km.persistence.connect.cache.LoadedConnectClusterCache; +import com.xiaojukeji.know.streaming.km.persistence.mysql.connect.ConnectWorkerDAO; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; + +/** + * @author wyb + * @date 2022/10/31 + */ +@Component +public class ConnectJMXClient extends AbstractConnectClusterChangeHandler { + private static final ILog log = LogFactory.getLog(ConnectJMXClient.class); + + private static final Map> JMX_MAP = new ConcurrentHashMap<>(); + + @Autowired + private ConnectWorkerDAO connectWorkerDAO; + + + public JmxConnectorWrap getClientWithCheck(Long connectClusterId, String workerId) { + JmxConnectorWrap jmxConnectorWrap = this.getClient(connectClusterId, workerId); + + if (ValidateUtils.isNull(jmxConnectorWrap) || !jmxConnectorWrap.checkJmxConnectionAndInitIfNeed()) { + log.error("method=getClientWithCheck||connectClusterId={}||workerId={}||msg=get jmx connector failed!", connectClusterId, workerId); + return null; + } + + return jmxConnectorWrap; + } + + public JmxConnectorWrap getClient(Long connectorClusterId, String workerId) { + Map jmxMap = JMX_MAP.getOrDefault(connectorClusterId, new ConcurrentHashMap<>()); + + JmxConnectorWrap jmxConnectorWrap = jmxMap.get(workerId); + if (jmxConnectorWrap != null) { + // 已新建成功,则直接返回 + return jmxConnectorWrap; + } + + // 未创建,则进行创建 + return 
this.createJmxConnectorWrap(connectorClusterId, workerId); + } + + private JmxConnectorWrap createJmxConnectorWrap(Long connectorClusterId, String workerId) { + ConnectCluster connectCluster = LoadedConnectClusterCache.getByPhyId(connectorClusterId); + if (connectCluster == null) { + return null; + } + return this.createJmxConnectorWrap(connectCluster, workerId); + } + + private JmxConnectorWrap createJmxConnectorWrap(ConnectCluster connectCluster, String workerId) { + ConnectWorker connectWorker = this.getConnectWorkerFromDB(connectCluster.getId(), workerId); + if (connectWorker == null) { + return null; + } + + try { + modifyClientMapLock.lock(); + + JmxConnectorWrap jmxConnectorWrap = JMX_MAP.getOrDefault(connectCluster.getId(), new ConcurrentHashMap<>()).get(workerId); + if (jmxConnectorWrap != null) { + return jmxConnectorWrap; + } + + log.debug("method=createJmxConnectorWrap||connectClusterId={}||workerId={}||msg=create JmxConnectorWrap starting", connectCluster.getId(), workerId); + + JmxConfig jmxConfig = ConvertUtil.str2ObjByJson(connectCluster.getJmxProperties(), JmxConfig.class); + if (jmxConfig == null) { + jmxConfig = new JmxConfig(); + } + + + jmxConnectorWrap = new JmxConnectorWrap( + "connectClusterId: " + connectCluster.getId() + " workerId: " + workerId, + null, + connectWorker.getHost(), + connectWorker.getJmxPort() != null ? 
connectWorker.getJmxPort() : jmxConfig.getJmxPort(), + jmxConfig + ); + + Map workerMap = JMX_MAP.getOrDefault(connectCluster.getId(), new ConcurrentHashMap<>()); + workerMap.put(workerId, jmxConnectorWrap); + JMX_MAP.put(connectCluster.getId(), workerMap); + return jmxConnectorWrap; + } catch (Exception e) { + log.debug("method=createJmxConnectorWrap||connectClusterId={}||workerId={}||msg=create JmxConnectorWrap failed||errMsg=exception||", connectCluster.getId(), workerId, e); + } finally { + modifyClientMapLock.unlock(); + } + return null; + } + + + private ConnectWorker getConnectWorkerFromDB(Long connectorClusterId, String workerId) { + LambdaQueryWrapper lambdaQueryWrapper = new LambdaQueryWrapper<>(); + lambdaQueryWrapper.eq(ConnectWorkerPO::getConnectClusterId, connectorClusterId); + lambdaQueryWrapper.eq(ConnectWorkerPO::getWorkerId, workerId); + ConnectWorkerPO connectWorkerPO = connectWorkerDAO.selectOne(lambdaQueryWrapper); + if (connectWorkerPO == null) { + return null; + } + return ConvertUtil.obj2Obj(connectWorkerPO, ConnectWorker.class); + } + + + @Override + protected void add(ConnectCluster connectCluster) { + JMX_MAP.putIfAbsent(connectCluster.getId(), new ConcurrentHashMap<>()); + } + + @Override + protected void modify(ConnectCluster newConnectCluster, ConnectCluster oldConnectCluster) { + if (newConnectCluster.getJmxProperties().equals(oldConnectCluster.getJmxProperties())) { + return; + } + this.remove(newConnectCluster); + this.add(newConnectCluster); + } + + @Override + protected void remove(ConnectCluster connectCluster) { + Map jmxMap = JMX_MAP.remove(connectCluster.getId()); + if (jmxMap == null) { + return; + } + for (JmxConnectorWrap jmxConnectorWrap : jmxMap.values()) { + jmxConnectorWrap.close(); + } + } +} diff --git a/km-persistence/src/main/java/com/xiaojukeji/know/streaming/km/persistence/connect/cache/LoadedConnectClusterCache.java 
b/km-persistence/src/main/java/com/xiaojukeji/know/streaming/km/persistence/connect/cache/LoadedConnectClusterCache.java new file mode 100644 index 00000000..bdc5bf1d --- /dev/null +++ b/km-persistence/src/main/java/com/xiaojukeji/know/streaming/km/persistence/connect/cache/LoadedConnectClusterCache.java @@ -0,0 +1,37 @@ +package com.xiaojukeji.know.streaming.km.persistence.connect.cache; + +import com.xiaojukeji.know.streaming.km.common.bean.entity.cluster.ClusterPhy; +import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.ConnectCluster; + +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; + +/** + * @author wyb + * @date 2022/11/7 + */ +public class LoadedConnectClusterCache { + private static final Map CONNECT_CLUSTER_MAP = new ConcurrentHashMap<>(); + + public static boolean containsByPhyId(Long connectClusterId) { + return CONNECT_CLUSTER_MAP.containsKey(connectClusterId); + } + + public static ConnectCluster getByPhyId(Long connectClusterId) { + return CONNECT_CLUSTER_MAP.get(connectClusterId); + } + + public static ConnectCluster remove(Long connectClusterId) { + return CONNECT_CLUSTER_MAP.remove(connectClusterId); + } + + public static void replace(ConnectCluster connectCluster) { + CONNECT_CLUSTER_MAP.put(connectCluster.getId(), connectCluster); + } + + public static Map listAll() { + return CONNECT_CLUSTER_MAP; + } + + +} diff --git a/km-persistence/src/main/java/com/xiaojukeji/know/streaming/km/persistence/connect/schedule/ScheduleFlushConnectClusterTask.java b/km-persistence/src/main/java/com/xiaojukeji/know/streaming/km/persistence/connect/schedule/ScheduleFlushConnectClusterTask.java new file mode 100644 index 00000000..b19ebf78 --- /dev/null +++ b/km-persistence/src/main/java/com/xiaojukeji/know/streaming/km/persistence/connect/schedule/ScheduleFlushConnectClusterTask.java @@ -0,0 +1,104 @@ +package com.xiaojukeji.know.streaming.km.persistence.connect.schedule; + +import com.didiglobal.logi.log.ILog; +import 
com.didiglobal.logi.log.LogFactory; +import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.ConnectCluster; +import com.xiaojukeji.know.streaming.km.common.bean.event.cluster.connect.ConnectClusterLoadChangedEvent; +import com.xiaojukeji.know.streaming.km.common.component.SpringTool; +import com.xiaojukeji.know.streaming.km.common.enums.operaterecord.OperationEnum; +import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil; +import com.xiaojukeji.know.streaming.km.persistence.connect.cache.LoadedConnectClusterCache; +import com.xiaojukeji.know.streaming.km.persistence.mysql.connect.ConnectClusterDAO; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.scheduling.annotation.Scheduled; +import org.springframework.stereotype.Component; + +import javax.annotation.PostConstruct; +import java.util.List; +import java.util.Map; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.function.Function; +import java.util.stream.Collectors; + +/** + * @author wyb + * @date 2022/11/7 + */ +@Component +public class ScheduleFlushConnectClusterTask { + private static final ILog log = LogFactory.getLog(ScheduleFlushConnectClusterTask.class); + + @Autowired + private ConnectClusterDAO connectClusterDAO; + + private final BlockingQueue eventQueue = new LinkedBlockingQueue<>(2000); + + private final Thread handleEventThread = new Thread(() -> handleEvent(), "ScheduleFlushConnectClusterTask"); + + @PostConstruct + public void init() { + // 启动线程 + handleEventThread.start(); + + // 立即加载集群 + flush(); + } + + @Scheduled(cron="0/10 * * * * ?") + public void flush() { + List inDBConnectClusterList = ConvertUtil.list2List(connectClusterDAO.selectList(null), ConnectCluster.class); + Map inDBConnectClusterMap = inDBConnectClusterList.stream().collect(Collectors.toMap(ConnectCluster::getId, Function.identity())); + + //排查新增 + for (ConnectCluster inDBConnectCluster : 
inDBConnectClusterList) { + ConnectCluster inCacheConnectCluster = LoadedConnectClusterCache.getByPhyId(inDBConnectCluster.getId()); + //存在,查看是否需要替换 + if (inCacheConnectCluster != null) { + if (inCacheConnectCluster.equals(inDBConnectCluster)) { + continue; + } + LoadedConnectClusterCache.replace(inDBConnectCluster); + this.put2Queue(new ConnectClusterLoadChangedEvent(this, inDBConnectCluster, inCacheConnectCluster, OperationEnum.EDIT)); + + } else { + LoadedConnectClusterCache.replace(inDBConnectCluster); + this.put2Queue(new ConnectClusterLoadChangedEvent(this, inDBConnectCluster, null, OperationEnum.ADD)); + } + + } + + //排查删除 + for (ConnectCluster inCacheConnectCluster : LoadedConnectClusterCache.listAll().values()) { + if (inDBConnectClusterMap.containsKey(inCacheConnectCluster.getId())) { + continue; + } + LoadedConnectClusterCache.remove(inCacheConnectCluster.getId()); + this.put2Queue(new ConnectClusterLoadChangedEvent(this, null, inCacheConnectCluster, OperationEnum.DELETE)); + } + + } + + + private void put2Queue(ConnectClusterLoadChangedEvent event) { + try { + eventQueue.put(event); + } catch (Exception e) { + log.error("method=put2Queue||event={}||errMsg=exception", event, e); + } + } + + + + + private void handleEvent() { + while (true) { + try { + ConnectClusterLoadChangedEvent event = eventQueue.take(); + SpringTool.publish(event); + } catch (Exception e) { + log.error("method=handleEvent||errMsg=exception", e); + } + } + } +} diff --git a/km-persistence/src/main/java/com/xiaojukeji/know/streaming/km/persistence/jmx/impl/JmxDAOImpl.java b/km-persistence/src/main/java/com/xiaojukeji/know/streaming/km/persistence/jmx/impl/JmxDAOImpl.java index e95d3c43..ec261b18 100644 --- a/km-persistence/src/main/java/com/xiaojukeji/know/streaming/km/persistence/jmx/impl/JmxDAOImpl.java +++ b/km-persistence/src/main/java/com/xiaojukeji/know/streaming/km/persistence/jmx/impl/JmxDAOImpl.java @@ -26,7 +26,7 @@ public class JmxDAOImpl implements JmxDAO { public 
Object getJmxValue(Long clusterPhyId, String jmxHost, Integer jmxPort, JmxConfig jmxConfig, ObjectName objectName, String attribute) { JmxConnectorWrap jmxConnectorWrap = null; try { - jmxConnectorWrap = new JmxConnectorWrap(clusterPhyId, null, null, jmxHost, jmxPort, jmxConfig); + jmxConnectorWrap = new JmxConnectorWrap("clusterPhyId: " + clusterPhyId, null, jmxHost, jmxPort, jmxConfig); if (!jmxConnectorWrap.checkJmxConnectionAndInitIfNeed()) { log.error( "method=getJmxValue||clusterPhyId={}||jmxHost={}||jmxPort={}||jmxConfig={}||errMgs=create jmx client failed", diff --git a/km-persistence/src/main/java/com/xiaojukeji/know/streaming/km/persistence/kafka/KafkaJMXClient.java b/km-persistence/src/main/java/com/xiaojukeji/know/streaming/km/persistence/kafka/KafkaJMXClient.java index 11e5ae36..1ace6742 100644 --- a/km-persistence/src/main/java/com/xiaojukeji/know/streaming/km/persistence/kafka/KafkaJMXClient.java +++ b/km-persistence/src/main/java/com/xiaojukeji/know/streaming/km/persistence/kafka/KafkaJMXClient.java @@ -159,8 +159,7 @@ public class KafkaJMXClient extends AbstractClusterLoadedChangedHandler { } JmxConnectorWrap jmxConnectorWrap = new JmxConnectorWrap( - clusterPhy.getId(), - brokerId, + "clusterPhyId: " + clusterPhy.getId() + " brokerId: " + brokerId, broker.getStartTimestamp(), jmxConfig != null ? broker.getJmxHost(jmxConfig.getUseWhichEndpoint()) : broker.getHost(), broker.getJmxPort() != null ? 
broker.getJmxPort() : jmxConfig.getJmxPort(), diff --git a/km-persistence/src/main/java/com/xiaojukeji/know/streaming/km/persistence/mysql/connect/ConnectClusterDAO.java b/km-persistence/src/main/java/com/xiaojukeji/know/streaming/km/persistence/mysql/connect/ConnectClusterDAO.java new file mode 100644 index 00000000..fae91cbe --- /dev/null +++ b/km-persistence/src/main/java/com/xiaojukeji/know/streaming/km/persistence/mysql/connect/ConnectClusterDAO.java @@ -0,0 +1,9 @@ +package com.xiaojukeji.know.streaming.km.persistence.mysql.connect; + +import com.baomidou.mybatisplus.core.mapper.BaseMapper; +import com.xiaojukeji.know.streaming.km.common.bean.po.connect.ConnectClusterPO; +import org.springframework.stereotype.Repository; + +@Repository +public interface ConnectClusterDAO extends BaseMapper { +} diff --git a/km-persistence/src/main/java/com/xiaojukeji/know/streaming/km/persistence/mysql/connect/ConnectWorkerDAO.java b/km-persistence/src/main/java/com/xiaojukeji/know/streaming/km/persistence/mysql/connect/ConnectWorkerDAO.java new file mode 100644 index 00000000..edb50869 --- /dev/null +++ b/km-persistence/src/main/java/com/xiaojukeji/know/streaming/km/persistence/mysql/connect/ConnectWorkerDAO.java @@ -0,0 +1,9 @@ +package com.xiaojukeji.know.streaming.km.persistence.mysql.connect; + +import com.baomidou.mybatisplus.core.mapper.BaseMapper; +import com.xiaojukeji.know.streaming.km.common.bean.po.connect.ConnectWorkerPO; +import org.springframework.stereotype.Repository; + +@Repository +public interface ConnectWorkerDAO extends BaseMapper { +} diff --git a/km-persistence/src/main/java/com/xiaojukeji/know/streaming/km/persistence/mysql/connect/ConnectorDAO.java b/km-persistence/src/main/java/com/xiaojukeji/know/streaming/km/persistence/mysql/connect/ConnectorDAO.java new file mode 100644 index 00000000..69d228ba --- /dev/null +++ b/km-persistence/src/main/java/com/xiaojukeji/know/streaming/km/persistence/mysql/connect/ConnectorDAO.java @@ -0,0 +1,9 @@ +package 
com.xiaojukeji.know.streaming.km.persistence.mysql.connect; + +import com.baomidou.mybatisplus.core.mapper.BaseMapper; +import com.xiaojukeji.know.streaming.km.common.bean.po.connect.ConnectorPO; +import org.springframework.stereotype.Repository; + +@Repository +public interface ConnectorDAO extends BaseMapper { +} diff --git a/km-persistence/src/main/java/com/xiaojukeji/know/streaming/km/persistence/mysql/connect/WorkerConnectorDAO.java b/km-persistence/src/main/java/com/xiaojukeji/know/streaming/km/persistence/mysql/connect/WorkerConnectorDAO.java new file mode 100644 index 00000000..fee9f872 --- /dev/null +++ b/km-persistence/src/main/java/com/xiaojukeji/know/streaming/km/persistence/mysql/connect/WorkerConnectorDAO.java @@ -0,0 +1,9 @@ +package com.xiaojukeji.know.streaming.km.persistence.mysql.connect; + +import com.baomidou.mybatisplus.core.mapper.BaseMapper; +import com.xiaojukeji.know.streaming.km.common.bean.po.connect.WorkerConnectorPO; +import org.springframework.stereotype.Repository; + +@Repository +public interface WorkerConnectorDAO extends BaseMapper { +} diff --git a/km-rest/src/main/java/com/xiaojukeji/know/streaming/km/rest/api/v3/health/KafkaHealthController.java b/km-rest/src/main/java/com/xiaojukeji/know/streaming/km/rest/api/v3/health/KafkaHealthController.java index d4fc8094..1890a1db 100644 --- a/km-rest/src/main/java/com/xiaojukeji/know/streaming/km/rest/api/v3/health/KafkaHealthController.java +++ b/km-rest/src/main/java/com/xiaojukeji/know/streaming/km/rest/api/v3/health/KafkaHealthController.java @@ -17,6 +17,7 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.web.bind.annotation.*; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; /** @@ -30,6 +31,14 @@ public class KafkaHealthController { @Autowired private HealthStateService healthStateService; + private List allDimensionCodeList = new ArrayList() { + { + for (HealthCheckDimensionEnum dimensionEnum : 
HealthCheckDimensionEnum.values()) { + add(dimensionEnum.getDimension()); + } + } + }; + @Autowired private HealthCheckResultService healthCheckResultService; @@ -40,11 +49,21 @@ public class KafkaHealthController { @RequestParam(required = false) Integer dimensionCode) { HealthCheckDimensionEnum dimensionEnum = HealthCheckDimensionEnum.getByCode(dimensionCode); if (!dimensionEnum.equals(HealthCheckDimensionEnum.UNKNOWN)) { - return Result.buildSuc(HealthScoreVOConverter.convert2HealthScoreResultDetailVOList(healthStateService.getDimensionHealthResult(clusterPhyId, dimensionEnum))); + return Result.buildSuc(HealthScoreVOConverter.convert2HealthScoreResultDetailVOList(healthStateService.getDimensionHealthResult(clusterPhyId, Arrays.asList(dimensionCode)))); } return Result.buildSuc(HealthScoreVOConverter.convert2HealthScoreResultDetailVOList( - healthStateService.getClusterHealthResult(clusterPhyId) + healthStateService.getDimensionHealthResult(clusterPhyId, allDimensionCodeList) + )); + } + + @ApiOperation(value = "集群-健康检查详情") + @PostMapping(value = "clusters/{clusterPhyId}/health-detail") + @ResponseBody + public Result> getClusterHealthCheckResultDetail(@PathVariable Long clusterPhyId, + @RequestBody List dimensionCodeList) { + return Result.buildSuc(HealthScoreVOConverter.convert2HealthScoreResultDetailVOList( + healthStateService.getDimensionHealthResult(clusterPhyId, dimensionCodeList) )); } @@ -55,7 +74,7 @@ public class KafkaHealthController { @PathVariable Integer dimensionCode, @PathVariable String resName) { return Result.buildSuc(HealthScoreVOConverter.convert2HealthScoreBaseResultVOList( - healthStateService.getResHealthResult(clusterPhyId, dimensionCode, resName) + healthStateService.getResHealthResult(clusterPhyId, clusterPhyId, dimensionCode, resName) )); } diff --git a/km-rest/src/main/java/com/xiaojukeji/know/streaming/km/rest/api/v3/version/VersionController.java 
b/km-rest/src/main/java/com/xiaojukeji/know/streaming/km/rest/api/v3/version/VersionController.java index f8e00430..986fd825 100644 --- a/km-rest/src/main/java/com/xiaojukeji/know/streaming/km/rest/api/v3/version/VersionController.java +++ b/km-rest/src/main/java/com/xiaojukeji/know/streaming/km/rest/api/v3/version/VersionController.java @@ -35,7 +35,19 @@ public class VersionController { @GetMapping(value = "support-kafka-versions") @ResponseBody public Result> listAllVersions() { - Result> rm = versionControlManager.listAllVersions(); + Result> rm = versionControlManager.listAllKafkaVersions(); + if (rm.failed()) { + return Result.buildFromIgnoreData(rm); + } + + return Result.buildSuc(new TreeMap<>(rm.getData())); + } + + @ApiOperation(value = "支持的kafka-Connect版本列表", notes = "") + @GetMapping(value = "support-kafka-connect-versions") + @ResponseBody + public Result> listAllConnectVersions() { + Result> rm = versionControlManager.listAllKafkaVersions(); if (rm.failed()) { return Result.buildFromIgnoreData(rm); } @@ -54,7 +66,7 @@ public class VersionController { @GetMapping(value = "clusters/{clusterId}/types/{type}/support-kafka-versions") @ResponseBody public Result> listClusterVersionControlItem(@PathVariable Long clusterId, @PathVariable Integer type) { - return versionControlManager.listClusterVersionControlItem(clusterId, type); + return versionControlManager.listKafkaClusterVersionControlItem(clusterId, type); } @ApiOperation(value = "用户设置的指标显示项", notes = "") diff --git a/km-rest/src/main/java/com/xiaojukeji/know/streaming/km/rest/api/v3/zk/ZookeeperMetricsController.java b/km-rest/src/main/java/com/xiaojukeji/know/streaming/km/rest/api/v3/zk/ZookeeperMetricsController.java index bb2ea098..d481e1de 100644 --- a/km-rest/src/main/java/com/xiaojukeji/know/streaming/km/rest/api/v3/zk/ZookeeperMetricsController.java +++ b/km-rest/src/main/java/com/xiaojukeji/know/streaming/km/rest/api/v3/zk/ZookeeperMetricsController.java @@ -10,8 +10,6 @@ import 
com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.ZookeeperMetr import com.xiaojukeji.know.streaming.km.core.service.zookeeper.ZookeeperMetricService; import io.swagger.annotations.Api; import io.swagger.annotations.ApiOperation; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.web.bind.annotation.*; @@ -26,8 +24,6 @@ import java.util.List; @RestController @RequestMapping(ApiPrefix.API_V3_PREFIX) public class ZookeeperMetricsController { - private static final Logger LOGGER = LoggerFactory.getLogger(ZookeeperMetricsController.class); - @Autowired private ZookeeperMetricService zookeeperMetricService; diff --git a/km-task/src/main/java/com/xiaojukeji/know/streaming/km/task/kafka/metadata/SyncKafkaGroupTask.java b/km-task/src/main/java/com/xiaojukeji/know/streaming/km/task/kafka/metadata/SyncKafkaGroupTask.java index b75ab3d9..521e1f84 100644 --- a/km-task/src/main/java/com/xiaojukeji/know/streaming/km/task/kafka/metadata/SyncKafkaGroupTask.java +++ b/km-task/src/main/java/com/xiaojukeji/know/streaming/km/task/kafka/metadata/SyncKafkaGroupTask.java @@ -34,7 +34,7 @@ public class SyncKafkaGroupTask extends AbstractAsyncMetadataDispatchTask { @Override public TaskResult processClusterTask(ClusterPhy clusterPhy, long triggerTimeUnitMs) throws Exception { // 获取集群的Group列表 - List groupNameList = groupService.listGroupsFromKafka(clusterPhy.getId()); + List groupNameList = groupService.listGroupsFromKafka(clusterPhy); TaskResult allSuccess = TaskResult.SUCCESS; @@ -42,7 +42,7 @@ public class SyncKafkaGroupTask extends AbstractAsyncMetadataDispatchTask { List groupList = new ArrayList<>(); for (String groupName : groupNameList) { try { - Group group = groupService.getGroupFromKafka(clusterPhy.getId(), groupName); + Group group = groupService.getGroupFromKafka(clusterPhy, groupName); if (group == null) { continue; }