[Optimize] Unify log format & improve log output content - part2 (#800)
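The commit applies one logging convention across the collectors and ES senders: each class gets its own logger via LogFactory.getLog(Xxx.class) instead of the shared "METRIC_LOGGER", and log messages are key=value pairs joined by "||" that start with method= rather than class=. A minimal sketch of that convention follows; the DemoMetricCollector class and its method are hypothetical, only the ILog/LogFactory calls are taken from the diff below.

    import com.didiglobal.logi.log.ILog;
    import com.didiglobal.logi.log.LogFactory;

    // Hypothetical class, for illustration only: shows the log-format convention this commit applies.
    public class DemoMetricCollector {
        // New convention: one logger per class instead of the shared "METRIC_LOGGER".
        private static final ILog LOGGER = LogFactory.getLog(DemoMetricCollector.class);

        public void collect(Long clusterPhyId) {
            long startTime = System.currentTimeMillis();
            // ... collect metrics here ...

            // New convention: "||"-joined key=value pairs, starting with method= rather than class=.
            LOGGER.info(
                "method=collect||clusterPhyId={}||costTimeUnitMs={}",
                clusterPhyId, System.currentTimeMillis() - startTime
            );
        }
    }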
@@ -1,25 +1,53 @@
package com.xiaojukeji.know.streaming.km.collector.metric;

import com.didiglobal.logi.log.ILog;
import com.didiglobal.logi.log.LogFactory;
import com.xiaojukeji.know.streaming.km.collector.service.CollectThreadPoolService;
import com.xiaojukeji.know.streaming.km.common.utils.LoggerUtil;
import com.xiaojukeji.know.streaming.km.common.bean.entity.cluster.ClusterPhy;
import com.xiaojukeji.know.streaming.km.common.bean.event.metric.BaseMetricEvent;
import com.xiaojukeji.know.streaming.km.common.component.SpringTool;
import com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemTypeEnum;
import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
import com.xiaojukeji.know.streaming.km.common.utils.FutureWaitUtil;
import org.springframework.beans.factory.annotation.Autowired;

import java.util.List;

/**
* @author didi
*/
public abstract class AbstractMetricCollector<T> {
public abstract void collectMetrics(ClusterPhy clusterPhy);
protected static final ILog LOGGER = LogFactory.getLog(AbstractMetricCollector.class);

protected static final ILog METRIC_COLLECTED_LOGGER = LoggerUtil.getMetricCollectedLogger();

public abstract List<T> collectKafkaMetrics(ClusterPhy clusterPhy);

public abstract VersionItemTypeEnum collectorType();

@Autowired
private CollectThreadPoolService collectThreadPoolService;

public void collectMetrics(ClusterPhy clusterPhy) {
long startTime = System.currentTimeMillis();

// collect the metrics
List<T> metricsList = this.collectKafkaMetrics(clusterPhy);

// log the collection cost
LOGGER.info(
"metricType={}||clusterPhyId={}||costTimeUnitMs={}",
this.collectorType().getMessage(), clusterPhy.getId(), System.currentTimeMillis() - startTime
);

// log the collected metrics
METRIC_COLLECTED_LOGGER.debug("metricType={}||clusterPhyId={}||metrics={}!",
this.collectorType().getMessage(), clusterPhy.getId(), ConvertUtil.obj2Json(metricsList)
);
}

protected FutureWaitUtil<Void> getFutureUtilByClusterPhyId(Long clusterPhyId) {
return collectThreadPoolService.selectSuitableFutureUtil(clusterPhyId * 1000L + this.collectorType().getCode());
}
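The hunk above turns collectMetrics into a template method: the base class times the run, logs the cost, and logs the collected metrics, while each subclass only implements collectKafkaMetrics and collectorType. A sketch of what a concrete collector now looks like under that contract; ExampleMetricCollector and ExampleMetrics are hypothetical, the real subclasses follow in the hunks below.

    // Hypothetical subclass, for illustration; see BrokerMetricCollector below for an actual one.
    public class ExampleMetricCollector extends AbstractMetricCollector<ExampleMetrics> {
        @Override
        public List<ExampleMetrics> collectKafkaMetrics(ClusterPhy clusterPhy) {
            // Only gather and return the metrics; timing and logging now happen in the base class.
            List<ExampleMetrics> metricsList = new ArrayList<>();
            // ... fill metricsList from Kafka ...
            return metricsList;
        }

        @Override
        public VersionItemTypeEnum collectorType() {
            // Assumed enum constant; METRIC_CLUSTER appears in the imports further down.
            return VersionItemTypeEnum.METRIC_CLUSTER;
        }
    }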
@@ -1,6 +1,5 @@
package com.xiaojukeji.know.streaming.km.collector.metric.kafka;

import com.alibaba.fastjson.JSON;
import com.didiglobal.logi.log.ILog;
import com.didiglobal.logi.log.LogFactory;
import com.xiaojukeji.know.streaming.km.collector.metric.AbstractMetricCollector;
@@ -12,7 +11,6 @@ import com.xiaojukeji.know.streaming.km.common.bean.entity.version.VersionContro
import com.xiaojukeji.know.streaming.km.common.bean.event.metric.BrokerMetricEvent;
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemTypeEnum;
import com.xiaojukeji.know.streaming.km.common.utils.EnvUtil;
import com.xiaojukeji.know.streaming.km.common.utils.FutureWaitUtil;
import com.xiaojukeji.know.streaming.km.core.service.broker.BrokerMetricService;
import com.xiaojukeji.know.streaming.km.core.service.broker.BrokerService;
@@ -30,7 +28,7 @@ import static com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemT
*/
@Component
public class BrokerMetricCollector extends AbstractMetricCollector<BrokerMetrics> {
protected static final ILog LOGGER = LogFactory.getLog("METRIC_LOGGER");
private static final ILog LOGGER = LogFactory.getLog(BrokerMetricCollector.class);

@Autowired
private VersionControlService versionControlService;
@@ -42,8 +40,7 @@ public class BrokerMetricCollector extends AbstractMetricCollector<BrokerMetrics
private BrokerService brokerService;

@Override
public void collectMetrics(ClusterPhy clusterPhy) {
Long startTime = System.currentTimeMillis();
public List<BrokerMetrics> collectKafkaMetrics(ClusterPhy clusterPhy) {
Long clusterPhyId = clusterPhy.getId();

List<Broker> brokers = brokerService.listAliveBrokersFromDB(clusterPhy.getId());
@@ -51,23 +48,23 @@ public class BrokerMetricCollector extends AbstractMetricCollector<BrokerMetrics

FutureWaitUtil<Void> future = this.getFutureUtilByClusterPhyId(clusterPhyId);

List<BrokerMetrics> brokerMetrics = new ArrayList<>();
List<BrokerMetrics> metricsList = new ArrayList<>();
for(Broker broker : brokers) {
BrokerMetrics metrics = new BrokerMetrics(clusterPhyId, broker.getBrokerId(), broker.getHost(), broker.getPort());
brokerMetrics.add(metrics);
metrics.putMetric(Constant.COLLECT_METRICS_COST_TIME_METRICS_NAME, Constant.COLLECT_METRICS_ERROR_COST_TIME);
metricsList.add(metrics);

future.runnableTask(
String.format("method=BrokerMetricCollector||clusterPhyId=%d||brokerId=%d", clusterPhyId, broker.getBrokerId()),
String.format("class=BrokerMetricCollector||clusterPhyId=%d||brokerId=%d", clusterPhyId, broker.getBrokerId()),
30000,
() -> collectMetrics(clusterPhyId, metrics, items)
);
}

future.waitExecute(30000);
this.publishMetric(new BrokerMetricEvent(this, brokerMetrics));
this.publishMetric(new BrokerMetricEvent(this, metricsList));

LOGGER.info("method=BrokerMetricCollector||clusterPhyId={}||startTime={}||costTime={}||msg=collect finished.",
clusterPhyId, startTime, System.currentTimeMillis() - startTime);
return metricsList;
}

@Override
@@ -79,7 +76,6 @@ public class BrokerMetricCollector extends AbstractMetricCollector<BrokerMetrics

private void collectMetrics(Long clusterPhyId, BrokerMetrics metrics, List<VersionControlItem> items) {
long startTime = System.currentTimeMillis();
metrics.putMetric(Constant.COLLECT_METRICS_COST_TIME_METRICS_NAME, Constant.COLLECT_METRICS_ERROR_COST_TIME);

for(VersionControlItem v : items) {
try {
@@ -93,14 +89,11 @@ public class BrokerMetricCollector extends AbstractMetricCollector<BrokerMetrics
}

metrics.putMetric(ret.getData().getMetrics());

if(!EnvUtil.isOnline()){
LOGGER.info("method=BrokerMetricCollector||clusterId={}||brokerId={}||metric={}||metric={}!",
clusterPhyId, metrics.getBrokerId(), v.getName(), JSON.toJSONString(ret.getData().getMetrics()));
}
} catch (Exception e){
LOGGER.error("method=BrokerMetricCollector||clusterId={}||brokerId={}||metric={}||errMsg=exception!",
clusterPhyId, metrics.getBrokerId(), v.getName(), e);
LOGGER.error(
"method=collectMetrics||clusterPhyId={}||brokerId={}||metricName={}||errMsg=exception!",
clusterPhyId, metrics.getBrokerId(), v.getName(), e
);
}
}
@@ -8,18 +8,15 @@ import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.ClusterMetric
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
import com.xiaojukeji.know.streaming.km.common.bean.entity.version.VersionControlItem;
import com.xiaojukeji.know.streaming.km.common.bean.event.metric.ClusterMetricEvent;
import com.xiaojukeji.know.streaming.km.common.bean.po.metrice.ClusterMetricPO;
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemTypeEnum;
import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
import com.xiaojukeji.know.streaming.km.common.utils.EnvUtil;
import com.xiaojukeji.know.streaming.km.common.utils.FutureWaitUtil;
import com.xiaojukeji.know.streaming.km.core.service.cluster.ClusterMetricService;
import com.xiaojukeji.know.streaming.km.core.service.version.VersionControlService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

import static com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemTypeEnum.METRIC_CLUSTER;
@@ -28,8 +25,8 @@ import static com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemT
* @author didi
*/
@Component
public class ClusterMetricCollector extends AbstractMetricCollector<ClusterMetricPO> {
protected static final ILog LOGGER = LogFactory.getLog("METRIC_LOGGER");
public class ClusterMetricCollector extends AbstractMetricCollector<ClusterMetrics> {
protected static final ILog LOGGER = LogFactory.getLog(ClusterMetricCollector.class);

@Autowired
private VersionControlService versionControlService;
@@ -38,35 +35,37 @@ public class ClusterMetricCollector extends AbstractMetricCollector<ClusterMetri
private ClusterMetricService clusterMetricService;

@Override
public void collectMetrics(ClusterPhy clusterPhy) {
public List<ClusterMetrics> collectKafkaMetrics(ClusterPhy clusterPhy) {
Long startTime = System.currentTimeMillis();
Long clusterPhyId = clusterPhy.getId();
List<VersionControlItem> items = versionControlService.listVersionControlItem(clusterPhyId, collectorType().getCode());

ClusterMetrics metrics = new ClusterMetrics(clusterPhyId, clusterPhy.getKafkaVersion());
metrics.putMetric(Constant.COLLECT_METRICS_COST_TIME_METRICS_NAME, Constant.COLLECT_METRICS_ERROR_COST_TIME);

FutureWaitUtil<Void> future = this.getFutureUtilByClusterPhyId(clusterPhyId);

for(VersionControlItem v : items) {
future.runnableTask(
String.format("method=ClusterMetricCollector||clusterPhyId=%d||metricName=%s", clusterPhyId, v.getName()),
String.format("class=ClusterMetricCollector||clusterPhyId=%d", clusterPhyId),
30000,
() -> {
try {
if(null != metrics.getMetrics().get(v.getName())){return null;}
if(null != metrics.getMetrics().get(v.getName())){
return null;
}

Result<ClusterMetrics> ret = clusterMetricService.collectClusterMetricsFromKafka(clusterPhyId, v.getName());
if(null == ret || ret.failed() || null == ret.getData()){return null;}
if(null == ret || ret.failed() || null == ret.getData()){
return null;
}

metrics.putMetric(ret.getData().getMetrics());

if(!EnvUtil.isOnline()){
LOGGER.info("method=ClusterMetricCollector||clusterPhyId={}||metricName={}||metricValue={}",
clusterPhyId, v.getName(), ConvertUtil.obj2Json(ret.getData().getMetrics()));
}
} catch (Exception e){
LOGGER.error("method=ClusterMetricCollector||clusterPhyId={}||metricName={}||errMsg=exception!",
clusterPhyId, v.getName(), e);
LOGGER.error(
"method=collectKafkaMetrics||clusterPhyId={}||metricName={}||errMsg=exception!",
clusterPhyId, v.getName(), e
);
}

return null;
@@ -77,10 +76,9 @@ public class ClusterMetricCollector extends AbstractMetricCollector<ClusterMetri

metrics.putMetric(Constant.COLLECT_METRICS_COST_TIME_METRICS_NAME, (System.currentTimeMillis() - startTime) / 1000.0f);

publishMetric(new ClusterMetricEvent(this, Arrays.asList(metrics)));
publishMetric(new ClusterMetricEvent(this, Collections.singletonList(metrics)));

LOGGER.info("method=ClusterMetricCollector||clusterPhyId={}||startTime={}||costTime={}||msg=msg=collect finished.",
clusterPhyId, startTime, System.currentTimeMillis() - startTime);
return Collections.singletonList(metrics);
}

@Override
@@ -1,6 +1,5 @@
package com.xiaojukeji.know.streaming.km.collector.metric.kafka;

import com.alibaba.fastjson.JSON;
import com.didiglobal.logi.log.ILog;
import com.didiglobal.logi.log.LogFactory;
import com.xiaojukeji.know.streaming.km.collector.metric.AbstractMetricCollector;
@@ -11,20 +10,16 @@ import com.xiaojukeji.know.streaming.km.common.bean.entity.version.VersionContro
import com.xiaojukeji.know.streaming.km.common.bean.event.metric.GroupMetricEvent;
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemTypeEnum;
import com.xiaojukeji.know.streaming.km.common.utils.EnvUtil;
import com.xiaojukeji.know.streaming.km.common.utils.FutureWaitUtil;
import com.xiaojukeji.know.streaming.km.common.utils.ValidateUtils;
import com.xiaojukeji.know.streaming.km.core.service.group.GroupMetricService;
import com.xiaojukeji.know.streaming.km.core.service.group.GroupService;
import com.xiaojukeji.know.streaming.km.core.service.version.VersionControlService;
import org.apache.commons.collections.CollectionUtils;
import org.apache.kafka.common.TopicPartition;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;

import static com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemTypeEnum.METRIC_GROUP;
@@ -33,8 +28,8 @@ import static com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemT
* @author didi
*/
@Component
public class GroupMetricCollector extends AbstractMetricCollector<List<GroupMetrics>> {
protected static final ILog LOGGER = LogFactory.getLog("METRIC_LOGGER");
public class GroupMetricCollector extends AbstractMetricCollector<GroupMetrics> {
protected static final ILog LOGGER = LogFactory.getLog(GroupMetricCollector.class);

@Autowired
private VersionControlService versionControlService;
@@ -46,40 +41,38 @@ public class GroupMetricCollector extends AbstractMetricCollector<List<GroupMetr
private GroupService groupService;

@Override
public void collectMetrics(ClusterPhy clusterPhy) {
Long startTime = System.currentTimeMillis();
public List<GroupMetrics> collectKafkaMetrics(ClusterPhy clusterPhy) {
Long clusterPhyId = clusterPhy.getId();

List<String> groups = new ArrayList<>();
List<String> groupNameList = new ArrayList<>();
try {
groups = groupService.listGroupsFromKafka(clusterPhyId);
groupNameList = groupService.listGroupsFromKafka(clusterPhyId);
} catch (Exception e) {
LOGGER.error("method=GroupMetricCollector||clusterPhyId={}||msg=exception!", clusterPhyId, e);
LOGGER.error("method=collectKafkaMetrics||clusterPhyId={}||msg=exception!", clusterPhyId, e);
}

if(CollectionUtils.isEmpty(groups)){return;}
if(ValidateUtils.isEmptyList(groupNameList)) {
return Collections.emptyList();
}

List<VersionControlItem> items = versionControlService.listVersionControlItem(clusterPhyId, collectorType().getCode());

FutureWaitUtil<Void> future = this.getFutureUtilByClusterPhyId(clusterPhyId);

Map<String, List<GroupMetrics>> metricsMap = new ConcurrentHashMap<>();
for(String groupName : groups) {
for(String groupName : groupNameList) {
future.runnableTask(
String.format("method=GroupMetricCollector||clusterPhyId=%d||groupName=%s", clusterPhyId, groupName),
String.format("class=GroupMetricCollector||clusterPhyId=%d||groupName=%s", clusterPhyId, groupName),
30000,
() -> collectMetrics(clusterPhyId, groupName, metricsMap, items));
}

future.waitResult(30000);

List<GroupMetrics> metricsList = new ArrayList<>();
metricsMap.values().forEach(elem -> metricsList.addAll(elem));
List<GroupMetrics> metricsList = metricsMap.values().stream().collect(ArrayList::new, ArrayList::addAll, ArrayList::addAll);

publishMetric(new GroupMetricEvent(this, metricsList));

LOGGER.info("method=GroupMetricCollector||clusterPhyId={}||startTime={}||cost={}||msg=collect finished.",
clusterPhyId, startTime, System.currentTimeMillis() - startTime);
return metricsList;
}

@Override
@@ -92,9 +85,7 @@ public class GroupMetricCollector extends AbstractMetricCollector<List<GroupMetr
private void collectMetrics(Long clusterPhyId, String groupName, Map<String, List<GroupMetrics>> metricsMap, List<VersionControlItem> items) {
long startTime = System.currentTimeMillis();

List<GroupMetrics> groupMetricsList = new ArrayList<>();

Map<String, GroupMetrics> tpGroupPOMap = new HashMap<>();
Map<TopicPartition, GroupMetrics> subMetricMap = new HashMap<>();

GroupMetrics groupMetrics = new GroupMetrics(clusterPhyId, groupName, true);
groupMetrics.putMetric(Constant.COLLECT_METRICS_COST_TIME_METRICS_NAME, Constant.COLLECT_METRICS_ERROR_COST_TIME);
@@ -108,38 +99,31 @@ public class GroupMetricCollector extends AbstractMetricCollector<List<GroupMetr
continue;
}

ret.getData().stream().forEach(metrics -> {
ret.getData().forEach(metrics -> {
if (metrics.isBGroupMetric()) {
groupMetrics.putMetric(metrics.getMetrics());
} else {
String topicName = metrics.getTopic();
Integer partitionId = metrics.getPartitionId();
String tpGroupKey = genTopicPartitionGroupKey(topicName, partitionId);

tpGroupPOMap.putIfAbsent(tpGroupKey, new GroupMetrics(clusterPhyId, partitionId, topicName, groupName, false));
tpGroupPOMap.get(tpGroupKey).putMetric(metrics.getMetrics());
return;
}
});

if(!EnvUtil.isOnline()){
LOGGER.info("method=GroupMetricCollector||clusterPhyId={}||groupName={}||metricName={}||metricValue={}",
clusterPhyId, groupName, metricName, JSON.toJSONString(ret.getData()));
}
}catch (Exception e){
LOGGER.error("method=GroupMetricCollector||clusterPhyId={}||groupName={}||errMsg=exception!", clusterPhyId, groupName, e);
TopicPartition tp = new TopicPartition(metrics.getTopic(), metrics.getPartitionId());
subMetricMap.putIfAbsent(tp, new GroupMetrics(clusterPhyId, metrics.getPartitionId(), metrics.getTopic(), groupName, false));
subMetricMap.get(tp).putMetric(metrics.getMetrics());
});
} catch (Exception e) {
LOGGER.error(
"method=collectMetrics||clusterPhyId={}||groupName={}||errMsg=exception!",
clusterPhyId, groupName, e
);
}
}

groupMetricsList.add(groupMetrics);
groupMetricsList.addAll(tpGroupPOMap.values());
List<GroupMetrics> metricsList = new ArrayList<>();
metricsList.add(groupMetrics);
metricsList.addAll(subMetricMap.values());

// record the collection cost
groupMetrics.putMetric(Constant.COLLECT_METRICS_COST_TIME_METRICS_NAME, (System.currentTimeMillis() - startTime) / 1000.0f);

metricsMap.put(groupName, groupMetricsList);
}

private String genTopicPartitionGroupKey(String topic, Integer partitionId){
return topic + "@" + partitionId;
metricsMap.put(groupName, metricsList);
}
}
@@ -10,8 +10,6 @@ import com.xiaojukeji.know.streaming.km.common.bean.entity.topic.Topic;
import com.xiaojukeji.know.streaming.km.common.bean.entity.version.VersionControlItem;
import com.xiaojukeji.know.streaming.km.common.bean.event.metric.PartitionMetricEvent;
import com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemTypeEnum;
import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
import com.xiaojukeji.know.streaming.km.common.utils.EnvUtil;
import com.xiaojukeji.know.streaming.km.common.utils.FutureWaitUtil;
import com.xiaojukeji.know.streaming.km.core.service.partition.PartitionMetricService;
import com.xiaojukeji.know.streaming.km.core.service.topic.TopicService;
@@ -29,7 +27,7 @@ import static com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemT
*/
@Component
public class PartitionMetricCollector extends AbstractMetricCollector<PartitionMetrics> {
protected static final ILog LOGGER = LogFactory.getLog("METRIC_LOGGER");
protected static final ILog LOGGER = LogFactory.getLog(PartitionMetricCollector.class);

@Autowired
private VersionControlService versionControlService;
@@ -41,14 +39,11 @@ public class PartitionMetricCollector extends AbstractMetricCollector<PartitionM
private TopicService topicService;

@Override
public void collectMetrics(ClusterPhy clusterPhy) {
Long startTime = System.currentTimeMillis();
public List<PartitionMetrics> collectKafkaMetrics(ClusterPhy clusterPhy) {
Long clusterPhyId = clusterPhy.getId();
List<Topic> topicList = topicService.listTopicsFromCacheFirst(clusterPhyId);
List<VersionControlItem> items = versionControlService.listVersionControlItem(clusterPhyId, collectorType().getCode());

// get all partitions of the cluster

FutureWaitUtil<Void> future = this.getFutureUtilByClusterPhyId(clusterPhyId);

Map<String, Map<Integer, PartitionMetrics>> metricsMap = new ConcurrentHashMap<>();
@@ -56,9 +51,9 @@ public class PartitionMetricCollector extends AbstractMetricCollector<PartitionM
metricsMap.put(topic.getTopicName(), new ConcurrentHashMap<>());

future.runnableTask(
String.format("method=PartitionMetricCollector||clusterPhyId=%d||topicName=%s", clusterPhyId, topic.getTopicName()),
String.format("class=PartitionMetricCollector||clusterPhyId=%d||topicName=%s", clusterPhyId, topic.getTopicName()),
30000,
() -> collectMetrics(clusterPhyId, topic.getTopicName(), metricsMap.get(topic.getTopicName()), items)
() -> this.collectMetrics(clusterPhyId, topic.getTopicName(), metricsMap.get(topic.getTopicName()), items)
);
}

@@ -69,10 +64,7 @@ public class PartitionMetricCollector extends AbstractMetricCollector<PartitionM

this.publishMetric(new PartitionMetricEvent(this, metricsList));

LOGGER.info(
"method=PartitionMetricCollector||clusterPhyId={}||startTime={}||costTime={}||msg=collect finished.",
clusterPhyId, startTime, System.currentTimeMillis() - startTime
);
return metricsList;
}

@Override
@@ -110,17 +102,9 @@ public class PartitionMetricCollector extends AbstractMetricCollector<PartitionM
PartitionMetrics allMetrics = metricsMap.get(subMetrics.getPartitionId());
allMetrics.putMetric(subMetrics.getMetrics());
}

if (!EnvUtil.isOnline()) {
LOGGER.info(
"class=PartitionMetricCollector||method=collectMetrics||clusterPhyId={}||topicName={}||metricName={}||metricValue={}!",
clusterPhyId, topicName, v.getName(), ConvertUtil.obj2Json(ret.getData())
);
}

} catch (Exception e) {
LOGGER.info(
"class=PartitionMetricCollector||method=collectMetrics||clusterPhyId={}||topicName={}||metricName={}||errMsg=exception",
"method=collectMetrics||clusterPhyId={}||topicName={}||metricName={}||errMsg=exception",
clusterPhyId, topicName, v.getName(), e
);
}
@@ -1,6 +1,5 @@
package com.xiaojukeji.know.streaming.km.collector.metric.kafka;

import com.alibaba.fastjson.JSON;
import com.didiglobal.logi.log.ILog;
import com.didiglobal.logi.log.LogFactory;
import com.xiaojukeji.know.streaming.km.collector.metric.AbstractMetricCollector;
@@ -12,7 +11,6 @@ import com.xiaojukeji.know.streaming.km.common.bean.entity.version.VersionContro
import com.xiaojukeji.know.streaming.km.common.bean.event.metric.ReplicaMetricEvent;
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemTypeEnum;
import com.xiaojukeji.know.streaming.km.common.utils.EnvUtil;
import com.xiaojukeji.know.streaming.km.common.utils.FutureWaitUtil;
import com.xiaojukeji.know.streaming.km.core.service.partition.PartitionService;
import com.xiaojukeji.know.streaming.km.core.service.replica.ReplicaMetricService;
@@ -30,7 +28,7 @@ import static com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemT
*/
@Component
public class ReplicaMetricCollector extends AbstractMetricCollector<ReplicationMetrics> {
protected static final ILog LOGGER = LogFactory.getLog("METRIC_LOGGER");
protected static final ILog LOGGER = LogFactory.getLog(ReplicaMetricCollector.class);

@Autowired
private VersionControlService versionControlService;
@@ -42,12 +40,10 @@ public class ReplicaMetricCollector extends AbstractMetricCollector<ReplicationM
private PartitionService partitionService;

@Override
public void collectMetrics(ClusterPhy clusterPhy) {
Long startTime = System.currentTimeMillis();
public List<ReplicationMetrics> collectKafkaMetrics(ClusterPhy clusterPhy) {
Long clusterPhyId = clusterPhy.getId();
List<VersionControlItem> items = versionControlService.listVersionControlItem(clusterPhyId, collectorType().getCode());

List<Partition> partitions = partitionService.listPartitionByCluster(clusterPhyId);
List<Partition> partitions = partitionService.listPartitionFromCacheFirst(clusterPhyId);

FutureWaitUtil<Void> future = this.getFutureUtilByClusterPhyId(clusterPhyId);

@@ -55,10 +51,11 @@ public class ReplicaMetricCollector extends AbstractMetricCollector<ReplicationM
for(Partition partition : partitions) {
for (Integer brokerId: partition.getAssignReplicaList()) {
ReplicationMetrics metrics = new ReplicationMetrics(clusterPhyId, partition.getTopicName(), brokerId, partition.getPartitionId());
metrics.putMetric(Constant.COLLECT_METRICS_COST_TIME_METRICS_NAME, Constant.COLLECT_METRICS_ERROR_COST_TIME);
metricsList.add(metrics);

future.runnableTask(
String.format("method=ReplicaMetricCollector||clusterPhyId=%d||brokerId=%d||topicName=%s||partitionId=%d",
String.format("class=ReplicaMetricCollector||clusterPhyId=%d||brokerId=%d||topicName=%s||partitionId=%d",
clusterPhyId, brokerId, partition.getTopicName(), partition.getPartitionId()),
30000,
() -> collectMetrics(clusterPhyId, metrics, items)
@@ -70,8 +67,7 @@ public class ReplicaMetricCollector extends AbstractMetricCollector<ReplicationM

publishMetric(new ReplicaMetricEvent(this, metricsList));

LOGGER.info("method=ReplicaMetricCollector||clusterPhyId={}||startTime={}||costTime={}||msg=collect finished.",
clusterPhyId, startTime, System.currentTimeMillis() - startTime);
return metricsList;
}

@Override
@@ -84,8 +80,6 @@ public class ReplicaMetricCollector extends AbstractMetricCollector<ReplicationM
private ReplicationMetrics collectMetrics(Long clusterPhyId, ReplicationMetrics metrics, List<VersionControlItem> items) {
long startTime = System.currentTimeMillis();

metrics.putMetric(Constant.COLLECT_METRICS_COST_TIME_METRICS_NAME, Constant.COLLECT_METRICS_ERROR_COST_TIME);

for(VersionControlItem v : items) {
try {
if (metrics.getMetrics().containsKey(v.getName())) {
@@ -105,15 +99,11 @@ public class ReplicaMetricCollector extends AbstractMetricCollector<ReplicationM
}

metrics.putMetric(ret.getData().getMetrics());

if (!EnvUtil.isOnline()) {
LOGGER.info("method=ReplicaMetricCollector||clusterPhyId={}||topicName={}||partitionId={}||metricName={}||metricValue={}",
clusterPhyId, metrics.getTopic(), metrics.getPartitionId(), v.getName(), JSON.toJSONString(ret.getData().getMetrics()));
}

} catch (Exception e) {
LOGGER.error("method=ReplicaMetricCollector||clusterPhyId={}||topicName={}||partition={}||metricName={}||errMsg=exception!",
clusterPhyId, metrics.getTopic(), metrics.getPartitionId(), v.getName(), e);
LOGGER.error(
"method=collectMetrics||clusterPhyId={}||topicName={}||partition={}||metricName={}||errMsg=exception!",
clusterPhyId, metrics.getTopic(), metrics.getPartitionId(), v.getName(), e
);
}
}
@@ -11,8 +11,6 @@ import com.xiaojukeji.know.streaming.km.common.bean.entity.version.VersionContro
import com.xiaojukeji.know.streaming.km.common.bean.event.metric.TopicMetricEvent;
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemTypeEnum;
import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
import com.xiaojukeji.know.streaming.km.common.utils.EnvUtil;
import com.xiaojukeji.know.streaming.km.common.utils.FutureWaitUtil;
import com.xiaojukeji.know.streaming.km.common.utils.ValidateUtils;
import com.xiaojukeji.know.streaming.km.core.service.topic.TopicMetricService;
@@ -32,8 +30,8 @@ import static com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemT
* @author didi
*/
@Component
public class TopicMetricCollector extends AbstractMetricCollector<List<TopicMetrics>> {
protected static final ILog LOGGER = LogFactory.getLog("METRIC_LOGGER");
public class TopicMetricCollector extends AbstractMetricCollector<TopicMetrics> {
protected static final ILog LOGGER = LogFactory.getLog(TopicMetricCollector.class);

@Autowired
private VersionControlService versionControlService;
@@ -47,8 +45,7 @@ public class TopicMetricCollector extends AbstractMetricCollector<List<TopicMetr
private static final Integer AGG_METRICS_BROKER_ID = -10000;

@Override
public void collectMetrics(ClusterPhy clusterPhy) {
Long startTime = System.currentTimeMillis();
public List<TopicMetrics> collectKafkaMetrics(ClusterPhy clusterPhy) {
Long clusterPhyId = clusterPhy.getId();
List<Topic> topics = topicService.listTopicsFromCacheFirst(clusterPhyId);
List<VersionControlItem> items = versionControlService.listVersionControlItem(clusterPhyId, collectorType().getCode());
@@ -65,7 +62,7 @@ public class TopicMetricCollector extends AbstractMetricCollector<List<TopicMetr
allMetricsMap.put(topic.getTopicName(), metricsMap);

future.runnableTask(
String.format("method=TopicMetricCollector||clusterPhyId=%d||topicName=%s", clusterPhyId, topic.getTopicName()),
String.format("class=TopicMetricCollector||clusterPhyId=%d||topicName=%s", clusterPhyId, topic.getTopicName()),
30000,
() -> collectMetrics(clusterPhyId, topic.getTopicName(), metricsMap, items)
);
@@ -78,8 +75,7 @@ public class TopicMetricCollector extends AbstractMetricCollector<List<TopicMetr

this.publishMetric(new TopicMetricEvent(this, metricsList));

LOGGER.info("method=TopicMetricCollector||clusterPhyId={}||startTime={}||costTime={}||msg=collect finished.",
clusterPhyId, startTime, System.currentTimeMillis() - startTime);
return metricsList;
}

@Override
@@ -119,14 +115,9 @@ public class TopicMetricCollector extends AbstractMetricCollector<List<TopicMetr
metricsMap.get(metrics.getBrokerId()).putMetric(metrics.getMetrics());
}
});

if (!EnvUtil.isOnline()) {
LOGGER.info("method=TopicMetricCollector||clusterPhyId={}||topicName={}||metricName={}||metricValue={}.",
clusterPhyId, topicName, v.getName(), ConvertUtil.obj2Json(ret.getData())
);
}
} catch (Exception e) {
LOGGER.error("method=TopicMetricCollector||clusterPhyId={}||topicName={}||metricName={}||errMsg=exception!",
LOGGER.error(
"method=collectMetrics||clusterPhyId={}||topicName={}||metricName={}||errMsg=exception!",
clusterPhyId, topicName, v.getName(), e
);
}
@@ -15,10 +15,8 @@ import com.xiaojukeji.know.streaming.km.common.bean.event.metric.ZookeeperMetric
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemTypeEnum;
import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
import com.xiaojukeji.know.streaming.km.common.utils.EnvUtil;
import com.xiaojukeji.know.streaming.km.common.bean.entity.zookeeper.ZookeeperInfo;
import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.ZookeeperMetrics;
import com.xiaojukeji.know.streaming.km.common.bean.po.metrice.ZookeeperMetricPO;
import com.xiaojukeji.know.streaming.km.core.service.kafkacontroller.KafkaControllerService;
import com.xiaojukeji.know.streaming.km.core.service.version.VersionControlService;
import com.xiaojukeji.know.streaming.km.core.service.zookeeper.ZookeeperMetricService;
@@ -26,7 +24,7 @@ import com.xiaojukeji.know.streaming.km.core.service.zookeeper.ZookeeperService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;

@@ -36,8 +34,8 @@ import static com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemT
* @author didi
*/
@Component
public class ZookeeperMetricCollector extends AbstractMetricCollector<ZookeeperMetricPO> {
protected static final ILog LOGGER = LogFactory.getLog("METRIC_LOGGER");
public class ZookeeperMetricCollector extends AbstractMetricCollector<ZookeeperMetrics> {
protected static final ILog LOGGER = LogFactory.getLog(ZookeeperMetricCollector.class);

@Autowired
private VersionControlService versionControlService;
@@ -52,7 +50,7 @@ public class ZookeeperMetricCollector extends AbstractMetricCollector<ZookeeperM
private KafkaControllerService kafkaControllerService;

@Override
public void collectMetrics(ClusterPhy clusterPhy) {
public List<ZookeeperMetrics> collectKafkaMetrics(ClusterPhy clusterPhy) {
Long startTime = System.currentTimeMillis();
Long clusterPhyId = clusterPhy.getId();
List<VersionControlItem> items = versionControlService.listVersionControlItem(clusterPhyId, collectorType().getCode());
@@ -62,11 +60,11 @@ public class ZookeeperMetricCollector extends AbstractMetricCollector<ZookeeperM
.collect(Collectors.toList());
KafkaController kafkaController = kafkaControllerService.getKafkaControllerFromDB(clusterPhyId);

ZookeeperMetrics metrics = ZookeeperMetrics.initWithMetric(clusterPhyId, Constant.COLLECT_METRICS_COST_TIME_METRICS_NAME, (float)Constant.INVALID_CODE);
ZookeeperMetrics metrics = ZookeeperMetrics.initWithMetric(clusterPhyId, Constant.COLLECT_METRICS_COST_TIME_METRICS_NAME, Constant.COLLECT_METRICS_ERROR_COST_TIME);
if (ValidateUtils.isEmptyList(aliveZKList)) {
// if no ZK node is alive, publish the event and return directly
publishMetric(new ZookeeperMetricEvent(this, Arrays.asList(metrics)));
return;
publishMetric(new ZookeeperMetricEvent(this, Collections.singletonList(metrics)));
return Collections.singletonList(metrics);
}

// build the request parameter
@@ -83,6 +81,7 @@ public class ZookeeperMetricCollector extends AbstractMetricCollector<ZookeeperM
if(null != metrics.getMetrics().get(v.getName())) {
continue;
}

param.setMetricName(v.getName());

Result<ZookeeperMetrics> ret = zookeeperMetricService.collectMetricsFromZookeeper(param);
@@ -91,16 +90,9 @@ public class ZookeeperMetricCollector extends AbstractMetricCollector<ZookeeperM
}

metrics.putMetric(ret.getData().getMetrics());

if(!EnvUtil.isOnline()){
LOGGER.info(
"class=ZookeeperMetricCollector||method=collectMetrics||clusterPhyId={}||metricName={}||metricValue={}",
clusterPhyId, v.getName(), ConvertUtil.obj2Json(ret.getData().getMetrics())
);
}
} catch (Exception e){
LOGGER.error(
"class=ZookeeperMetricCollector||method=collectMetrics||clusterPhyId={}||metricName={}||errMsg=exception!",
"method=collectMetrics||clusterPhyId={}||metricName={}||errMsg=exception!",
clusterPhyId, v.getName(), e
);
}
@@ -108,12 +100,9 @@ public class ZookeeperMetricCollector extends AbstractMetricCollector<ZookeeperM

metrics.putMetric(Constant.COLLECT_METRICS_COST_TIME_METRICS_NAME, (System.currentTimeMillis() - startTime) / 1000.0f);

publishMetric(new ZookeeperMetricEvent(this, Arrays.asList(metrics)));
this.publishMetric(new ZookeeperMetricEvent(this, Collections.singletonList(metrics)));

LOGGER.info(
"class=ZookeeperMetricCollector||method=collectMetrics||clusterPhyId={}||startTime={}||costTime={}||msg=msg=collect finished.",
clusterPhyId, startTime, System.currentTimeMillis() - startTime
);
return Collections.singletonList(metrics);
}

@Override
@@ -3,67 +3,47 @@ package com.xiaojukeji.know.streaming.km.collector.sink;
import com.didiglobal.logi.log.ILog;
import com.didiglobal.logi.log.LogFactory;
import com.xiaojukeji.know.streaming.km.common.bean.po.BaseESPO;
import com.xiaojukeji.know.streaming.km.common.utils.EnvUtil;
import com.xiaojukeji.know.streaming.km.common.utils.NamedThreadFactory;
import com.xiaojukeji.know.streaming.km.common.utils.FutureUtil;
import com.xiaojukeji.know.streaming.km.persistence.es.dao.BaseMetricESDAO;
import org.apache.commons.collections.CollectionUtils;

import java.util.List;
import java.util.Objects;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public abstract class AbstractMetricESSender {
protected static final ILog LOGGER = LogFactory.getLog("METRIC_LOGGER");
private static final ILog LOGGER = LogFactory.getLog(AbstractMetricESSender.class);

private static final int THRESHOLD = 100;
private static final int THRESHOLD = 100;

private static final ThreadPoolExecutor esExecutor = new ThreadPoolExecutor(
private static final FutureUtil<Void> esExecutor = FutureUtil.init(
"MetricsESSender",
10,
20,
6000,
TimeUnit.MILLISECONDS,
new LinkedBlockingDeque<>(1000),
new NamedThreadFactory("KM-Collect-MetricESSender-ES"),
(r, e) -> LOGGER.warn("class=MetricESSender||msg=KM-Collect-MetricESSender-ES Deque is blocked, taskCount:{}" + e.getTaskCount())
10000
);

/**
* Send to ES according to the monitoring dimension
*/
protected boolean send2es(String index, List<? extends BaseESPO> statsList){
protected boolean send2es(String index, List<? extends BaseESPO> statsList) {
LOGGER.info("method=send2es||indexName={}||metricsSize={}||msg=send metrics to es", index, statsList.size());

if (CollectionUtils.isEmpty(statsList)) {
return true;
}

if (!EnvUtil.isOnline()) {
LOGGER.info("class=MetricESSender||method=send2es||ariusStats={}||size={}",
index, statsList.size());
}

BaseMetricESDAO baseMetricESDao = BaseMetricESDAO.getByStatsType(index);
if (Objects.isNull( baseMetricESDao )) {
LOGGER.error("class=MetricESSender||method=send2es||errMsg=fail to find {}", index);
if (Objects.isNull(baseMetricESDao)) {
LOGGER.error("method=send2es||indexName={}||errMsg=find dao failed", index);
return false;
}

int size = statsList.size();
int num = (size) % THRESHOLD == 0 ? (size / THRESHOLD) : (size / THRESHOLD + 1);
for (int i = 0; i < statsList.size(); i += THRESHOLD) {
final int idxStart = i;

if (size < THRESHOLD) {
esExecutor.execute(
() -> baseMetricESDao.batchInsertStats(statsList)
);
return true;
}

for (int i = 1; i < num + 1; i++) {
int end = (i * THRESHOLD) > size ? size : (i * THRESHOLD);
int start = (i - 1) * THRESHOLD;

esExecutor.execute(
() -> baseMetricESDao.batchInsertStats(statsList.subList(start, end))
// send asynchronously
esExecutor.submitTask(
() -> baseMetricESDao.batchInsertStats(statsList.subList(idxStart, Math.min(idxStart + THRESHOLD, statsList.size())))
);
}
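The rewritten send2es above replaces the hand-built ThreadPoolExecutor with the project's FutureUtil and submits the stats list to Elasticsearch in THRESHOLD-sized batches. The batching itself is plain list slicing; below is a self-contained sketch of that slicing, with a standard ExecutorService standing in for the project-specific FutureUtil and a placeholder print standing in for batchInsertStats (all names here are illustrative, not the project's API).

    import java.util.List;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    // Illustrative only: FutureUtil is project-specific, so a plain ExecutorService stands in for it here.
    class BatchSendSketch {
        private static final int THRESHOLD = 100;
        private static final ExecutorService EXECUTOR = Executors.newFixedThreadPool(4);

        static <T> void sendInBatches(List<T> statsList) {
            for (int i = 0; i < statsList.size(); i += THRESHOLD) {
                final int start = i;
                final int end = Math.min(start + THRESHOLD, statsList.size());
                // Each batch is submitted asynchronously, mirroring esExecutor.submitTask(...) in the diff above.
                EXECUTOR.execute(() -> System.out.println("would insert " + statsList.subList(start, end).size() + " docs"));
            }
        }
    }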
@@ -14,11 +14,11 @@ import static com.xiaojukeji.know.streaming.km.common.constant.ESIndexConstant.B

@Component
public class BrokerMetricESSender extends AbstractMetricESSender implements ApplicationListener<BrokerMetricEvent> {
protected static final ILog LOGGER = LogFactory.getLog("METRIC_LOGGER");
private static final ILog LOGGER = LogFactory.getLog(BrokerMetricESSender.class);

@PostConstruct
public void init(){
LOGGER.info("class=BrokerMetricESSender||method=init||msg=init finished");
LOGGER.info("method=init||msg=init finished");
}

@Override

@@ -15,11 +15,11 @@ import static com.xiaojukeji.know.streaming.km.common.constant.ESIndexConstant.C

@Component
public class ClusterMetricESSender extends AbstractMetricESSender implements ApplicationListener<ClusterMetricEvent> {
protected static final ILog LOGGER = LogFactory.getLog("METRIC_LOGGER");
private static final ILog LOGGER = LogFactory.getLog(ClusterMetricESSender.class);

@PostConstruct
public void init(){
LOGGER.info("class=ClusterMetricESSender||method=init||msg=init finished");
LOGGER.info("method=init||msg=init finished");
}

@Override

@@ -15,11 +15,11 @@ import static com.xiaojukeji.know.streaming.km.common.constant.ESIndexConstant.G

@Component
public class GroupMetricESSender extends AbstractMetricESSender implements ApplicationListener<GroupMetricEvent> {
protected static final ILog LOGGER = LogFactory.getLog("METRIC_LOGGER");
private static final ILog LOGGER = LogFactory.getLog(GroupMetricESSender.class);

@PostConstruct
public void init(){
LOGGER.info("class=GroupMetricESSender||method=init||msg=init finished");
LOGGER.info("method=init||msg=init finished");
}

@Override

@@ -14,11 +14,11 @@ import static com.xiaojukeji.know.streaming.km.common.constant.ESIndexConstant.P

@Component
public class PartitionMetricESSender extends AbstractMetricESSender implements ApplicationListener<PartitionMetricEvent> {
protected static final ILog LOGGER = LogFactory.getLog("METRIC_LOGGER");
private static final ILog LOGGER = LogFactory.getLog(PartitionMetricESSender.class);

@PostConstruct
public void init(){
LOGGER.info("class=PartitionMetricESSender||method=init||msg=init finished");
LOGGER.info("method=init||msg=init finished");
}

@Override

@@ -14,11 +14,11 @@ import static com.xiaojukeji.know.streaming.km.common.constant.ESIndexConstant.R

@Component
public class ReplicaMetricESSender extends AbstractMetricESSender implements ApplicationListener<ReplicaMetricEvent> {
protected static final ILog LOGGER = LogFactory.getLog("METRIC_LOGGER");
private static final ILog LOGGER = LogFactory.getLog(ReplicaMetricESSender.class);

@PostConstruct
public void init(){
LOGGER.info("class=GroupMetricESSender||method=init||msg=init finished");
LOGGER.info("method=init||msg=init finished");
}

@Override

@@ -15,11 +15,11 @@ import static com.xiaojukeji.know.streaming.km.common.constant.ESIndexConstant.T

@Component
public class TopicMetricESSender extends AbstractMetricESSender implements ApplicationListener<TopicMetricEvent> {
protected static final ILog LOGGER = LogFactory.getLog("METRIC_LOGGER");
private static final ILog LOGGER = LogFactory.getLog(TopicMetricESSender.class);

@PostConstruct
public void init(){
LOGGER.info("class=TopicMetricESSender||method=init||msg=init finished");
LOGGER.info("method=init||msg=init finished");
}

@Override

@@ -14,11 +14,11 @@ import static com.xiaojukeji.know.streaming.km.common.constant.ESIndexConstant.Z

@Component
public class ZookeeperMetricESSender extends AbstractMetricESSender implements ApplicationListener<ZookeeperMetricEvent> {
protected static final ILog LOGGER = LogFactory.getLog("METRIC_LOGGER");
private static final ILog LOGGER = LogFactory.getLog(ZookeeperMetricESSender.class);

@PostConstruct
public void init(){
LOGGER.info("class=ZookeeperMetricESSender||method=init||msg=init finished");
LOGGER.info("method=init||msg=init finished");
}

@Override