[Optimize] Improve log output & unify local cache management (#800)
@@ -3,6 +3,7 @@ package com.xiaojukeji.know.streaming.km.core.cache;
import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.ClusterMetrics;
import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.TopicMetrics;
import com.xiaojukeji.know.streaming.km.common.bean.entity.partition.Partition;

import java.util.List;
@@ -10,6 +11,11 @@ import java.util.Map;
import java.util.concurrent.TimeUnit;

public class DataBaseDataLocalCache {
    private static final Cache<Long, Map<String, TopicMetrics>> topicLatestMetricsCache = Caffeine.newBuilder()
            .expireAfterWrite(360, TimeUnit.SECONDS)
            .maximumSize(500)
            .build();

    private static final Cache<Long, ClusterMetrics> clusterLatestMetricsCache = Caffeine.newBuilder()
            .expireAfterWrite(180, TimeUnit.SECONDS)
            .maximumSize(500)
@@ -20,6 +26,14 @@ public class DataBaseDataLocalCache {
            .maximumSize(500)
            .build();

    public static Map<String, TopicMetrics> getTopicMetrics(Long clusterPhyId) {
        return topicLatestMetricsCache.getIfPresent(clusterPhyId);
    }

    public static void putTopicMetrics(Long clusterPhyId, Map<String, TopicMetrics> metricsMap) {
        topicLatestMetricsCache.put(clusterPhyId, metricsMap);
    }

    public static ClusterMetrics getClusterLatestMetrics(Long clusterPhyId) {
        return clusterLatestMetricsCache.getIfPresent(clusterPhyId);
    }
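
The hunks above centralize the topic-metrics cache in DataBaseDataLocalCache. For reference, a minimal self-contained sketch of the Caffeine behavior this relies on; the class name and the String value type below are illustrative, only the builder settings mirror the commit. getIfPresent returns null on a miss or once the 360-second expireAfterWrite window has passed, which is why callers fall back to an empty map.

    import com.github.benmanes.caffeine.cache.Cache;
    import com.github.benmanes.caffeine.cache.Caffeine;

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;
    import java.util.concurrent.TimeUnit;

    public class TopicMetricsCacheSketch {
        // Same settings as topicLatestMetricsCache above
        private static final Cache<Long, Map<String, String>> cache = Caffeine.newBuilder()
                .expireAfterWrite(360, TimeUnit.SECONDS)   // entries are dropped 360s after the last put
                .maximumSize(500)                          // bounded: at most 500 clusters retained
                .build();

        public static void main(String[] args) {
            cache.put(1L, Collections.singletonMap("topic-a", "latest metrics"));

            Map<String, String> hit = cache.getIfPresent(1L);    // the map written above
            Map<String, String> miss = cache.getIfPresent(2L);   // null: unknown key or expired entry

            Map<String, String> safe = (miss == null) ? new HashMap<>() : miss;
            System.out.println(hit + " / " + safe);
        }
    }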
@@ -4,13 +4,18 @@ import com.didiglobal.logi.log.ILog;
import com.didiglobal.logi.log.LogFactory;
import com.xiaojukeji.know.streaming.km.common.bean.entity.cluster.ClusterPhy;
import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.ClusterMetrics;
import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.TopicMetrics;
import com.xiaojukeji.know.streaming.km.common.bean.entity.partition.Partition;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
import com.xiaojukeji.know.streaming.km.common.bean.entity.topic.Topic;
import com.xiaojukeji.know.streaming.km.common.utils.FutureUtil;
import com.xiaojukeji.know.streaming.km.core.cache.DataBaseDataLocalCache;
import com.xiaojukeji.know.streaming.km.core.service.cluster.ClusterMetricService;
import com.xiaojukeji.know.streaming.km.core.service.cluster.ClusterPhyService;
import com.xiaojukeji.know.streaming.km.core.service.partition.PartitionService;
import com.xiaojukeji.know.streaming.km.core.service.topic.TopicMetricService;
import com.xiaojukeji.know.streaming.km.core.service.topic.TopicService;
import com.xiaojukeji.know.streaming.km.persistence.cache.LoadedClusterPhyCache;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;
@@ -18,11 +23,19 @@ import org.springframework.stereotype.Service;
import javax.annotation.PostConstruct;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;
import java.util.stream.Collectors;

@Service
public class DatabaseDataFlusher {
    private static final ILog LOGGER = LogFactory.getLog(DatabaseDataFlusher.class);

    @Autowired
    private TopicService topicService;

    @Autowired
    private TopicMetricService topicMetricService;

    @Autowired
    private ClusterPhyService clusterPhyService;

@@ -37,6 +50,8 @@ public class DatabaseDataFlusher {
        this.flushPartitionsCache();

        this.flushClusterLatestMetricsCache();

        this.flushTopicLatestMetricsCache();
    }

    @Scheduled(cron="0 0/1 * * * ?")
@@ -81,4 +96,27 @@ public class DatabaseDataFlusher {
            });
        }
    }

    @Scheduled(cron = "0 0/1 * * * ?")
    private void flushTopicLatestMetricsCache() {
        for (ClusterPhy clusterPhy: LoadedClusterPhyCache.listAll().values()) {
            FutureUtil.quickStartupFutureUtil.submitTask(() -> {
                try {

                    List<String> topicNameList = topicService.listTopicsFromCacheFirst(clusterPhy.getId()).stream().map(Topic::getTopicName).collect(Collectors.toList());

                    List<TopicMetrics> metricsList = topicMetricService.listTopicLatestMetricsFromES(clusterPhy.getId(), topicNameList, Collections.emptyList());

                    Map<String, TopicMetrics> metricsMap = metricsList
                            .stream()
                            .collect(Collectors.toMap(TopicMetrics::getTopic, Function.identity()));

                    DataBaseDataLocalCache.putTopicMetrics(clusterPhy.getId(), metricsMap);

                } catch (Exception e) {
                    LOGGER.error("method=flushTopicLatestMetricsCache||clusterPhyId={}||errMsg=exception!", clusterPhy.getId(), e);
                }
            });
        }
    }
}
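
flushTopicLatestMetricsCache runs once a minute (cron "0 0/1 * * * ?") and submits one asynchronous task per loaded cluster, so a slow ES query for one cluster does not delay the others; with cache entries expiring 360 seconds after write, several missed runs are tolerated before readers see a miss. One detail worth noting: Collectors.toMap without a merge function throws IllegalStateException on duplicate keys. Topic names are unique within a cluster, so this should not trigger, but a defensive variant (illustrative only, not part of the commit) could pass one:

    // requires java.util.stream.Collectors and java.util.function.Function
    Map<String, TopicMetrics> metricsMap = metricsList.stream()
            .collect(Collectors.toMap(TopicMetrics::getTopic, Function.identity(), (older, newer) -> newer));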
@@ -17,4 +17,5 @@ public interface PlatformClusterConfigService {

    Map<String, PlatformClusterConfigPO> getByClusterAndGroupWithoutDefault(Long clusterPhyId, String group);

    Map<Long, Map<String, PlatformClusterConfigPO>> listByGroup(String groupName);
}
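
A possible caller-side use of the new listByGroup method; the group name and value name below are placeholders, not values taken from this commit:

    Map<Long, Map<String, PlatformClusterConfigPO>> byCluster = platformClusterConfigService.listByGroup("SOME_GROUP");   // placeholder group name
    PlatformClusterConfigPO po = byCluster
            .getOrDefault(clusterPhyId, Collections.emptyMap())
            .get("someValueName");   // placeholder value name; null when the cluster has no such config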
@@ -12,6 +12,7 @@ import com.xiaojukeji.know.streaming.km.persistence.mysql.config.PlatformCluster
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
@@ -68,4 +69,20 @@ public class PlatformClusterConfigServiceImpl implements PlatformClusterConfigSe

        return configPOMap;
    }

    @Override
    public Map<Long, Map<String, PlatformClusterConfigPO>> listByGroup(String groupName) {
        LambdaQueryWrapper<PlatformClusterConfigPO> lambdaQueryWrapper = new LambdaQueryWrapper<>();
        lambdaQueryWrapper.eq(PlatformClusterConfigPO::getValueGroup, groupName);

        List<PlatformClusterConfigPO> poList = platformClusterConfigDAO.selectList(lambdaQueryWrapper);

        Map<Long, Map<String, PlatformClusterConfigPO>> poMap = new HashMap<>();
        poList.forEach(elem -> {
            poMap.putIfAbsent(elem.getClusterId(), new HashMap<>());
            poMap.get(elem.getClusterId()).put(elem.getValueName(), elem);
        });

        return poMap;
    }
}
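
The implementation groups rows with forEach plus putIfAbsent. An equivalent stream-based form (illustrative only; it assumes each clusterId/valueName pair occurs at most once, otherwise a merge function would be needed, whereas the forEach version silently lets later rows overwrite earlier ones) would be:

    // requires java.util.stream.Collectors and java.util.function.Function
    Map<Long, Map<String, PlatformClusterConfigPO>> poMap = poList.stream()
            .collect(Collectors.groupingBy(
                    PlatformClusterConfigPO::getClusterId,
                    Collectors.toMap(PlatformClusterConfigPO::getValueName, Function.identity())));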
@@ -23,7 +23,7 @@ public interface TopicMetricService {
    /**
     * Get the metrics from the local cache first
     */
    Map<String, TopicMetrics> getLatestMetricsFromCacheFirst(Long clusterPhyId);
    Map<String, TopicMetrics> getLatestMetricsFromCache(Long clusterPhyId);

    /**
     * Get the latest metric of a Topic on a specific Broker
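
Caller-side sketch of the renamed method. As the implementation change below shows, a cache miss now yields an empty map instead of triggering a synchronous load, so a topic that has not been flushed yet simply has no entry (variable names here are illustrative):

    Map<String, TopicMetrics> metricsMap = topicMetricService.getLatestMetricsFromCache(clusterPhyId);
    TopicMetrics metrics = metricsMap.get(topicName);   // null until the scheduled flusher has populated the cache
    if (metrics != null) {
        // use the latest cached metrics
    }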
@@ -2,13 +2,10 @@ package com.xiaojukeji.know.streaming.km.core.service.topic.impl;

import com.didiglobal.logi.log.ILog;
import com.didiglobal.logi.log.LogFactory;
import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
import com.google.common.collect.Table;
import com.xiaojukeji.know.streaming.km.common.bean.dto.metrices.MetricDTO;
import com.xiaojukeji.know.streaming.km.common.bean.dto.metrices.MetricsTopicDTO;
import com.xiaojukeji.know.streaming.km.common.bean.entity.broker.Broker;
import com.xiaojukeji.know.streaming.km.common.bean.entity.cluster.ClusterPhy;
import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.PartitionMetrics;
import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.TopicMetrics;
import com.xiaojukeji.know.streaming.km.common.bean.entity.param.VersionItemParam;
@@ -30,25 +27,22 @@ import com.xiaojukeji.know.streaming.km.common.utils.BeanUtil;
import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
import com.xiaojukeji.know.streaming.km.common.utils.ValidateUtils;
import com.xiaojukeji.know.streaming.km.core.cache.CollectedMetricsLocalCache;
import com.xiaojukeji.know.streaming.km.core.cache.DataBaseDataLocalCache;
import com.xiaojukeji.know.streaming.km.core.service.broker.BrokerService;
import com.xiaojukeji.know.streaming.km.core.service.health.state.HealthStateService;
import com.xiaojukeji.know.streaming.km.core.service.partition.PartitionMetricService;
import com.xiaojukeji.know.streaming.km.core.service.topic.TopicMetricService;
import com.xiaojukeji.know.streaming.km.core.service.topic.TopicService;
import com.xiaojukeji.know.streaming.km.core.service.version.BaseMetricService;
import com.xiaojukeji.know.streaming.km.persistence.cache.LoadedClusterPhyCache;
import com.xiaojukeji.know.streaming.km.persistence.es.dao.TopicMetricESDAO;
import com.xiaojukeji.know.streaming.km.persistence.kafka.KafkaJMXClient;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;

import javax.management.InstanceNotFoundException;
import javax.management.ObjectName;
import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;
import java.util.stream.Collectors;

import static com.xiaojukeji.know.streaming.km.common.bean.entity.result.ResultStatus.*;
@@ -58,8 +52,7 @@ import static com.xiaojukeji.know.streaming.km.core.service.version.metrics.kafk
 */
@Service
public class TopicMetricServiceImpl extends BaseMetricService implements TopicMetricService {

    private static final ILog LOGGER = LogFactory.getLog( TopicMetricServiceImpl.class);
    private static final ILog LOGGER = LogFactory.getLog(TopicMetricServiceImpl.class);

    public static final String TOPIC_METHOD_DO_NOTHING = "doNothing";
    public static final String TOPIC_METHOD_GET_HEALTH_SCORE = "getMetricHealthScore";
@@ -86,18 +79,6 @@ public class TopicMetricServiceImpl extends BaseMetricService implements TopicMe
    @Autowired
    private TopicMetricESDAO topicMetricESDAO;

    private final Cache<Long, Map<String, TopicMetrics>> topicLatestMetricsCache = Caffeine.newBuilder()
            .expireAfterWrite(5, TimeUnit.MINUTES)
            .maximumSize(200)
            .build();

    @Scheduled(cron = "0 0/2 * * * ?")
    private void flushClusterLatestMetricsCache() {
        for (ClusterPhy clusterPhy: LoadedClusterPhyCache.listAll().values()) {
            this.updateCacheAndGetMetrics(clusterPhy.getId());
        }
    }

    @Override
    protected VersionItemTypeEnum getVersionItemType() {
        return VersionItemTypeEnum.METRIC_TOPIC;
@@ -152,13 +133,13 @@ public class TopicMetricServiceImpl extends BaseMetricService implements TopicMe
    }

    @Override
    public Map<String, TopicMetrics> getLatestMetricsFromCacheFirst(Long clusterPhyId) {
        Map<String, TopicMetrics> metricsMap = topicLatestMetricsCache.getIfPresent(clusterPhyId);
        if (metricsMap != null) {
            return metricsMap;
    public Map<String, TopicMetrics> getLatestMetricsFromCache(Long clusterPhyId) {
        Map<String, TopicMetrics> metricsMap = DataBaseDataLocalCache.getTopicMetrics(clusterPhyId);
        if (metricsMap == null) {
            return new HashMap<>();
        }

        return this.updateCacheAndGetMetrics(clusterPhyId);
        return metricsMap;
    }

    @Override
@@ -308,19 +289,8 @@ public class TopicMetricServiceImpl extends BaseMetricService implements TopicMe
        return Result.buildSuc(count);
    }


    /**************************************************** private method ****************************************************/
    private Map<String, TopicMetrics> updateCacheAndGetMetrics(Long clusterPhyId) {
        List<String> topicNames = topicService.listTopicsFromDB(clusterPhyId)
                .stream().map(Topic::getTopicName).collect(Collectors.toList());

        List<TopicMetrics> metrics = listTopicLatestMetricsFromES(clusterPhyId, topicNames, Arrays.asList());

        Map<String, TopicMetrics> metricsMap = metrics.stream()
                .collect(Collectors.toMap(TopicMetrics::getTopic, Function.identity()));

        topicLatestMetricsCache.put(clusterPhyId, metricsMap);
        return metricsMap;
    }


    private List<String> listTopNTopics(Long clusterId, int topN){
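
Taken together, this class moves from a read-through cache (a miss triggered a synchronous topic-list query plus an ES lookup via updateCacheAndGetMetrics) to a push model in which DatabaseDataFlusher keeps DataBaseDataLocalCache warm and getLatestMetricsFromCache only reads. A compressed before/after sketch, with hypothetical method and field names rather than literal code from the commit:

    // Before: read-through; a miss runs topic-list + ES queries on the caller's thread
    Map<String, TopicMetrics> before(Long clusterPhyId) {
        Map<String, TopicMetrics> cached = privateCache.getIfPresent(clusterPhyId);   // per-service Caffeine cache
        return cached != null ? cached : loadFromEsAndFillCache(clusterPhyId);        // blocking load on a miss
    }

    // After: read-only view of the shared cache that the scheduled flusher refills every minute
    Map<String, TopicMetrics> after(Long clusterPhyId) {
        Map<String, TopicMetrics> cached = DataBaseDataLocalCache.getTopicMetrics(clusterPhyId);
        return cached != null ? cached : new HashMap<>();   // empty until the next flush
    }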