Merge branch 'dev' into dev_v2.6.0

Committed by kingdomrushing on 2022-01-13 11:39:50 +08:00 (committed via GitHub).
93 changed files with 1441 additions and 408 deletions


@@ -30,16 +30,23 @@ public class CollectAndPublishCommunityTopicMetrics extends AbstractScheduledTask<ClusterDO> {
    @Override
    protected List<ClusterDO> listAllTasks() {
        // Get the list of clusters whose metrics need collecting; these clusters are split across multiple KM instances for execution.
        return clusterService.list();
    }

    @Override
    public void processTask(ClusterDO clusterDO) {
        // Topic-metrics collection logic for the given cluster goes here.
        // Fetch the topic metrics
        List<TopicMetrics> metricsList = getTopicMetrics(clusterDO.getId());

        // Once the topic traffic metrics have been fetched, publish an event
        SpringTool.publish(new TopicMetricsCollectedEvent(this, clusterDO.getId(), metricsList));
    }

    private List<TopicMetrics> getTopicMetrics(Long clusterId) {
        // Entry point for actually fetching the topic traffic metrics
        List<TopicMetrics> metricsList =
                jmxService.getTopicMetrics(clusterId, KafkaMetricsCollections.TOPIC_METRICS_TO_DB, true);
        if (ValidateUtils.isEmptyList(metricsList)) {
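The collect step here is decoupled from persistence through a Spring application event: processTask publishes TopicMetricsCollectedEvent, and StoreCommunityTopicMetrics2DB (further down in this commit) consumes it. For orientation, a minimal sketch of what the event class presumably looks like; it is not part of this diff, so its exact shape is an assumption inferred from the constructor call above and the getClusterId() usage in the listener:

    import java.util.List;

    import org.springframework.context.ApplicationEvent;

    // Sketch only: the field set is inferred from usage in this commit, not copied from the repo.
    public class TopicMetricsCollectedEvent extends ApplicationEvent {
        private final Long clusterId;
        private final List<TopicMetrics> metricsList;

        public TopicMetricsCollectedEvent(Object source, Long clusterId, List<TopicMetrics> metricsList) {
            super(source);
            this.clusterId = clusterId;
            this.metricsList = metricsList;
        }

        public Long getClusterId() {
            return clusterId;
        }

        public List<TopicMetrics> getMetricsList() {
            return metricsList;
        }
    }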


@@ -1,15 +1,15 @@
package com.xiaojukeji.kafka.manager.task.dispatch.metrics.delete;

import com.xiaojukeji.kafka.manager.common.constant.Constant;
import com.xiaojukeji.kafka.manager.common.constant.LogConstant;
+import com.xiaojukeji.kafka.manager.common.utils.BackoffUtils;
import com.xiaojukeji.kafka.manager.dao.*;
-import com.xiaojukeji.kafka.manager.service.utils.ConfigUtils;
import com.xiaojukeji.kafka.manager.task.component.AbstractScheduledTask;
import com.xiaojukeji.kafka.manager.task.component.CustomScheduled;
import com.xiaojukeji.kafka.manager.task.component.EmptyEntry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;

import java.util.Arrays;
import java.util.Date;
@@ -22,10 +22,7 @@ import java.util.List;
 */
@CustomScheduled(name = "deleteMetrics", cron = "0 0/2 * * * ?", threadNum = 1)
public class DeleteMetrics extends AbstractScheduledTask<EmptyEntry> {
-    private final static Logger LOGGER = LoggerFactory.getLogger(LogConstant.SCHEDULED_TASK_LOGGER);
-
-    @Autowired
-    private ConfigUtils configUtils;
+    private static final Logger LOGGER = LoggerFactory.getLogger(LogConstant.SCHEDULED_TASK_LOGGER);

    @Autowired
    private TopicMetricsDao topicMetricsDao;
@@ -45,6 +42,27 @@ public class DeleteMetrics extends AbstractScheduledTask<EmptyEntry> {
    @Autowired
    private TopicThrottledMetricsDao topicThrottledMetricsDao;

+    @Value(value = "${task.metrics.delete-metrics.delete-limit-size:1000}")
+    private Integer deleteLimitSize;
+
+    @Value(value = "${task.metrics.delete-metrics.cluster-metrics-save-days:14}")
+    private Integer clusterMetricsSaveDays;
+
+    @Value(value = "${task.metrics.delete-metrics.broker-metrics-save-days:14}")
+    private Integer brokerMetricsSaveDays;
+
+    @Value(value = "${task.metrics.delete-metrics.topic-metrics-save-days:7}")
+    private Integer topicMetricsSaveDays;
+
+    @Value(value = "${task.metrics.delete-metrics.topic-request-time-metrics-save-days:7}")
+    private Integer topicRequestTimeMetricsSaveDays;
+
+    @Value(value = "${task.metrics.delete-metrics.topic-throttled-metrics-save-days:7}")
+    private Integer topicThrottledMetricsSaveDays;
+
+    @Value(value = "${task.metrics.delete-metrics.app-topic-metrics-save-days:7}")
+    private Integer appTopicMetricsSaveDays;
+
    @Override
    public List<EmptyEntry> listAllTasks() {
        EmptyEntry emptyEntry = new EmptyEntry();
@@ -54,78 +72,73 @@ public class DeleteMetrics extends AbstractScheduledTask<EmptyEntry> {
    @Override
    public void processTask(EmptyEntry entryEntry) {
-        if (Constant.INVALID_CODE.equals(configUtils.getMaxMetricsSaveDays())) {
-            // No data needs deleting
-            return;
-        }
-
        long startTime = System.currentTimeMillis();
        LOGGER.info("start delete metrics");

-        try {
-            deleteTopicMetrics();
-        } catch (Exception e) {
-            LOGGER.error("delete topic metrics failed.", e);
-        }
+        // The data volume may be large, so run the delete several times per trigger
+        for (int i = 0; i < 10; ++i) {
+            try {
+                boolean needReDelete = this.deleteCommunityTopicMetrics();
+                if (!needReDelete) {
+                    break;
+                }
+
+                // Pause for 1000 ms so the deletes do not hit the DB too fast
+                BackoffUtils.backoff(1000);
+            } catch (Exception e) {
+                LOGGER.error("delete community topic metrics failed.", e);
+            }
+        }
+
+        // The data volume may be large, so run the delete several times per trigger
+        for (int i = 0; i < 10; ++i) {
+            try {
+                boolean needReDelete = this.deleteDiDiTopicMetrics();
+                if (!needReDelete) {
+                    break;
+                }
+
+                // Pause for 1000 ms so the deletes do not hit the DB too fast
+                BackoffUtils.backoff(1000);
+            } catch (Exception e) {
+                LOGGER.error("delete didi topic metrics failed.", e);
+            }
+        }

        try {
-            deleteTopicAppMetrics();
+            this.deleteClusterBrokerMetrics();
        } catch (Exception e) {
-            LOGGER.error("delete topic app metrics failed.", e);
+            LOGGER.error("delete cluster and broker metrics failed.", e);
        }

-        try {
-            deleteTopicRequestMetrics();
-        } catch (Exception e) {
-            LOGGER.error("delete topic request metrics failed.", e);
-        }
-
-        try {
-            deleteThrottledMetrics();
-        } catch (Exception e) {
-            LOGGER.error("delete topic throttled metrics failed.", e);
-        }
-
-        try {
-            deleteBrokerMetrics();
-        } catch (Exception e) {
-            LOGGER.error("delete broker metrics failed.", e);
-        }
-
-        try {
-            deleteClusterMetrics();
-        } catch (Exception e) {
-            LOGGER.error("delete cluster metrics failed.", e);
-        }
-
        LOGGER.info("finish delete metrics, costTime:{}ms.", System.currentTimeMillis() - startTime);
    }

-    private void deleteTopicMetrics() {
-        Date endTime = new Date(System.currentTimeMillis() - configUtils.getMaxMetricsSaveDays() * 24 * 60 * 60 * 1000);
-        topicMetricsDao.deleteBeforeTime(endTime);
+    private boolean deleteCommunityTopicMetrics() {
+        return topicMetricsDao.deleteBeforeTime(new Date(System.currentTimeMillis() - this.topicMetricsSaveDays * 24 * 60 * 60 * 1000), this.deleteLimitSize) >= this.deleteLimitSize;
    }

-    private void deleteTopicAppMetrics() {
-        Date endTime = new Date(System.currentTimeMillis() - configUtils.getMaxMetricsSaveDays() * 24 * 60 * 60 * 1000);
-        topicAppMetricsDao.deleteBeforeTime(endTime);
+    private boolean deleteDiDiTopicMetrics() {
+        boolean needReDelete = false;
+        if (topicAppMetricsDao.deleteBeforeTime(new Date(System.currentTimeMillis() - this.appTopicMetricsSaveDays * 24 * 60 * 60 * 1000), this.deleteLimitSize) >= this.deleteLimitSize) {
+            needReDelete = true;
+        }
+
+        if (topicRequestMetricsDao.deleteBeforeTime(new Date(System.currentTimeMillis() - this.topicRequestTimeMetricsSaveDays * 24 * 60 * 60 * 1000), this.deleteLimitSize) >= this.deleteLimitSize) {
+            needReDelete = true;
+        }
+
+        if (topicThrottledMetricsDao.deleteBeforeTime(new Date(System.currentTimeMillis() - this.topicThrottledMetricsSaveDays * 24 * 60 * 60 * 1000), this.deleteLimitSize) >= this.deleteLimitSize) {
+            needReDelete = true;
+        }
+
+        return needReDelete;
    }

-    private void deleteTopicRequestMetrics() {
-        Date endTime = new Date(System.currentTimeMillis() - configUtils.getMaxMetricsSaveDays() * 24 * 60 * 60 * 1000);
-        topicRequestMetricsDao.deleteBeforeTime(endTime);
-    }
-
-    private void deleteThrottledMetrics() {
-        Date endTime = new Date(System.currentTimeMillis() - configUtils.getMaxMetricsSaveDays() * 24 * 60 * 60 * 1000);
-        topicThrottledMetricsDao.deleteBeforeTime(endTime);
-    }
-
-    private void deleteBrokerMetrics() {
-        Date endTime = new Date(System.currentTimeMillis() - configUtils.getMaxMetricsSaveDays() * 24 * 60 * 60 * 1000);
-        brokerMetricsDao.deleteBeforeTime(endTime);
-    }
-
-    private void deleteClusterMetrics() {
-        Date endTime = new Date(System.currentTimeMillis() - configUtils.getMaxMetricsSaveDays() * 24 * 60 * 60 * 1000);
-        clusterMetricsDao.deleteBeforeTime(endTime);
+    private void deleteClusterBrokerMetrics() {
+        brokerMetricsDao.deleteBeforeTime(new Date(System.currentTimeMillis() - this.brokerMetricsSaveDays * 24 * 60 * 60 * 1000), this.deleteLimitSize);
+        clusterMetricsDao.deleteBeforeTime(new Date(System.currentTimeMillis() - this.clusterMetricsSaveDays * 24 * 60 * 60 * 1000), this.deleteLimitSize);
    }
}
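The rewritten DAO call deleteBeforeTime(endTime, limitSize) now returns the number of rows it removed, and the caller re-runs the delete (up to 10 rounds, pausing 1000 ms via BackoffUtils, which presumably wraps an interruptible sleep) for as long as a full batch came back. A sketch of what the overload plausibly maps to, assuming a MyBatis annotation mapper; the table and column names are illustrative, not taken from the repo:

    import java.util.Date;

    import org.apache.ibatis.annotations.Delete;
    import org.apache.ibatis.annotations.Param;

    public interface TopicMetricsDaoSketch {
        // Delete at most limitSize rows older than endTime and report how many
        // were removed, so the scheduled task can decide whether to run again.
        @Delete("DELETE FROM topic_metrics WHERE gmt_create < #{endTime} LIMIT #{limitSize}")
        int deleteBeforeTime(@Param("endTime") Date endTime, @Param("limitSize") Integer limitSize);
    }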


@@ -1,5 +1,6 @@
package com.xiaojukeji.kafka.manager.task.dispatch.metrics.store;

+import com.xiaojukeji.kafka.manager.common.constant.Constant;
import com.xiaojukeji.kafka.manager.common.constant.KafkaMetricsCollections;
import com.xiaojukeji.kafka.manager.common.constant.LogConstant;
import com.xiaojukeji.kafka.manager.common.entity.metrics.BaseMetrics;
@@ -54,8 +55,6 @@ public class StoreBrokerMetrics extends AbstractScheduledTask<ClusterDO> {
    @Autowired
    private AbstractHealthScoreStrategy healthScoreStrategy;

-    private static final Integer INSERT_BATCH_SIZE = 100;
-
    @Override
    protected List<ClusterDO> listAllTasks() {
        return clusterService.list();
@@ -72,7 +71,7 @@ public class StoreBrokerMetrics extends AbstractScheduledTask<ClusterDO> {
                        clusterDO.getId(),
                        MetricsConvertUtils.merge2BaseMetricsByAdd(brokerMetricsList))
                );
-            } catch (Throwable t) {
+            } catch (Exception t) {
                LOGGER.error("collect failed, clusterId:{}.", clusterDO.getId(), t);
            }
            long endTime = System.currentTimeMillis();
@@ -82,6 +81,11 @@ public class StoreBrokerMetrics extends AbstractScheduledTask<ClusterDO> {
                startTime,
                clusterMetricsList
        );
+        if (ValidateUtils.isEmptyList(doList)) {
+            return;
+        }
+
        clusterMetricsDao.batchAdd(doList);
    }
@@ -110,9 +114,15 @@ public class StoreBrokerMetrics extends AbstractScheduledTask<ClusterDO> {
                MetricsConvertUtils.convertAndUpdateCreateTime2BrokerMetricsDOList(startTime, metricsList);
        int i = 0;
        do {
-            brokerMetricsDao.batchAdd(doList.subList(i, Math.min(i + INSERT_BATCH_SIZE, doList.size())));
-            i += INSERT_BATCH_SIZE;
+            List<BrokerMetricsDO> subDOList = doList.subList(i, Math.min(i + Constant.BATCH_INSERT_SIZE, doList.size()));
+            if (ValidateUtils.isEmptyList(subDOList)) {
+                break;
+            }
+
+            brokerMetricsDao.batchAdd(subDOList);
+            i += Constant.BATCH_INSERT_SIZE;
        } while (i < doList.size());
        return metricsList;
    }
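The chunked batchAdd loop above now repeats, with minor variations, in each of the Store* tasks below. A generic form of the pattern as a hypothetical helper; the name and signature are illustrative, not from the repo:

    import java.util.List;
    import java.util.function.Consumer;

    public final class BatchInsertSketch {
        private BatchInsertSketch() {
        }

        // Hand the DAO at most batchSize elements at a time.
        public static <T> void insertInChunks(List<T> doList, int batchSize, Consumer<List<T>> batchAdd) {
            for (int i = 0; i < doList.size(); i += batchSize) {
                List<T> subList = doList.subList(i, Math.min(i + batchSize, doList.size()));
                if (subList.isEmpty()) {
                    break; // mirrors the ValidateUtils.isEmptyList guard in the diff
                }
                batchAdd.accept(subList);
            }
        }
    }

The empty-sublist guard matters in the original do/while form because the body executes at least once: with an empty doList, subList(0, 0) would hand the DAO an empty batch.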


@@ -17,7 +17,6 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Component;
import java.util.*;
@@ -28,7 +27,7 @@ import java.util.*;
@CustomScheduled(name = "storeDiDiAppTopicMetrics", cron = "41 0/1 * * * ?", threadNum = 5)
@ConditionalOnProperty(prefix = "custom.store-metrics-task.didi", name = "app-topic-metrics-enabled", havingValue = "true", matchIfMissing = true)
public class StoreDiDiAppTopicMetrics extends AbstractScheduledTask<ClusterDO> {
-    private final static Logger LOGGER = LoggerFactory.getLogger(LogConstant.SCHEDULED_TASK_LOGGER);
+    private static final Logger LOGGER = LoggerFactory.getLogger(LogConstant.SCHEDULED_TASK_LOGGER);

    @Autowired
    private JmxService jmxService;
@@ -50,7 +49,7 @@ public class StoreDiDiAppTopicMetrics extends AbstractScheduledTask<ClusterDO> {
        try {
            getAndBatchAddTopicAppMetrics(startTime, clusterDO.getId());
-        } catch (Throwable t) {
+        } catch (Exception t) {
            LOGGER.error("save topic metrics failed, clusterId:{}.", clusterDO.getId(), t);
        }
    }
@@ -65,7 +64,12 @@ public class StoreDiDiAppTopicMetrics extends AbstractScheduledTask<ClusterDO> {
                MetricsConvertUtils.convertAndUpdateCreateTime2TopicMetricsDOList(startTime, metricsList);
        int i = 0;
        do {
-            topicAppMetricsDao.batchAdd(doList.subList(i, Math.min(i + Constant.BATCH_INSERT_SIZE, doList.size())));
+            List<TopicMetricsDO> subDOList = doList.subList(i, Math.min(i + Constant.BATCH_INSERT_SIZE, doList.size()));
+            if (ValidateUtils.isEmptyList(subDOList)) {
+                return;
+            }
+
+            topicAppMetricsDao.batchAdd(subDOList);
            i += Constant.BATCH_INSERT_SIZE;
        } while (i < doList.size());
    }


@@ -27,7 +27,7 @@ import java.util.*;
@CustomScheduled(name = "storeDiDiTopicRequestTimeMetrics", cron = "51 0/1 * * * ?", threadNum = 5)
@ConditionalOnProperty(prefix = "custom.store-metrics-task.didi", name = "topic-request-time-metrics-enabled", havingValue = "true", matchIfMissing = true)
public class StoreDiDiTopicRequestTimeMetrics extends AbstractScheduledTask<ClusterDO> {
-    private final static Logger LOGGER = LoggerFactory.getLogger(LogConstant.SCHEDULED_TASK_LOGGER);
+    private static final Logger LOGGER = LoggerFactory.getLogger(LogConstant.SCHEDULED_TASK_LOGGER);

    @Autowired
    private JmxService jmxService;
@@ -51,7 +51,7 @@ public class StoreDiDiTopicRequestTimeMetrics extends AbstractScheduledTask<ClusterDO> {
            LOGGER.info("save topic metrics, clusterId:{}, start.", clusterDO.getId());
            getAndBatchAddTopicRequestTimeMetrics(startTime, clusterDO.getId());
            LOGGER.info("save topic metrics, clusterId:{}, end costTime:{}.", clusterDO.getId(), System.currentTimeMillis() - startTime);
-        } catch (Throwable t) {
+        } catch (Exception t) {
            LOGGER.error("save topic metrics failed, clusterId:{}.", clusterDO.getId(), t);
        }
    }
@@ -69,7 +69,12 @@ public class StoreDiDiTopicRequestTimeMetrics extends AbstractScheduledTask<ClusterDO> {
        int i = 0;
        do {
-            topicRequestMetricsDao.batchAdd(doList.subList(i, Math.min(i + Constant.BATCH_INSERT_SIZE, doList.size())));
+            List<TopicMetricsDO> subDOList = doList.subList(i, Math.min(i + Constant.BATCH_INSERT_SIZE, doList.size()));
+            if (ValidateUtils.isEmptyList(subDOList)) {
+                return;
+            }
+
+            topicRequestMetricsDao.batchAdd(subDOList);
            i += Constant.BATCH_INSERT_SIZE;
        } while (i < doList.size());
    }


@@ -25,7 +25,7 @@ import java.util.List;
@Component("storeCommunityTopicMetrics2DB")
@ConditionalOnProperty(prefix = "custom.store-metrics-task.community", name = "topic-metrics-enabled", havingValue = "true", matchIfMissing = true)
public class StoreCommunityTopicMetrics2DB implements ApplicationListener<TopicMetricsCollectedEvent> {
-    private final static Logger LOGGER = LoggerFactory.getLogger(LogConstant.SCHEDULED_TASK_LOGGER);
+    private static final Logger LOGGER = LoggerFactory.getLogger(LogConstant.SCHEDULED_TASK_LOGGER);

    @Autowired
    private TopicMetricsDao topicMetricsDao;
@@ -40,17 +40,21 @@ public class StoreCommunityTopicMetrics2DB implements ApplicationListener<TopicMetricsCollectedEvent> {
        try {
            store2DB(System.currentTimeMillis(), metricsList);
-        } catch (Throwable t) {
+        } catch (Exception t) {
            LOGGER.error("save topic metrics failed, clusterId:{}.", event.getClusterId(), t);
        }
    }

-    private void store2DB(Long startTime, List<TopicMetrics> metricsList) throws Exception {
-        List<TopicMetricsDO> doList =
-                MetricsConvertUtils.convertAndUpdateCreateTime2TopicMetricsDOList(startTime, metricsList);
+    private void store2DB(Long startTime, List<TopicMetrics> metricsList) {
+        List<TopicMetricsDO> doList = MetricsConvertUtils.convertAndUpdateCreateTime2TopicMetricsDOList(startTime, metricsList);
        int i = 0;
        do {
-            topicMetricsDao.batchAdd(doList.subList(i, Math.min(i + Constant.BATCH_INSERT_SIZE, doList.size())));
+            List<TopicMetricsDO> subDOList = doList.subList(i, Math.min(i + Constant.BATCH_INSERT_SIZE, doList.size()));
+            if (ValidateUtils.isEmptyList(subDOList)) {
+                return;
+            }
+
+            topicMetricsDao.batchAdd(subDOList);
            i += Constant.BATCH_INSERT_SIZE;
        } while (i < doList.size());
    }


@@ -22,7 +22,7 @@ import java.util.*;
 * @date 20/9/24
 */
@Component("storeTopicThrottledMetrics2DB")
-@ConditionalOnProperty(prefix = "custom.store-metrics-task.didi", name = "topic-throttled-metrics", havingValue = "true", matchIfMissing = true)
+@ConditionalOnProperty(prefix = "custom.store-metrics-task.didi", name = "topic-throttled-metrics-enabled", havingValue = "true", matchIfMissing = true)
public class StoreTopicThrottledMetrics2DB implements ApplicationListener<TopicThrottledMetricsCollectedEvent> {
    private final static Logger LOGGER = LoggerFactory.getLogger(LogConstant.SCHEDULED_TASK_LOGGER);
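Pulling the switches in this commit together: each Store* task can be toggled via @ConditionalOnProperty (all default to enabled through matchIfMissing = true), and DeleteMetrics reads its retention settings via @Value. A sketch of the corresponding application.yml; the file layout is an assumption, but the keys and defaults are taken from the annotations in this diff:

    custom:
      store-metrics-task:
        community:
          topic-metrics-enabled: true              # StoreCommunityTopicMetrics2DB
        didi:
          app-topic-metrics-enabled: true          # StoreDiDiAppTopicMetrics
          topic-request-time-metrics-enabled: true # StoreDiDiTopicRequestTimeMetrics
          topic-throttled-metrics-enabled: true    # StoreTopicThrottledMetrics2DB (key renamed in this commit)

    task:
      metrics:
        delete-metrics:
          delete-limit-size: 1000
          cluster-metrics-save-days: 14
          broker-metrics-save-days: 14
          topic-metrics-save-days: 7
          topic-request-time-metrics-save-days: 7
          topic-throttled-metrics-save-days: 7
          app-topic-metrics-save-days: 7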


@@ -0,0 +1,38 @@
package com.xiaojukeji.kafka.manager.task.listener.biz;

import com.xiaojukeji.kafka.manager.common.events.RegionCreatedEvent;
import com.xiaojukeji.kafka.manager.task.dispatch.biz.CalRegionCapacity;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationListener;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Component;

/**
 * Listener for region-created events.
 * TODO: move this to the core module later
 * @author zengqiao
 * @date 22/01/11
 */
@Component
public class RegionCreatedListener implements ApplicationListener<RegionCreatedEvent> {
    private static final Logger logger = LoggerFactory.getLogger(RegionCreatedListener.class);

    @Autowired
    private CalRegionCapacity calRegionCapacity;

    @Async
    @Override
    public void onApplicationEvent(RegionCreatedEvent event) {
        try {
            logger.info("cal region capacity started when region created, regionDO:{}.", event.getRegionDO());

            calRegionCapacity.processTask(event.getRegionDO());

            logger.info("cal region capacity finished when region created, regionDO:{}.", event.getRegionDO());
        } catch (Exception e) {
            logger.error("cal region capacity failed when region created, regionDO:{}.", event.getRegionDO(), e);
        }
    }
}
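Nothing in this diff shows where RegionCreatedEvent is published. A hypothetical publish site, mirroring how CollectAndPublishCommunityTopicMetrics fires its event through SpringTool at the top of this commit; apart from RegionCreatedEvent's package, every name here is illustrative:

    import com.xiaojukeji.kafka.manager.common.events.RegionCreatedEvent;

    import org.springframework.stereotype.Service;

    // Sketch only: RegionDO and SpringTool are repo classes referenced elsewhere
    // in this commit; this service and method are assumptions.
    @Service
    public class RegionServiceSketch {
        public void createRegion(RegionDO regionDO) {
            // ... persist the region first ...

            // Fire the event so RegionCreatedListener computes the region's
            // capacity asynchronously (the listener is @Async).
            SpringTool.publish(new RegionCreatedEvent(this, regionDO));
        }
    }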