Mirror of https://github.com/didi/KnowStreaming.git, synced 2026-01-03 02:52:08 +08:00
[Optimize] Remove the code that reads and writes Replica metrics from ES (#862)
@@ -1,95 +0,0 @@
package com.xiaojukeji.know.streaming.km.persistence.es.dao;

import com.didiglobal.logi.elasticsearch.client.response.query.query.ESQueryResponse;
import com.didiglobal.logi.elasticsearch.client.response.query.query.aggs.ESAggr;
import com.xiaojukeji.know.streaming.km.common.bean.po.metrice.ReplicationMetricPO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.metrics.point.MetricPointVO;
import com.xiaojukeji.know.streaming.km.persistence.es.dsls.DslConstant;
import org.springframework.stereotype.Component;

import javax.annotation.PostConstruct;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static com.xiaojukeji.know.streaming.km.common.constant.ESConstant.VALUE;
import static com.xiaojukeji.know.streaming.km.persistence.es.template.TemplateConstant.REPLICATION_INDEX;

/**
 * @author didi
 */
@Component
public class ReplicationMetricESDAO extends BaseMetricESDAO {

    @PostConstruct
    public void init() {
        super.indexName = REPLICATION_INDEX;
        checkCurrentDayIndexExist();
        register(this);
    }

    /**
     * Get the latest replication metrics of brokerId in cluster clusterPhyId
     */
    public ReplicationMetricPO getReplicationLatestMetrics(Long clusterPhyId, Integer brokerId, String topic,
                                                           Integer partitionId, List<String> metricNames) {
        Long endTime   = getLatestMetricTime();
        Long startTime = endTime - FIVE_MIN;

        String dsl = dslLoaderUtil.getFormatDslByFileName(
                DslConstant.GET_REPLICATION_LATEST_METRICS, clusterPhyId, brokerId, topic, partitionId, startTime, endTime);

        ReplicationMetricPO replicationMetricPO = esOpClient.performRequestAndTakeFirst(
                realIndex(startTime, endTime), dsl, ReplicationMetricPO.class);

        return (null == replicationMetricPO) ? new ReplicationMetricPO(clusterPhyId, topic, brokerId, partitionId)
                                             : filterMetrics(replicationMetricPO, metricNames);
    }

    /**
     * For each metric, get the aggregated (avg, max) value of the given partitionId in cluster clusterPhyId
     * within the time range [startTime, endTime]
     */
    public Map<String/*metric*/, MetricPointVO> getReplicationMetricsPoint(Long clusterPhyId, String topic,
                                                                           Integer brokerId, Integer partitionId, List<String> metrics,
                                                                           String aggType, Long startTime, Long endTime) {
        // 1. Resolve the indices that need to be queried
        String realIndex = realIndex(startTime, endTime);

        // 2. Build the aggregation clause of the query
        String aggDsl = buildAggsDSL(metrics, aggType);

        String dsl = dslLoaderUtil.getFormatDslByFileName(
                DslConstant.GET_REPLICATION_AGG_SINGLE_METRICS, clusterPhyId, brokerId, topic, partitionId, startTime, endTime, aggDsl);

        return esOpClient.performRequestWithRouting(String.valueOf(brokerId), realIndex, dsl,
                s -> handleSingleESQueryResponse(s, metrics, aggType), 3);
    }

    /**************************************************** private method ****************************************************/
    private Map<String/*metric*/, MetricPointVO> handleSingleESQueryResponse(ESQueryResponse response, List<String> metrics, String aggType) {
        Map<String/*metric*/, MetricPointVO> metricMap = new HashMap<>();

        if (null == response || null == response.getAggs()) {
            return metricMap;
        }

        Map<String, ESAggr> esAggrMap = response.getAggs().getEsAggrMap();
        if (null == esAggrMap) {
            return metricMap;
        }

        for (String metric : metrics) {
            String value = esAggrMap.get(metric).getUnusedMap().get(VALUE).toString();

            MetricPointVO metricPoint = new MetricPointVO();
            metricPoint.setAggType(aggType);
            metricPoint.setValue(value);
            metricPoint.setName(metric);

            metricMap.put(metric, metricPoint);
        }

        return metricMap;
    }
}
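For context: the %d/%s placeholders in the two DSL templates removed later in this diff are filled positionally by dslLoaderUtil.getFormatDslByFileName with the arguments passed above (clusterPhyId, brokerId, topic, partitionId, startTime, endTime, and, for the aggregation query, aggDsl). A minimal sketch assuming String.format-style substitution; the class name, shortened template, and sample values below are illustrative only and not part of the project:

    public class DslSubstitutionSketch {
        public static void main(String[] args) {
            // Shortened stand-in for ReplicationMetricESDAO/getAggSingleReplicationMetrics;
            // the placeholder order matches the real template: clusterPhyId, brokerId, topic,
            // partitionId, gte, lte, aggs body.
            String template = "{\"query\":{\"bool\":{\"must\":["
                    + "{\"term\":{\"clusterPhyId\":{\"value\":%d}}},"
                    + "{\"term\":{\"brokerId\":{\"value\":%d}}},"
                    + "{\"term\":{\"topic\":{\"value\":\"%s\"}}},"
                    + "{\"term\":{\"partitionId\":{\"value\":%d}}},"
                    + "{\"range\":{\"timestamp\":{\"gte\":%d,\"lte\":%d}}}"
                    + "]}},\"aggs\":{%s}}";

            // Hypothetical aggregation body; what buildAggsDSL actually emits is not shown in this diff.
            String aggDsl = "\"LogEndOffset\":{\"avg\":{\"field\":\"metrics.LogEndOffset\"}}";

            String dsl = String.format(template, 1L, 2, "my-topic", 0, 1660000000000L, 1660000300000L, aggDsl);
            System.out.println(dsl);
        }
    }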
@@ -62,11 +62,6 @@ public class DslConstant {

    public static final String LIST_PARTITION_LATEST_METRICS_BY_TOPIC = "PartitionMetricESDAO/listPartitionLatestMetricsByTopic";

    /**************************************************** REPLICATION ****************************************************/
    public static final String GET_REPLICATION_AGG_SINGLE_METRICS = "ReplicationMetricESDAO/getAggSingleReplicationMetrics";

    public static final String GET_REPLICATION_LATEST_METRICS = "ReplicationMetricESDAO/getReplicationLatestMetrics";

    /**************************************************** Group ****************************************************/
    public static final String GET_GROUP_TOPIC_PARTITION = "GroupMetricESDAO/getTopicPartitionOfGroup";
@@ -9,7 +9,6 @@ public class TemplateConstant {
    public static final String BROKER_INDEX = "ks_kafka_broker_metric";
    public static final String PARTITION_INDEX = "ks_kafka_partition_metric";
    public static final String GROUP_INDEX = "ks_kafka_group_metric";
    public static final String REPLICATION_INDEX = "ks_kafka_replication_metric";
    public static final String ZOOKEEPER_INDEX = "ks_kafka_zookeeper_metric";
    public static final String CONNECT_CLUSTER_INDEX = "ks_kafka_connect_cluster_metric";
    public static final String CONNECT_CONNECTOR_INDEX = "ks_kafka_connect_connector_metric";
@@ -1,48 +0,0 @@
{
  "size":0,
  "query":{
    "bool":{
      "must":[
        {
          "term":{
            "clusterPhyId":{
              "value":%d
            }
          }
        },
        {
          "term":{
            "brokerId":{
              "value":%d
            }
          }
        },
        {
          "term":{
            "topic":{
              "value":"%s"
            }
          }
        },
        {
          "term":{
            "partitionId":{
              "value":%d
            }
          }
        },
        {
          "range":{
            "timestamp":{
              "gte":%d,
              "lte":%d
            }
          }
        }
      ]
    }
  },
  "aggs":{
    %s
  }
}
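The %s inside the "aggs" section above is filled with the output of buildAggsDSL, which is defined in BaseMetricESDAO and not shown in this diff. For handleSingleESQueryResponse to resolve esAggrMap.get(metric), that output presumably contains one sub-aggregation named after each requested metric; a hedged sketch of that assumption (class and method names here are illustrative):

    import java.util.Arrays;
    import java.util.List;
    import java.util.StringJoiner;

    public class AggsBodySketch {
        // Assumed shape only: one aggType aggregation per metric, keyed by the metric name and
        // targeting the corresponding metrics.* field, so the response aggs map can be looked up
        // by metric name the way handleSingleESQueryResponse does.
        static String buildAggsBody(List<String> metrics, String aggType) {
            StringJoiner joiner = new StringJoiner(",");
            for (String metric : metrics) {
                joiner.add(String.format("\"%s\":{\"%s\":{\"field\":\"metrics.%s\"}}", metric, aggType, metric));
            }
            return joiner.toString();
        }

        public static void main(String[] args) {
            // e.g. "LogEndOffset":{"avg":{"field":"metrics.LogEndOffset"}},"Messages":{"avg":{"field":"metrics.Messages"}}
            System.out.println(buildAggsBody(Arrays.asList("LogEndOffset", "Messages"), "avg"));
        }
    }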
@@ -1,52 +0,0 @@
{
  "size": 1,
  "query": {
    "bool": {
      "must": [
        {
          "term": {
            "clusterPhyId": {
              "value": %d
            }
          }
        },
        {
          "term": {
            "brokerId": {
              "value": %d
            }
          }
        },
        {
          "term": {
            "topic": {
              "value": "%s"
            }
          }
        },
        {
          "term": {
            "partitionId": {
              "value": %d
            }
          }
        },
        {
          "range": {
            "timestamp": {
              "gte": %d,
              "lte": %d
            }
          }
        }
      ]
    }
  },
  "sort": [
    {
      "timestamp": {
        "order": "desc"
      }
    }
  ]
}
@@ -1,65 +0,0 @@
{
  "order" : 10,
  "index_patterns" : [
    "ks_kafka_replication_metric*"
  ],
  "settings" : {
    "index" : {
      "number_of_shards" : "10"
    }
  },
  "mappings" : {
    "properties" : {
      "brokerId" : {
        "type" : "long"
      },
      "partitionId" : {
        "type" : "long"
      },
      "routingValue" : {
        "type" : "text",
        "fields" : {
          "keyword" : {
            "ignore_above" : 256,
            "type" : "keyword"
          }
        }
      },
      "clusterPhyId" : {
        "type" : "long"
      },
      "topic" : {
        "type" : "keyword"
      },
      "metrics" : {
        "properties" : {
          "LogStartOffset" : {
            "type" : "float"
          },
          "Messages" : {
            "type" : "float"
          },
          "LogEndOffset" : {
            "type" : "float"
          }
        }
      },
      "key" : {
        "type" : "text",
        "fields" : {
          "keyword" : {
            "ignore_above" : 256,
            "type" : "keyword"
          }
        }
      },
      "timestamp" : {
        "format" : "yyyy-MM-dd HH:mm:ss Z||yyyy-MM-dd HH:mm:ss||yyyy-MM-dd HH:mm:ss.SSS Z||yyyy-MM-dd HH:mm:ss.SSS||yyyy-MM-dd HH:mm:ss,SSS||yyyy/MM/dd HH:mm:ss||yyyy-MM-dd HH:mm:ss,SSS Z||yyyy/MM/dd HH:mm:ss,SSS Z||epoch_millis",
        "index" : true,
        "type" : "date",
        "doc_values" : true
      }
    }
  },
  "aliases" : { }
}