Mirror of https://github.com/didi/KnowStreaming.git
Commit: v2.1 version update (v2.1版本更新)
@@ -46,4 +46,15 @@ public class TopicCreationConstant {
     public static final String TOPIC_NAME_PREFIX_RU = "ru01_";
 
     public static final Integer TOPIC_NAME_MAX_LENGTH = 255;
+
+
+    /**
+     * Default number of apply orders allowed to pass per automated-approval task
+     */
+    public static final Integer DEFAULT_MAX_PASSED_ORDER_NUM_PER_TASK = 1;
+
+    /**
+     * Maximum number of apply orders allowed to pass per automated-approval task
+     */
+    public static final Integer MAX_PASSED_ORDER_NUM_PER_TASK = 200;
 }
@@ -86,6 +86,8 @@ public enum ResultStatus {
     APP_ID_OR_PASSWORD_ILLEGAL(1000, "app or password illegal"),
     SYSTEM_CODE_ILLEGAL(1000, "system code illegal"),
+
+    CLUSTER_TASK_HOST_LIST_ILLEGAL(1000, "主机列表错误,请检查主机列表"),
 
@@ -1,5 +1,6 @@
 package com.xiaojukeji.kafka.manager.common.entity.ao;
 
+import java.util.List;
 import java.util.Properties;
 
 /**
@@ -23,6 +24,8 @@ public class RdTopicBasic {
 
     private String description;
 
+    private List<String> regionNameList;
+
     public Long getClusterId() {
         return clusterId;
     }
@@ -87,6 +90,14 @@ public class RdTopicBasic {
         this.description = description;
     }
 
+    public List<String> getRegionNameList() {
+        return regionNameList;
+    }
+
+    public void setRegionNameList(List<String> regionNameList) {
+        this.regionNameList = regionNameList;
+    }
+
     @Override
     public String toString() {
         return "RdTopicBasic{" +
@@ -98,6 +109,7 @@ public class RdTopicBasic {
                 ", appName='" + appName + '\'' +
                 ", properties=" + properties +
                 ", description='" + description + '\'' +
+                ", regionNameList='" + regionNameList + '\'' +
                 '}';
     }
 }
@@ -1,5 +1,8 @@
 package com.xiaojukeji.kafka.manager.common.entity.ao.config;
 
+import com.xiaojukeji.kafka.manager.common.constant.TopicCreationConstant;
+import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
+
 import java.util.List;
 
 /**
@@ -7,8 +10,27 @@ import java.util.List;
  * @date 20/7/24
  */
 public class CreateTopicConfig {
+    /**
+     * Number of apply orders allowed to pass per automated-approval task
+     */
+    private Integer maxPassedOrderNumPerTask;
+
     private List<CreateTopicElemConfig> configList;
 
+    public Integer getMaxPassedOrderNumPerTask() {
+        if (ValidateUtils.isNull(maxPassedOrderNumPerTask)) {
+            return TopicCreationConstant.DEFAULT_MAX_PASSED_ORDER_NUM_PER_TASK;
+        }
+        if (maxPassedOrderNumPerTask > TopicCreationConstant.MAX_PASSED_ORDER_NUM_PER_TASK) {
+            return TopicCreationConstant.MAX_PASSED_ORDER_NUM_PER_TASK;
+        }
+        return maxPassedOrderNumPerTask;
+    }
+
+    public void setMaxPassedOrderNumPerTask(Integer maxPassedOrderNumPerTask) {
+        this.maxPassedOrderNumPerTask = maxPassedOrderNumPerTask;
+    }
+
     public List<CreateTopicElemConfig> getConfigList() {
         return configList;
     }
@@ -20,7 +42,8 @@ public class CreateTopicConfig {
     @Override
     public String toString() {
         return "CreateTopicConfig{" +
-                "configList=" + configList +
+                "maxPassedOrderNumPerTask=" + maxPassedOrderNumPerTask +
+                ", configList=" + configList +
                 '}';
     }
 }
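Note: the new getter clamps the configured value between the default and the cap. A minimal sketch of the same logic in isolation (a standalone method written for illustration, not part of the commit):

// Clamping behavior of getMaxPassedOrderNumPerTask(), extracted for clarity.
static int effectiveMaxPassedOrderNum(Integer configured) {
    final int DEFAULT = 1;   // TopicCreationConstant.DEFAULT_MAX_PASSED_ORDER_NUM_PER_TASK
    final int MAX = 200;     // TopicCreationConstant.MAX_PASSED_ORDER_NUM_PER_TASK
    if (configured == null) {
        return DEFAULT;      // unset -> default
    }
    if (configured > MAX) {
        return MAX;          // over the cap -> clamped
    }
    return configured;       // otherwise -> as configured
}
// effectiveMaxPassedOrderNum(null) == 1; (500) == 200; (50) == 50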
@@ -1,5 +1,7 @@
 package com.xiaojukeji.kafka.manager.common.entity.ao.topic;
 
+import java.util.List;
+
 /**
  * @author arthur
  * @date 2018/09/03
@@ -17,7 +19,7 @@ public class TopicBasicDTO {
 
     private String description;
 
-    private String region;
+    private List<String> regionNameList;
 
     private Integer score;
 
@@ -83,12 +85,12 @@ public class TopicBasicDTO {
         this.description = description;
     }
 
-    public String getRegion() {
-        return region;
-    }
-
-    public void setRegion(String region) {
-        this.region = region;
-    }
+    public List<String> getRegionNameList() {
+        return regionNameList;
+    }
+
+    public void setRegionNameList(List<String> regionNameList) {
+        this.regionNameList = regionNameList;
+    }
 
     public Integer getScore() {
@@ -164,7 +166,7 @@ public class TopicBasicDTO {
                 ", principals='" + principals + '\'' +
                 ", topicName='" + topicName + '\'' +
                 ", description='" + description + '\'' +
-                ", region='" + region + '\'' +
+                ", regionNameList='" + regionNameList + '\'' +
                 ", score=" + score +
                 ", topicCodeC='" + topicCodeC + '\'' +
                 ", partitionNum=" + partitionNum +
@@ -18,6 +18,8 @@ public class TopicOverview {
 
     private Object byteIn;
 
+    private Object byteOut;
+
     private Object produceRequest;
 
     private String appName;
@@ -78,6 +80,14 @@ public class TopicOverview {
         this.byteIn = byteIn;
     }
 
+    public Object getByteOut() {
+        return byteOut;
+    }
+
+    public void setByteOut(Object byteOut) {
+        this.byteOut = byteOut;
+    }
+
     public Object getProduceRequest() {
         return produceRequest;
     }
@@ -135,6 +145,7 @@ public class TopicOverview {
                 ", partitionNum=" + partitionNum +
                 ", retentionTime=" + retentionTime +
                 ", byteIn=" + byteIn +
+                ", byteOut=" + byteOut +
                 ", produceRequest=" + produceRequest +
                 ", appName='" + appName + '\'' +
                 ", appId='" + appId + '\'' +
@@ -1,6 +1,7 @@
 package com.xiaojukeji.kafka.manager.common.entity.dto.rd;
 
 import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+import com.xiaojukeji.kafka.manager.common.bizenum.ClusterModeEnum;
 import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
 import io.swagger.annotations.ApiModel;
 import io.swagger.annotations.ApiModelProperty;
@@ -108,10 +109,13 @@ public class LogicalClusterDTO {
         if (ValidateUtils.isNull(clusterId)
                 || ValidateUtils.isNull(clusterId)
                 || ValidateUtils.isEmptyList(regionIdList)
-                || ValidateUtils.isNull(appId)
                 || ValidateUtils.isNull(mode)) {
             return false;
         }
+        if (!ClusterModeEnum.SHARED_MODE.getCode().equals(mode) && ValidateUtils.isNull(appId)) {
+            return false;
+        }
+        appId = ValidateUtils.isNull(appId)? "": appId;
         description = ValidateUtils.isNull(description)? "": description;
         return true;
     }
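Note: after this change, appId is no longer required unconditionally; it is mandatory only for non-shared clusters and is normalized to "" when omitted. The rule in isolation (a sketch; sharedModeCode stands in for ClusterModeEnum.SHARED_MODE.getCode()):

// Returns false only when a non-shared logical cluster omits its owner appId.
static boolean appIdParamOk(Integer mode, String appId, Integer sharedModeCode) {
    if (!sharedModeCode.equals(mode) && appId == null) {
        return false;   // non-shared cluster must declare an owner app
    }
    return true;        // shared clusters may omit appId; it defaults to ""
}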
@@ -1,5 +1,7 @@
 package com.xiaojukeji.kafka.manager.common.entity.metrics;
 
+import java.util.List;
+
 /**
  * @author zengqiao
  * @date 20/6/17
@@ -11,6 +13,8 @@ public class TopicMetrics extends BaseMetrics {
 
     private String topicName;
 
+    private List<BrokerMetrics> brokerMetricsList;
+
     public TopicMetrics(Long clusterId, String topicName) {
         super();
         this.clusterId = clusterId;
@@ -24,6 +28,14 @@ public class TopicMetrics extends BaseMetrics {
         this.topicName = topicName;
     }
 
+    public TopicMetrics(String appId, Long clusterId, String topicName, List<BrokerMetrics> brokerMetricsList) {
+        super();
+        this.appId = appId;
+        this.clusterId = clusterId;
+        this.topicName = topicName;
+        this.brokerMetricsList = brokerMetricsList;
+    }
+
     public String getAppId() {
         return appId;
     }
@@ -36,6 +48,14 @@ public class TopicMetrics extends BaseMetrics {
         return topicName;
     }
 
+    public void setBrokerMetricsList(List<BrokerMetrics> brokerMetricsList) {
+        this.brokerMetricsList = brokerMetricsList;
+    }
+
+    public List<BrokerMetrics> getBrokerMetricsList() {
+        return brokerMetricsList;
+    }
+
     @Override
     public String toString() {
         return "TopicMetrics{" +
@@ -28,6 +28,9 @@ public class TopicOverviewVO {
     @ApiModelProperty(value = "每秒流入流量(B)")
     private Object byteIn;
 
+    @ApiModelProperty(value = "每秒流出流量(B)")
+    private Object byteOut;
+
     @ApiModelProperty(value = "发送请求数(个/秒)")
     private Object produceRequest;
 
@@ -94,6 +97,14 @@ public class TopicOverviewVO {
         this.byteIn = byteIn;
     }
 
+    public Object getByteOut() {
+        return byteOut;
+    }
+
+    public void setByteOut(Object byteOut) {
+        this.byteOut = byteOut;
+    }
+
     public Object getProduceRequest() {
         return produceRequest;
     }
@@ -151,6 +162,7 @@ public class TopicOverviewVO {
                 ", partitionNum=" + partitionNum +
                 ", retentionTime=" + retentionTime +
                 ", byteIn=" + byteIn +
+                ", byteOut=" + byteOut +
                 ", produceRequest=" + produceRequest +
                 ", appName='" + appName + '\'' +
                 ", appId='" + appId + '\'' +
@@ -3,6 +3,8 @@ package com.xiaojukeji.kafka.manager.common.entity.vo.normal.topic;
 import io.swagger.annotations.ApiModel;
 import io.swagger.annotations.ApiModelProperty;
 
+import java.util.List;
+
 /**
  * Basic information of a Topic
  * @author zengqiao
@@ -49,6 +51,9 @@ public class TopicBasicVO {
     @ApiModelProperty(value = "集群地址")
     private String bootstrapServers;
 
+    @ApiModelProperty(value = "所属region")
+    private List<String> regionNameList;
+
     public Long getClusterId() {
         return clusterId;
     }
@@ -153,6 +158,14 @@ public class TopicBasicVO {
         this.score = score;
     }
 
+    public List<String> getRegionNameList() {
+        return regionNameList;
+    }
+
+    public void setRegionNameList(List<String> regionNameList) {
+        this.regionNameList = regionNameList;
+    }
+
     @Override
     public String toString() {
         return "TopicBasicVO{" +
@@ -169,6 +182,7 @@ public class TopicBasicVO {
                 ", topicCodeC='" + topicCodeC + '\'' +
                 ", description='" + description + '\'' +
                 ", bootstrapServers='" + bootstrapServers + '\'' +
+                ", regionNameList=" + regionNameList +
                 '}';
     }
 }
@@ -0,0 +1,39 @@
+package com.xiaojukeji.kafka.manager.common.entity.vo.normal.topic;
+
+/**
+ * author: mrazkonglingxu
+ * Date: 2020/12/7
+ * Time: 7:40 PM
+ */
+public class TopicBrokerRequestTimeVO {
+
+    private Long clusterId;
+
+    private Integer brokerId;
+
+    private TopicRequestTimeDetailVO brokerRequestTime;
+
+    public Long getClusterId() {
+        return clusterId;
+    }
+
+    public void setClusterId(Long clusterId) {
+        this.clusterId = clusterId;
+    }
+
+    public Integer getBrokerId() {
+        return brokerId;
+    }
+
+    public void setBrokerId(Integer brokerId) {
+        this.brokerId = brokerId;
+    }
+
+    public TopicRequestTimeDetailVO getBrokerRequestTime() {
+        return brokerRequestTime;
+    }
+
+    public void setBrokerRequestTime(TopicRequestTimeDetailVO brokerRequestTime) {
+        this.brokerRequestTime = brokerRequestTime;
+    }
+}
@@ -3,6 +3,8 @@ package com.xiaojukeji.kafka.manager.common.entity.vo.normal.topic;
 import io.swagger.annotations.ApiModel;
 import io.swagger.annotations.ApiModelProperty;
 
+import java.util.List;
+
 /**
  * @author zengqiao
  * @date 20/4/8
@@ -33,6 +35,8 @@ public class TopicRequestTimeDetailVO {
     @ApiModelProperty(value = "totalTimeMs")
     private Object totalTimeMs;
 
+    private List<TopicBrokerRequestTimeVO> brokerRequestTimeList;
+
     public String getRequestTimeType() {
         return requestTimeType;
     }
@@ -97,6 +101,14 @@ public class TopicRequestTimeDetailVO {
         this.totalTimeMs = totalTimeMs;
     }
 
+    public List<TopicBrokerRequestTimeVO> getBrokerRequestTimeList() {
+        return brokerRequestTimeList;
+    }
+
+    public void setBrokerRequestTimeList(List<TopicBrokerRequestTimeVO> brokerRequestTimeList) {
+        this.brokerRequestTimeList = brokerRequestTimeList;
+    }
+
     @Override
     public String toString() {
         return "TopicRequestTimeDetailVO{" +
@@ -3,6 +3,7 @@ package com.xiaojukeji.kafka.manager.common.entity.vo.rd;
 import io.swagger.annotations.ApiModel;
 import io.swagger.annotations.ApiModelProperty;
 
+import java.util.List;
 import java.util.Properties;
 
 /**
@@ -35,6 +36,9 @@ public class RdTopicBasicVO {
     @ApiModelProperty(value = "备注")
     private String description;
 
+    @ApiModelProperty(value = "所属region")
+    private List<String> regionNameList;
+
     public Long getClusterId() {
         return clusterId;
     }
@@ -99,6 +103,14 @@ public class RdTopicBasicVO {
         this.description = description;
     }
 
+    public List<String> getRegionNameList() {
+        return regionNameList;
+    }
+
+    public void setRegionNameList(List<String> regionNameList) {
+        this.regionNameList = regionNameList;
+    }
+
     @Override
     public String toString() {
         return "RdTopicBasicVO{" +
@@ -110,6 +122,7 @@ public class RdTopicBasicVO {
                 ", appName='" + appName + '\'' +
                 ", properties=" + properties +
                 ", description='" + description + '\'' +
+                ", regionNameList='" + regionNameList + '\'' +
                 '}';
     }
 }
@@ -64,7 +64,7 @@ public class JsonUtils {
             TopicConnectionDO connectionDO = new TopicConnectionDO();
 
             String[] appIdDetailArray = appIdDetail.toString().split("#");
-            if (appIdDetailArray.length == 3) {
+            if (appIdDetailArray.length >= 3) {
                 connectionDO.setAppId(appIdDetailArray[0]);
                 connectionDO.setIp(appIdDetailArray[1]);
                 connectionDO.setClientVersion(appIdDetailArray[2]);
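Note: relaxing "== 3" to ">= 3" keeps records parseable when the payload carries extra "#"-separated segments; only the first three are consumed. A small runnable illustration (the sample payload is hypothetical):

String appIdDetail = "appId_001#10.1.2.3#2.4.0#extra";
String[] parts = appIdDetail.split("#");
// parts.length == 4: previously rejected by "== 3", now accepted by ">= 3".
// parts[0] -> appId, parts[1] -> ip, parts[2] -> clientVersion; the rest is ignored.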
@@ -170,7 +170,10 @@ public class MbeanNameUtilV2 {
                 new MbeanV2(
                         "TopicCodeC",
                         JmxAttributeEnum.VALUE_ATTRIBUTE,
-                        "kafka.server:type=ReplicaManager,name=TopicCodeC"
+                        Arrays.asList(
+                                new AbstractMap.SimpleEntry<>(KafkaVersion.VERSION_0_10_3, "kafka.server:type=ReplicaManager,name=TopicCodeC"),
+                                new AbstractMap.SimpleEntry<>(KafkaVersion.VERSION_MAX, "kafka.server:type=AppIdTopicMetrics,name=RecordCompression,appId=")
+                        )
                 ),
                 Arrays.asList(
                         KafkaMetricsCollections.TOPIC_BASIC_PAGE_METRICS
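Note: the TopicCodeC mbean now carries a per-version list of ObjectNames instead of a single name; MbeanV2.getObjectName(versionNum), used later in JmxServiceImpl, resolves the entry for a broker's version. A standalone sketch of one plausible resolution rule ("first upper bound that covers the version" is an assumption about MbeanV2's semantics, not confirmed by the commit; version numbers are illustrative):

import java.util.AbstractMap.SimpleEntry;
import java.util.Arrays;
import java.util.List;

public class VersionedObjectNameDemo {
    public static void main(String[] args) {
        List<SimpleEntry<Long, String>> entries = Arrays.asList(
                new SimpleEntry<>(10300L, "kafka.server:type=ReplicaManager,name=TopicCodeC"),
                new SimpleEntry<>(Long.MAX_VALUE, "kafka.server:type=AppIdTopicMetrics,name=RecordCompression,appId=")
        );
        long brokerVersionNum = 20500L;  // hypothetical encoded broker version
        String objectName = entries.stream()
                .filter(e -> brokerVersionNum <= e.getKey())  // first bound covering the version
                .map(SimpleEntry::getValue)
                .findFirst()
                .orElseThrow(IllegalStateException::new);
        System.out.println(objectName);  // newer broker -> the AppIdTopicMetrics name
    }
}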
@@ -8,7 +8,7 @@ package com.xiaojukeji.kafka.manager.common.zookeeper;
 public class ZkPathUtil {
     private static final String ZOOKEEPER_SEPARATOR = "/";
 
-    private static final String BROKER_ROOT_NODE = ZOOKEEPER_SEPARATOR + "brokers";
+    public static final String BROKER_ROOT_NODE = ZOOKEEPER_SEPARATOR + "brokers";
 
     public static final String CONTROLLER_ROOT_NODE = ZOOKEEPER_SEPARATOR + "controller";
 
@@ -79,6 +79,7 @@ public class LogicalClusterMetadataManager {
 
         Long logicalClusterId = logicalClusterIdMap.get(topicName);
         if (ValidateUtils.isNull(logicalClusterId)) {
+            LOGGER.debug("class=LogicalClusterMetadataManager||method=getTopicLogicalCluster||topicName={}||msg=logicalClusterId is null!",topicName);
             return null;
         }
         return LOGICAL_CLUSTER_MAP.get(logicalClusterId);
@@ -107,6 +108,7 @@ public class LogicalClusterMetadataManager {
 
     public Long getPhysicalClusterId(Long logicalClusterId) {
         if (ValidateUtils.isNull(logicalClusterId)) {
+            LOGGER.debug("class=LogicalClusterMetadataManager||method=getPhysicalClusterId||msg=logicalClusterId is null!");
             return null;
         }
         if (!LOADED.get()) {
@@ -114,6 +116,7 @@ public class LogicalClusterMetadataManager {
         }
         LogicalClusterDO logicalClusterDO = LOGICAL_CLUSTER_MAP.get(logicalClusterId);
         if (ValidateUtils.isNull(logicalClusterDO)) {
+            LOGGER.debug("class=LogicalClusterMetadataManager||method=getPhysicalClusterId||logicalClusterId={}||msg=logicalClusterDO is null!",logicalClusterId);
             return null;
         }
         return logicalClusterDO.getClusterId();
@@ -124,6 +127,7 @@ public class LogicalClusterMetadataManager {
             return clusterId;
         }
         if (ValidateUtils.isNull(clusterId)) {
+            LOGGER.warn("class=LogicalClusterMetadataManager||method=getPhysicalClusterId||isPhysicalClusterId={}||msg=clusterId is null!",isPhysicalClusterId);
             return null;
         }
         if (!LOADED.get()) {
@@ -131,6 +135,7 @@ public class LogicalClusterMetadataManager {
         }
         LogicalClusterDO logicalClusterDO = LOGICAL_CLUSTER_MAP.get(clusterId);
         if (ValidateUtils.isNull(logicalClusterDO)) {
+            LOGGER.debug("class=LogicalClusterMetadataManager||method=getPhysicalClusterId||clusterId={}||msg=logicalClusterDO is null!",clusterId);
             return null;
         }
         return logicalClusterDO.getClusterId();
@@ -171,8 +176,7 @@ public class LogicalClusterMetadataManager {
         for (Long regionId: regionIdList) {
             RegionDO regionDO = regionMap.get(regionId);
             if (ValidateUtils.isNull(regionDO) || !logicalClusterDO.getClusterId().equals(regionDO.getClusterId())) {
-                LOGGER.warn("flush logical cluster metadata failed, exist illegal region, logicalCluster:{} region:{}.",
-                        logicalClusterDO, regionId);
+                LOGGER.warn("flush logical cluster metadata failed, exist illegal region, logicalCluster:{} region:{}.", logicalClusterDO, regionId);
                 continue;
             }
             brokerIdSet.addAll(ListUtils.string2IntList(regionDO.getBrokerList()));
@@ -86,19 +86,36 @@ public class PhysicalClusterMetadataManager {
             if (ZK_CONFIG_MAP.containsKey(clusterDO.getId())) {
                 return;
             }
 
             ZkConfigImpl zkConfig = new ZkConfigImpl(clusterDO.getZookeeper());
 
-            // Add Broker monitoring
+            // Initialize broker-map
             BROKER_METADATA_MAP.put(clusterDO.getId(), new ConcurrentHashMap<>());
             JMX_CONNECTOR_MAP.put(clusterDO.getId(), new ConcurrentHashMap<>());
             KAFKA_VERSION_MAP.put(clusterDO.getId(), new ConcurrentHashMap<>());
+
+            // Initialize topic-map
+            TOPIC_METADATA_MAP.put(clusterDO.getId(), new ConcurrentHashMap<>());
+            TOPIC_RETENTION_TIME_MAP.put(clusterDO.getId(), new ConcurrentHashMap<>());
+
+            // Initialize cluster-map
+            CLUSTER_MAP.put(clusterDO.getId(), clusterDO);
+
+            if (!zkConfig.checkPathExists(ZkPathUtil.BROKER_ROOT_NODE)) {
+                LOGGER.info("ignore add cluster, zk path=/brokers not exist, clusterId:{}.", clusterDO.getId());
+                try {
+                    zkConfig.close();
+                } catch (Exception e) {
+                    LOGGER.warn("ignore add cluster, close zk connection failed, cluster:{}.", clusterDO, e);
+                }
+                return;
+            }
+
+            // Add Broker monitoring
             BrokerStateListener brokerListener = new BrokerStateListener(clusterDO.getId(), zkConfig, configUtils.getJmxMaxConn());
             brokerListener.init();
             zkConfig.watchChildren(ZkPathUtil.BROKER_IDS_ROOT, brokerListener);
 
             // Add Topic monitoring
-            TOPIC_METADATA_MAP.put(clusterDO.getId(), new ConcurrentHashMap<>());
             TopicStateListener topicListener = new TopicStateListener(clusterDO.getId(), zkConfig);
             topicListener.init();
             zkConfig.watchChildren(ZkPathUtil.BROKER_TOPICS_ROOT, topicListener);
@@ -109,10 +126,6 @@ public class PhysicalClusterMetadataManager {
             controllerListener.init();
             zkConfig.watch(ZkPathUtil.CONTROLLER_ROOT_NODE, controllerListener);
 
-            // Add Config change monitoring
-            TOPIC_RETENTION_TIME_MAP.put(clusterDO.getId(), new ConcurrentHashMap<>());
-
-            CLUSTER_MAP.put(clusterDO.getId(), clusterDO);
             ZK_CONFIG_MAP.put(clusterDO.getId(), zkConfig);
         } catch (Exception e) {
             LOGGER.error("add cluster failed, cluster:{}.", clusterDO, e);
@@ -444,8 +457,16 @@ public class PhysicalClusterMetadataManager {
         return kafkaVersion;
     }
 
-    public String getKafkaVersion(Long clusterId) {
-        return getKafkaVersion(clusterId, PhysicalClusterMetadataManager.getBrokerIdList(clusterId));
+    public String getKafkaVersionFromCache(Long clusterId) {
+        Set<String> kafkaVersionSet = new HashSet<>();
+        for (Integer brokerId: PhysicalClusterMetadataManager.getBrokerIdList(clusterId)) {
+            String kafkaVersion = this.getKafkaVersionFromCache(clusterId, brokerId);
+            if (ValidateUtils.isBlank(kafkaVersion)) {
+                continue;
+            }
+            kafkaVersionSet.add(kafkaVersion);
+        }
+        return ListUtils.strList2String(new ArrayList<>(kafkaVersionSet));
     }
 
     public String getKafkaVersion(Long clusterId, List<Integer> brokerIdList) {
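Note: the renamed getKafkaVersionFromCache(clusterId) aggregates cached per-broker versions instead of re-reading JMX: it skips blank entries, deduplicates, and joins the result into one display string (assuming ListUtils.strList2String comma-joins, as its name suggests). A standalone sketch of the aggregation:

import java.util.LinkedHashSet;
import java.util.Set;

public class VersionAggregationDemo {
    public static void main(String[] args) {
        // per-broker versions as they might come out of the cache (hypothetical values)
        String[] perBroker = {"2.4.0", "2.4.0", "", "0.10.2"};
        Set<String> distinct = new LinkedHashSet<>();
        for (String v : perBroker) {
            if (v == null || v.isEmpty()) {
                continue;               // mirrors the ValidateUtils.isBlank() skip
            }
            distinct.add(v);
        }
        System.out.println(String.join(",", distinct));  // "2.4.0,0.10.2"
    }
}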
@@ -31,6 +31,8 @@ public interface ConfigService {
 
     List<ConfigDO> listAll();
 
+    Integer getAutoPassedTopicApplyOrderNumPerTask();
+
     CreateTopicElemConfig getCreateTopicConfig(Long clusterId, String systemCode);
 
     ClusterDO getClusterDO(Long clusterId);
@@ -31,6 +31,11 @@ public interface JmxService {
 
     TopicMetrics getTopicMetrics(Long clusterId, Integer brokerId, String topicName, Integer metricsCode, Boolean byAdd);
 
+    /**
+     * Get the topic's message-compression (codec) metric
+     */
+    String getTopicCodeCValue(Long clusterId, String topicName);
+
     List<TopicMetrics> getTopicMetrics(Long clusterId, Integer metricsCode, Boolean byAdd);
 
     /**
@@ -64,7 +64,7 @@ public interface TopicService {
     /**
      * Get the offsets of the Topic's partitions
      */
-    Map<TopicPartition, Long> getPartitionOffset(ClusterDO cluster, String topicName, OffsetPosEnum offsetPosEnum);
+    Map<TopicPartition, Long> getPartitionOffset(ClusterDO clusterDO, String topicName, OffsetPosEnum offsetPosEnum);
 
     /**
      * Get Topic overview information
@@ -51,6 +51,13 @@ public interface AppService {
      */
     List<AppDO> getByPrincipal(String principal);
 
+    /**
+     * Query by appId; checks whether the currently logged-in user has permission.
+     * @param appId appId
+     * @return AppDO
+     */
+    AppDO getAppByUserAndId(String appId, String curUser);
+
     /**
      * Query by appId
      * @param appId appId
@@ -11,7 +11,7 @@ import java.util.List;
  * @date 20/4/13
  */
 public interface TopicConnectionService {
-    int batchAdd(List<TopicConnectionDO> doList);
+    void batchAdd(List<TopicConnectionDO> doList);
 
     /**
      * Query connection information
@@ -32,6 +32,7 @@ import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.stream.Collectors;
 
 /**
  * @author zhongyuankai
@@ -59,10 +60,13 @@ public class AppServiceImpl implements AppService {
     @Autowired
     private OperateRecordService operateRecordService;
 
+
+
     @Override
     public ResultStatus addApp(AppDO appDO) {
         try {
             if (appDao.insert(appDO) < 1) {
+                LOGGER.warn("class=AppServiceImpl||method=addApp||AppDO={}||msg=add fail,{}",appDO,ResultStatus.MYSQL_ERROR.getMessage());
                 return ResultStatus.MYSQL_ERROR;
             }
             KafkaUserDO kafkaUserDO = new KafkaUserDO();
@@ -72,6 +76,7 @@ public class AppServiceImpl implements AppService {
             kafkaUserDO.setUserType(0);
             kafkaUserDao.insert(kafkaUserDO);
         } catch (DuplicateKeyException e) {
+            LOGGER.error("class=AppServiceImpl||method=addApp||errMsg={}||appDO={}|", e.getMessage(), appDO, e);
             return ResultStatus.RESOURCE_ALREADY_EXISTED;
         } catch (Exception e) {
             LOGGER.error("add app failed, appDO:{}.", appDO, e);
@@ -139,23 +144,42 @@ public class AppServiceImpl implements AppService {
                 return ResultStatus.SUCCESS;
             }
         } catch (DuplicateKeyException e) {
+            LOGGER.error("class=AppServiceImpl||method=updateByAppId||errMsg={}||AppDTO={}||operator={}||adminApi={}", e.getMessage(), dto, operator, adminApi, e);
             return ResultStatus.RESOURCE_NAME_DUPLICATED;
         } catch (Exception e) {
             LOGGER.error("update app failed, dto:{}, operator:{}, adminApi:{}.", dto, operator, adminApi, e);
         }
+        LOGGER.warn("class=AppServiceImpl||method=updateByAppId||dto={}||operator={}||adminApi={}||msg=update app fail,{}!", dto,operator,adminApi,ResultStatus.MYSQL_ERROR.getMessage());
         return ResultStatus.MYSQL_ERROR;
     }
 
     @Override
-    public List<AppDO> getByPrincipal(String principals) {
+    public List<AppDO> getByPrincipal(String principal) {
         try {
-            return appDao.getByPrincipal(principals);
+            List<AppDO> appDOs = appDao.getByPrincipal(principal);
+            if (!ValidateUtils.isEmptyList(appDOs)) {
+                return appDOs.stream()
+                        .filter(appDO -> ListUtils.string2StrList(appDO.getPrincipals()).contains(principal))
+                        .collect(Collectors.toList());
+            }
         } catch (Exception e) {
-            LOGGER.error("get app list failed, principals:{}.", principals);
+            LOGGER.error("get app list failed, principals:{}.", principal);
        }
         return new ArrayList<>();
     }
 
+    @Override
+    public AppDO getAppByUserAndId(String appId, String curUser) {
+        AppDO appDO = this.getByAppId(appId);
+        if (appDO != null) {
+            if (ListUtils.string2StrList(appDO.getPrincipals()).contains(curUser)) {
+                return appDO;
+            }
+        }
+        LOGGER.debug("class=AppServiceImpl||method=getAppByUserAndId||appId={}||curUser={}||msg=appDO is null!", appId, curUser);
+        return null;
+    }
+
     @Override
     public AppDO getByAppId(String appId) {
         try {
@@ -177,6 +201,7 @@ public class AppServiceImpl implements AppService {
         // Query the AppID
         AppDO appDO = appDao.getByAppId(appId);
         if (ValidateUtils.isNull(appDO)) {
+            LOGGER.debug("class=AppServiceImpl||method=getAppTopicDTOList||appId={}||msg=appDO is null!", appId);
             return new ArrayList<>();
         }
 
@@ -220,6 +245,7 @@ public class AppServiceImpl implements AppService {
                 appTopicDTO.setLogicalClusterId(logicalClusterDO.getId());
                 appTopicDTO.setLogicalClusterName(logicalClusterDO.getName());
             } else {
+                LOGGER.warn("class=AppServiceImpl||method=getAppTopicDTOList||clusterId={}||topicName={}||msg=logicalClusterDO is null!", authorityDO.getClusterId(), authorityDO.getTopicName());
                 continue;
             }
             appTopicDTO.setOperator("");
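Note: getAppByUserAndId adds a permission gate on top of getByAppId: the app is returned only when the caller appears in its principals list. A hypothetical caller (the appId value and currentUser variable are illustrative, not from the commit):

// Returns the app only if currentUser is one of its principals; null otherwise.
AppDO app = appService.getAppByUserAndId("app-001", currentUser);
if (app == null) {
    // Either the appId is unknown or currentUser lacks permission;
    // the two cases are deliberately indistinguishable to the caller.
}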
@@ -120,6 +120,7 @@ public class GatewayConfigServiceImpl implements GatewayConfigService {
         try {
             doList = gatewayConfigDao.getByConfigType(GatewayConfigKeyEnum.SD_SP_RATE.getConfigType());
             if (ValidateUtils.isEmptyList(doList)) {
+                LOGGER.debug("class=GatewayConfigServiceImpl||method=getSpRateConfig||requestVersion={}||msg=doList is empty!",requestVersion);
                 return new SpRateConfig(Long.MIN_VALUE, new HashMap<>(0));
             }
             Long maxVersion = Long.MIN_VALUE;
@@ -27,19 +27,20 @@ public class TopicConnectionServiceImpl implements TopicConnectionService {
     private TopicConnectionDao topicConnectionDao;
 
     @Override
-    public int batchAdd(List<TopicConnectionDO> doList) {
+    public void batchAdd(List<TopicConnectionDO> doList) {
         if (ValidateUtils.isEmptyList(doList)) {
-            return 0;
+            return;
         }
 
         int count = 0;
         for (TopicConnectionDO connectionDO: doList) {
             try {
                 count += topicConnectionDao.replace(connectionDO);
             } catch (Exception e) {
-                LOGGER.error("replace topic connections failed, data:{}.", connectionDO);
+                LOGGER.error("class=TopicConnectionServiceImpl||method=batchAdd||connectionDO={}||errMsg={}", connectionDO, e.getMessage());
             }
         }
-        return count;
+        LOGGER.info("class=TopicConnectionServiceImpl||method=batchAdd||allSize={}||successSize={}", doList.size(), count);
     }
 
     @Override
@@ -150,6 +150,8 @@ public class BrokerServiceImpl implements BrokerService {
         for (Integer brokerId: brokerIdSet) {
             BrokerMetadata brokerMetadata = PhysicalClusterMetadataManager.getBrokerMetadata(clusterId, brokerId);
             if (ValidateUtils.isNull(brokerMetadata)) {
+                LOGGER.warn("class=BrokerServiceImpl||method=getBrokerOverviewList||brokerId={}|||msg=brokerMetadata is null!",
+                        brokerId);
                 continue;
             }
             overviewDTOMap.put(brokerId, BrokerOverviewDTO.newInstance(
@@ -286,7 +286,7 @@ public class ClusterServiceImpl implements ClusterService {
         dto.setClusterName(clusterDO.getClusterName());
         dto.setZookeeper(clusterDO.getZookeeper());
         dto.setBootstrapServers(clusterDO.getBootstrapServers());
-        dto.setKafkaVersion(physicalClusterMetadataManager.getKafkaVersion(clusterDO.getId()));
+        dto.setKafkaVersion(physicalClusterMetadataManager.getKafkaVersionFromCache(clusterDO.getId()));
         dto.setIdc(configUtils.getIdc());
         dto.setSecurityProperties(clusterDO.getSecurityProperties());
         dto.setStatus(clusterDO.getStatus());
@@ -42,6 +42,7 @@ public class ConfigServiceImpl implements ConfigService {
         } catch (Exception e) {
             LOGGER.error("insert config failed, config:{}.", dto, e);
         }
+        LOGGER.warn("class=ConfigServiceImpl||method=insert||dto={}||msg=insert config fail,{}!", dto,ResultStatus.MYSQL_ERROR.getMessage());
         return ResultStatus.MYSQL_ERROR;
     }
 
@@ -54,10 +55,12 @@ public class ConfigServiceImpl implements ConfigService {
             if (configDao.deleteByKey(configKey) >= 1) {
                 return ResultStatus.SUCCESS;
             }
+            LOGGER.warn("class=ConfigServiceImpl||method=deleteByKey||configKey={}||msg=delete config fail,{}!", configKey,ResultStatus.CONFIG_NOT_EXIST.getMessage());
             return ResultStatus.CONFIG_NOT_EXIST;
         } catch (Exception e) {
             LOGGER.error("delete config failed, configKey:{}.", configKey, e);
         }
+        LOGGER.warn("class=ConfigServiceImpl||method=deleteByKey||configKey={}||msg=delete config fail,{}!", configKey,ResultStatus.MYSQL_ERROR.getMessage());
         return ResultStatus.MYSQL_ERROR;
     }
 
@@ -67,10 +70,12 @@ public class ConfigServiceImpl implements ConfigService {
             if (configDao.updateByKey(convert2ConfigDO(dto)) >= 1) {
                 return ResultStatus.SUCCESS;
             }
+            LOGGER.warn("class=ConfigServiceImpl||method=updateByKey||dto={}||msg=update config fail,{}!", dto,ResultStatus.CONFIG_NOT_EXIST.getMessage());
             return ResultStatus.CONFIG_NOT_EXIST;
         } catch (Exception e) {
             LOGGER.error("update config failed, config:{}.", dto, e);
         }
+        LOGGER.warn("class=ConfigServiceImpl||method=deleteByKey||dto={}||msg=delete config fail,{}!", dto,ResultStatus.MYSQL_ERROR.getMessage());
         return ResultStatus.MYSQL_ERROR;
     }
 
@@ -84,10 +89,15 @@ public class ConfigServiceImpl implements ConfigService {
             if (configDao.updateByKey(configDO) >= 1) {
                 return ResultStatus.SUCCESS;
             }
+            LOGGER.warn("class=ConfigServiceImpl||method=updateByKey||configKey={}||configValue={}||msg=update config fail,{}!"
+                    , configKey,configValue,ResultStatus.CONFIG_NOT_EXIST.getMessage());
             return ResultStatus.CONFIG_NOT_EXIST;
         } catch (Exception e) {
             LOGGER.error("update config failed, configValue:{}.", configValue, e);
         }
+        LOGGER.warn("class=ConfigServiceImpl||method=deleteByKey||configKey={}||configValue={}||msg=delete config fail,{}!"
+                , configKey,configValue,ResultStatus.MYSQL_ERROR.getMessage());
+
         return ResultStatus.MYSQL_ERROR;
     }
 
@@ -161,6 +171,16 @@ public class ConfigServiceImpl implements ConfigService {
         return configDO;
     }
 
+    @Override
+    public Integer getAutoPassedTopicApplyOrderNumPerTask() {
+        String configKey = TopicCreationConstant.INNER_CREATE_TOPIC_CONFIG_KEY;
+        CreateTopicConfig configValue = this.getByKey(configKey, CreateTopicConfig.class);
+        if (ValidateUtils.isNull(configValue)) {
+            return TopicCreationConstant.DEFAULT_MAX_PASSED_ORDER_NUM_PER_TASK;
+        }
+        return configValue.getMaxPassedOrderNumPerTask();
+    }
+
     @Override
     public CreateTopicElemConfig getCreateTopicConfig(Long clusterId, String systemCode) {
         String configKey = TopicCreationConstant.INNER_CREATE_TOPIC_CONFIG_KEY;
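Note: getAutoPassedTopicApplyOrderNumPerTask() feeds the per-task auto-approval cap from config, defaulting to 1 when the key is absent and capped at 200 by the CreateTopicConfig getter. A hypothetical consumer (the order type, list, and approveAutomatically helper are illustrative, not from the commit):

// Sketch of an automated approval pass that honors the configured cap.
int limit = configService.getAutoPassedTopicApplyOrderNumPerTask();
int passed = 0;
for (TopicApplyOrder order : pendingOrders) {
    if (passed >= limit) {
        break;                        // never auto-pass more than `limit` per run
    }
    if (approveAutomatically(order)) {
        passed++;
    }
}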
@@ -110,6 +110,8 @@ public class ConsumerServiceImpl implements ConsumerService {
                                                      ConsumerGroupDTO consumeGroupDTO) {
         TopicMetadata topicMetadata = PhysicalClusterMetadataManager.getTopicMetadata(clusterDO.getId(), topicName);
         if (topicMetadata == null) {
+            logger.warn("class=ConsumerServiceImpl||method=getConsumeDetail||clusterId={}||topicName={}||msg=topicMetadata is null!",
+                    clusterDO.getId(), topicName);
             return null;
         }
 
@@ -120,6 +122,7 @@ public class ConsumerServiceImpl implements ConsumerService {
             consumerGroupDetailDTOList = getConsumerPartitionStateInBroker(clusterDO, topicMetadata, consumeGroupDTO);
         }
         if (consumerGroupDetailDTOList == null) {
+            logger.info("class=ConsumerServiceImpl||method=getConsumeDetail||msg=consumerGroupDetailDTOList is null!");
             return null;
         }
 
@@ -167,7 +170,7 @@ public class ConsumerServiceImpl implements ConsumerService {
                 kafkaConsumer.close();
             }
         }
-        return new ArrayList<>();
+        return resultList;
     }
 
     private List<Result> resetConsumerOffset(ClusterDO cluster, KafkaConsumer<String, String> kafkaConsumer, ConsumerGroupDTO consumerGroupDTO, Map<TopicPartition, Long> offsetMap) {
@@ -184,7 +187,9 @@ public class ConsumerServiceImpl implements ConsumerService {
             }
         } catch (Exception e) {
             logger.error("reset failed, clusterId:{} consumerGroup:{} topic-partition:{}.", cluster.getId(), consumerGroupDTO, tp, e);
-            resultList.add(new Result());
+            resultList.add(new Result(
+                    ResultStatus.OPERATION_FAILED.getCode(),
+                    "reset failed..."));
         }
         resultList.add(new Result());
     }
@@ -1,11 +1,13 @@
 package com.xiaojukeji.kafka.manager.service.service.impl;
 
+import com.google.common.base.Joiner;
 import com.xiaojukeji.kafka.manager.common.bizenum.KafkaClientEnum;
 import com.xiaojukeji.kafka.manager.common.constant.KafkaMetricsCollections;
 import com.xiaojukeji.kafka.manager.common.entity.KafkaVersion;
 import com.xiaojukeji.kafka.manager.common.entity.metrics.BaseMetrics;
 import com.xiaojukeji.kafka.manager.common.entity.metrics.TopicMetrics;
 import com.xiaojukeji.kafka.manager.common.entity.metrics.BrokerMetrics;
+import com.xiaojukeji.kafka.manager.common.utils.ListUtils;
 import com.xiaojukeji.kafka.manager.common.utils.jmx.JmxConstant;
 import com.xiaojukeji.kafka.manager.common.entity.ao.PartitionAttributeDTO;
 import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
@@ -123,11 +125,19 @@ public class JmxServiceImpl implements JmxService {
             return null;
         }
         TopicMetrics metrics = null;
+        List<BrokerMetrics> brokerMetricsList = new ArrayList<>();
         for (Integer brokerId : topicMetadata.getBrokerIdSet()) {
             TopicMetrics subMetrics = getTopicMetrics(clusterId, brokerId, topicName, metricsCode, byAdd);
 
             if (ValidateUtils.isNull(subMetrics)) {
                 continue;
             }
+
+            BrokerMetrics brokerMetrics = new BrokerMetrics(clusterId, brokerId);
+            brokerMetrics.setMetricsMap(subMetrics.getMetricsMap());
+
+            brokerMetricsList.add(brokerMetrics);
+
             if (ValidateUtils.isNull(metrics)) {
                 metrics = new TopicMetrics(clusterId, topicName);
             }
@@ -137,6 +147,10 @@ public class JmxServiceImpl implements JmxService {
                 metrics.mergeByMax(subMetrics);
             }
         }
+        if (!ValidateUtils.isNull(metrics)) {
+            metrics.setBrokerMetricsList(brokerMetricsList);
+        }
+
         return metrics;
     }
 
@@ -169,6 +183,77 @@ public class JmxServiceImpl implements JmxService {
         return metrics;
     }
 
+    @Override
+    public String getTopicCodeCValue(Long clusterId, String topicName) {
+        TopicMetadata topicMetadata = PhysicalClusterMetadataManager.getTopicMetadata(clusterId, topicName);
+        if (topicMetadata == null) {
+            return null;
+        }
+
+        MbeanV2 topicCodeCMBean = null;
+        List<MbeanV2> mbeanV2List = MbeanNameUtilV2.getMbeanList(KafkaMetricsCollections.TOPIC_BASIC_PAGE_METRICS);
+        if (!ValidateUtils.isEmptyList(mbeanV2List)) {
+            topicCodeCMBean = mbeanV2List.stream()
+                    .filter(mbeanV2 -> "TopicCodeC".equals(mbeanV2.getFieldName()))
+                    .findFirst()
+                    .orElse(null);
+        }
+
+        if (topicCodeCMBean == null) {
+            return null;
+        }
+
+        KafkaVersion kafkaVersion;
+        Set<String> codeCValues = new HashSet<>();
+        TopicMetrics metrics = new TopicMetrics(clusterId, topicName);
+        for (Integer brokerId : topicMetadata.getBrokerIdSet()) {
+            JmxConnectorWrap jmxConnectorWrap = PhysicalClusterMetadataManager.getJmxConnectorWrap(clusterId, brokerId);
+            if (ValidateUtils.isNull(jmxConnectorWrap)|| !jmxConnectorWrap.checkJmxConnectionAndInitIfNeed()) {
+                continue;
+            }
+            kafkaVersion = physicalClusterMetadataManager.getKafkaVersion(clusterId, brokerId);
+            // For newer versions, fetch the metric {kafka.server:type=AppIdTopicMetrics,name=RecordCompression,appId=*,topic=xxx}
+            if (kafkaVersion.getVersionNum() > KafkaVersion.VERSION_0_10_3.longValue()) {
+                try {
+                    ObjectName objectNameRegX = new ObjectName(topicCodeCMBean.getObjectName(kafkaVersion.getVersionNum())
+                            + "*,topic=" + topicName);
+                    QueryExp exp = Query.match(Query.attr("Value"), Query.value("*"));
+                    Set<ObjectName> objectNames = jmxConnectorWrap.queryNames(objectNameRegX, exp);
+                    for (ObjectName objectName : objectNames) {
+                        if (objectName.toString().indexOf(",appId=admin,") == -1) {
+                            String value = (String) jmxConnectorWrap.getAttribute(objectName, "Value");
+                            if (!codeCValues.contains(value)) {
+                                codeCValues.add(value);
+                            }
+                        }
+                    }
+                } catch (Exception e) {
+                    LOGGER.error("get topic codec metrics failed, clusterId:{} brokerId:{} topicName:{} mbean:{}.",
+                            clusterId, brokerId, topicName, topicCodeCMBean, e
+                    );
+                }
+            } else {
+                // Older versions keep the legacy logic...
+                try {
+                    getAndSupplyAttributes2BaseMetrics(
+                            metrics,
+                            jmxConnectorWrap,
+                            topicCodeCMBean,
+                            new ObjectName(topicCodeCMBean.getObjectName(kafkaVersion.getVersionNum()) + ",topic=" + topicName)
+                    );
+                } catch (Exception e) {
+                    LOGGER.error("get topic codec metrics failed, clusterId:{} topicName:{} mbean:{}.",
+                            clusterId, topicName, topicCodeCMBean, e
+                    );
+                }
+            }
+        }
+
+        codeCValues.addAll(ListUtils.string2StrList(metrics.getSpecifiedMetrics("TopicCodeCValue", String.class)));
+
+        return Joiner.on(",").join(codeCValues);
+    }
+
     private void getAndSupplyAttributes2BaseMetrics(BaseMetrics metrics,
                                                     JmxConnectorWrap jmxConnectorWrap,
                                                     MbeanV2 mbeanV2,
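Note: getTopicCodeCValue relies on a JMX pattern query: it builds an ObjectName ending in a wildcard, calls queryNames, and reads the "Value" attribute of each match while skipping the internal appId=admin beans. The same pattern-query mechanics with the standard javax.management API, runnable against the local JVM's platform MBeans as a stand-in for a broker connection:

import java.lang.management.ManagementFactory;
import java.util.Set;
import javax.management.MBeanServer;
import javax.management.ObjectName;

public class JmxPatternQueryDemo {
    public static void main(String[] args) throws Exception {
        MBeanServer server = ManagementFactory.getPlatformMBeanServer();
        // A trailing wildcard in an ObjectName matches every bean under that type,
        // mirroring "kafka.server:type=AppIdTopicMetrics,name=RecordCompression,appId=*,topic=xxx".
        ObjectName pattern = new ObjectName("java.lang:type=MemoryPool,name=*");
        Set<ObjectName> matches = server.queryNames(pattern, null);
        for (ObjectName name : matches) {
            // The commit reads the "Value" attribute; platform memory-pool beans expose "Name".
            System.out.println(name + " -> " + server.getAttribute(name, "Name"));
        }
    }
}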
@@ -88,6 +88,7 @@ public class LogicalClusterServiceImpl implements LogicalClusterService {
     public LogicalCluster getLogicalCluster(Long logicalClusterId) {
         LogicalClusterDO logicalClusterDO = logicClusterMetadataManager.getLogicalCluster(logicalClusterId);
         if (ValidateUtils.isNull(logicalClusterDO)) {
+            LOGGER.warn("class=LogicalClusterServiceImpl||method=getLogicalCluster||logicalClusterId={}||msg=logicalClusterDO is null!", logicalClusterId);
             return null;
         }
         return convert2LogicalCluster(logicalClusterDO);
@@ -223,8 +224,7 @@ public class LogicalClusterServiceImpl implements LogicalClusterService {
                 return ResultStatus.SUCCESS;
             }
         } catch (DuplicateKeyException e) {
-            LOGGER.error("create logical cluster failed, name already existed, newLogicalClusterDO:{}.",
-                    logicalClusterDO, e);
+            LOGGER.error("create logical cluster failed, name already existed, newLogicalClusterDO:{}.", logicalClusterDO, e);
             return ResultStatus.RESOURCE_ALREADY_EXISTED;
         } catch (Exception e) {
             LOGGER.error("create logical cluster failed, mysql error, newLogicalClusterDO:{}.", logicalClusterDO, e);
@@ -264,6 +264,7 @@ public class LogicalClusterServiceImpl implements LogicalClusterService {
             }
             return ResultStatus.RESOURCE_NOT_EXIST;
         } catch (Exception e) {
+            LOGGER.error("class=LogicalClusterServiceImpl||method=getById||errMsg={}||logicalClusterId={}", e.getMessage(), logicalClusterId, e);
             return ResultStatus.MYSQL_ERROR;
         }
     }
@@ -68,6 +68,8 @@ public class RegionServiceImpl implements RegionService {
             LOGGER.error("create region failed, newRegionDO:{}.", regionDO, e);
             return ResultStatus.MYSQL_ERROR;
         }

+        LOGGER.warn("class=RegionServiceImpl||method=createRegion||regionDO={}||msg=create region failed", regionDO);
         return ResultStatus.MYSQL_ERROR;
     }

@@ -107,6 +109,7 @@ public class RegionServiceImpl implements RegionService {
             if (regionDao.updateById(newRegionDO) > 0) {
                 return ResultStatus.SUCCESS;
             }
+            LOGGER.warn("class=RegionServiceImpl||method=updateRegion||newRegionDO={}||msg=update region failed", newRegionDO);
             return ResultStatus.MYSQL_ERROR;
         }
         List<Integer> newBrokerIdList = ListUtils.string2IntList(newRegionDO.getBrokerList());
@@ -125,6 +128,7 @@ public class RegionServiceImpl implements RegionService {
         } catch (Exception e) {
             LOGGER.error("update region failed, newRegionDO:{}", newRegionDO, e);
         }
+        LOGGER.warn("class=RegionServiceImpl||method=updateRegion||newRegionDO={}||msg=update region failed", newRegionDO);
         return ResultStatus.MYSQL_ERROR;
     }

@@ -65,9 +65,6 @@ public class TopicManagerServiceImpl implements TopicManagerService {
     @Autowired
     private LogicalClusterMetadataManager logicalClusterMetadataManager;

-    @Autowired
-    private LogicalClusterService logicalClusterService;
-
     @Autowired
     private JmxService jmxService;

@@ -77,6 +74,9 @@ public class TopicManagerServiceImpl implements TopicManagerService {
     @Autowired
     private ClusterService clusterService;

+    @Autowired
+    private RegionService regionService;
+
     @Override
     public List<TopicDO> listAll() {
         try {
@@ -288,7 +288,6 @@ public class TopicManagerServiceImpl implements TopicManagerService {
     private List<TopicDTO> getTopics(ClusterDO clusterDO,
                                      Map<String, AppDO> appMap,
                                      Map<String, TopicDO> topicMap) {
-        Boolean needAuth = !ValidateUtils.isBlank(clusterDO.getSecurityProperties());
         List<TopicDTO> dtoList = new ArrayList<>();
         for (String topicName: PhysicalClusterMetadataManager.getTopicNameList(clusterDO.getId())) {
             LogicalClusterDO logicalClusterDO = logicalClusterMetadataManager.getTopicLogicalCluster(
@@ -305,7 +304,7 @@ public class TopicManagerServiceImpl implements TopicManagerService {
             dto.setLogicalClusterId(logicalClusterDO.getId());
             dto.setLogicalClusterName(logicalClusterDO.getName());
             dto.setTopicName(topicName);
-            dto.setNeedAuth(needAuth);
+            dto.setNeedAuth(Boolean.TRUE);

             TopicDO topicDO = topicMap.get(topicName);
             if (ValidateUtils.isNull(topicDO)) {
@@ -371,12 +370,14 @@ public class TopicManagerServiceImpl implements TopicManagerService {
         TopicMetadata topicMetaData = PhysicalClusterMetadataManager.getTopicMetadata(physicalClusterId, topicName);
         if (ValidateUtils.isNull(topicMetaData)) {
             // Topic does not exist
+            LOGGER.warn("class=TopicManagerServiceImpl||method=getTopicAuthorizedApps||physicalClusterId={}||topicName={}||msg=topicMetaData is null", physicalClusterId, topicName);
             return new ArrayList<>();
         }

         List<AuthorityDO> authorityDOList = authorityService.getAuthorityByTopic(physicalClusterId, topicName);
         if (ValidateUtils.isEmptyList(authorityDOList)) {
             // No permissions at all
+            LOGGER.warn("class=TopicManagerServiceImpl||method=getTopicAuthorizedApps||physicalClusterId={}||topicName={}||msg=authorityDOList is null", physicalClusterId, topicName);
             return new ArrayList<>();
         }

@@ -489,12 +490,17 @@ public class TopicManagerServiceImpl implements TopicManagerService {
                 PhysicalClusterMetadataManager.getZKConfig(physicalClusterId),
                 topicName
         );
+        List<RegionDO> regionDOList = regionService.getRegionListByTopicName(physicalClusterId, topicName);
+        List<String> regionNameList = regionDOList.stream().map(RegionDO::getName).collect(Collectors.toList());
+
         TopicDO topicDO = getByTopicName(physicalClusterId, topicName);
         if (ValidateUtils.isNull(topicDO)) {
-            return new Result<>(convert2RdTopicBasic(clusterDO, topicName, null, null, properties));
+            return new Result<>(convert2RdTopicBasic(clusterDO, topicName, null, null, regionNameList, properties));
         }
         AppDO appDO = appService.getByAppId(topicDO.getAppId());
-        return new Result<>(convert2RdTopicBasic(clusterDO, topicName, topicDO, appDO, properties));
+
+        return new Result<>(convert2RdTopicBasic(clusterDO, topicName, topicDO, appDO, regionNameList, properties));
     }

     @Override
@@ -527,6 +533,7 @@ public class TopicManagerServiceImpl implements TopicManagerService {
                                               String topicName,
                                               TopicDO topicDO,
                                               AppDO appDO,
+                                              List<String> regionNameList,
                                               Properties properties) {
         RdTopicBasic rdTopicBasic = new RdTopicBasic();
         rdTopicBasic.setClusterId(clusterDO.getId());
@@ -539,6 +546,7 @@ public class TopicManagerServiceImpl implements TopicManagerService {
         if (!ValidateUtils.isNull(topicDO)) {
             rdTopicBasic.setDescription(topicDO.getDescription());
         }
+        rdTopicBasic.setRegionNameList(regionNameList);
         rdTopicBasic.setProperties(properties);
         rdTopicBasic.setRetentionTime(KafkaZookeeperUtils.getTopicRetentionTime(properties));
         return rdTopicBasic;
@@ -13,7 +13,6 @@ import com.xiaojukeji.kafka.manager.common.entity.ao.PartitionOffsetDTO;
 import com.xiaojukeji.kafka.manager.common.entity.ao.topic.*;
 import com.xiaojukeji.kafka.manager.common.entity.dto.normal.TopicDataSampleDTO;
 import com.xiaojukeji.kafka.manager.common.entity.metrics.TopicMetrics;
-import com.xiaojukeji.kafka.manager.common.utils.ListUtils;
 import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
 import com.xiaojukeji.kafka.manager.common.utils.jmx.JmxConstant;
 import com.xiaojukeji.kafka.manager.common.zookeeper.znode.brokers.BrokerMetadata;
@@ -44,6 +43,7 @@ import org.springframework.stereotype.Service;
 import org.springframework.util.StringUtils;

 import java.util.*;
+import java.util.stream.Collectors;

 /**
  * @author limeng
@@ -80,6 +80,9 @@ public class TopicServiceImpl implements TopicService {
     @Autowired
     private ClusterService clusterService;

+    @Autowired
+    private RegionService regionService;
+
     @Override
     public List<TopicMetricsDO> getTopicMetricsFromDB(Long clusterId, String topicName, Date startTime, Date endTime) {
         try {
@@ -228,25 +231,10 @@ public class TopicServiceImpl implements TopicService {
             basicDTO.setPrincipals(appDO.getPrincipals());
         }

-        LogicalClusterDO logicalClusterDO = logicalClusterMetadataManager.getTopicLogicalCluster(clusterId, topicName);
-        if (!ValidateUtils.isNull(logicalClusterDO)) {
-            basicDTO.setRegion(logicalClusterDO.getName());
-        }
-
-        TopicMetrics metrics = jmxService.getTopicMetrics(
-                clusterId,
-                topicName,
-                KafkaMetricsCollections.TOPIC_BASIC_PAGE_METRICS,
-                true
-        );
-
-        String compressionType = null;
-        if (!ValidateUtils.isNull(metrics)) {
-            compressionType = metrics.getSpecifiedMetrics("TopicCodeCValue", String.class);
-        }
-        basicDTO.setTopicCodeC(
-                ListUtils.strList2String(new ArrayList<>(new HashSet<>(ListUtils.string2StrList(compressionType))))
-        );
+        List<RegionDO> regionDOList = regionService.getRegionListByTopicName(clusterId, topicName);
+        basicDTO.setRegionNameList(regionDOList.stream().map(RegionDO::getName).collect(Collectors.toList()));
+
+        basicDTO.setTopicCodeC(jmxService.getTopicCodeCValue(clusterId, topicName));
         basicDTO.setScore(100);
         return basicDTO;
     }
@@ -469,6 +457,7 @@ public class TopicServiceImpl implements TopicService {
             return overview;
         }
         overview.setByteIn(metrics.getBytesInPerSecOneMinuteRate(null));
+        overview.setByteOut(metrics.getBytesOutPerSecOneMinuteRate(null));
         overview.setProduceRequest(metrics.getTotalProduceRequestsPerSecOneMinuteRate(null));
         return overview;
     }
@@ -19,6 +19,9 @@ public class ConfigUtils {
     @Value(value = "${spring.profiles.active}")
     private String kafkaManagerEnv;

+    @Value(value = "${custom.store-metrics-task.save-days}")
+    private Integer maxMetricsSaveDays;
+
     public String getIdc() {
         return idc;
     }
@@ -42,4 +45,12 @@ public class ConfigUtils {
     public void setKafkaManagerEnv(String kafkaManagerEnv) {
         this.kafkaManagerEnv = kafkaManagerEnv;
     }
+
+    public Integer getMaxMetricsSaveDays() {
+        return maxMetricsSaveDays;
+    }
+
+    public void setMaxMetricsSaveDays(Integer maxMetricsSaveDays) {
+        this.maxMetricsSaveDays = maxMetricsSaveDays;
+    }
 }
@@ -1,5 +1,6 @@
 package com.xiaojukeji.kafka.manager.service.utils;

+import com.alibaba.fastjson.JSON;
 import com.xiaojukeji.kafka.manager.common.constant.Constant;
 import com.xiaojukeji.kafka.manager.common.entity.ResultStatus;
 import com.xiaojukeji.kafka.manager.common.entity.pojo.ClusterDO;
@@ -11,6 +12,8 @@ import kafka.utils.ZkUtils;
 import org.I0Itec.zkclient.exception.ZkNodeExistsException;
 import org.apache.kafka.common.errors.*;
 import org.apache.kafka.common.security.JaasUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import scala.Option;
 import scala.collection.JavaConversions;
 import scala.collection.Seq;
@@ -22,6 +25,9 @@ import java.util.*;
  * @date 20/4/22
  */
 public class TopicCommands {
+    private static final Logger LOGGER = LoggerFactory.getLogger(TopicCommands.class);
+
+
     public static ResultStatus createTopic(ClusterDO clusterDO,
                                            String topicName,
                                            Integer partitionNum,
@@ -56,16 +62,28 @@ public class TopicCommands {
                     false
             );
         } catch (NullPointerException e) {
+            LOGGER.error("class=TopicCommands||method=createTopic||errMsg={}||clusterDO={}||topicName={}||partitionNum={}||replicaNum={}||brokerIdList={}||config={}",
+                    e.getMessage(), clusterDO, topicName, partitionNum, replicaNum, JSON.toJSONString(brokerIdList), config, e);
             return ResultStatus.TOPIC_OPERATION_PARAM_NULL_POINTER;
         } catch (InvalidPartitionsException e) {
+            LOGGER.error("class=TopicCommands||method=createTopic||errMsg={}||clusterDO={}||topicName={}||partitionNum={}||replicaNum={}||brokerIdList={}||config={}",
+                    e.getMessage(), clusterDO, topicName, partitionNum, replicaNum, JSON.toJSONString(brokerIdList), config, e);
             return ResultStatus.TOPIC_OPERATION_PARTITION_NUM_ILLEGAL;
         } catch (InvalidReplicationFactorException e) {
+            LOGGER.error("class=TopicCommands||method=createTopic||errMsg={}||clusterDO={}||topicName={}||partitionNum={}||replicaNum={}||brokerIdList={}||config={}",
+                    e.getMessage(), clusterDO, topicName, partitionNum, replicaNum, JSON.toJSONString(brokerIdList), config, e);
             return ResultStatus.BROKER_NUM_NOT_ENOUGH;
         } catch (TopicExistsException | ZkNodeExistsException e) {
+            LOGGER.error("class=TopicCommands||method=createTopic||errMsg={}||clusterDO={}||topicName={}||partitionNum={}||replicaNum={}||brokerIdList={}||config={}",
+                    e.getMessage(), clusterDO, topicName, partitionNum, replicaNum, JSON.toJSONString(brokerIdList), config, e);
             return ResultStatus.TOPIC_OPERATION_TOPIC_EXISTED;
         } catch (InvalidTopicException e) {
+            LOGGER.error("class=TopicCommands||method=createTopic||errMsg={}||clusterDO={}||topicName={}||partitionNum={}||replicaNum={}||brokerIdList={}||config={}",
+                    e.getMessage(), clusterDO, topicName, partitionNum, replicaNum, JSON.toJSONString(brokerIdList), config, e);
             return ResultStatus.TOPIC_OPERATION_TOPIC_NAME_ILLEGAL;
         } catch (Throwable t) {
+            LOGGER.error("class=TopicCommands||method=createTopic||errMsg={}||clusterDO={}||topicName={}||partitionNum={}||replicaNum={}||brokerIdList={}||config={}",
+                    t.getMessage(), clusterDO, topicName, partitionNum, replicaNum, JSON.toJSONString(brokerIdList), config, t);
             return ResultStatus.TOPIC_OPERATION_UNKNOWN_ERROR;
         } finally {
             if (zkUtils != null) {
@@ -86,10 +104,13 @@ public class TopicCommands {
             );
             AdminUtils.deleteTopic(zkUtils, topicName);
         } catch (UnknownTopicOrPartitionException e) {
+            LOGGER.error("class=TopicCommands||method=deleteTopic||errMsg={}||clusterDO={}||topicName={}", e.getMessage(), clusterDO, topicName, e);
             return ResultStatus.TOPIC_OPERATION_UNKNOWN_TOPIC_PARTITION;
         } catch (ZkNodeExistsException e) {
+            LOGGER.error("class=TopicCommands||method=deleteTopic||errMsg={}||clusterDO={}||topicName={}", e.getMessage(), clusterDO, topicName, e);
             return ResultStatus.TOPIC_OPERATION_TOPIC_IN_DELETING;
         } catch (Throwable t) {
+            LOGGER.error("class=TopicCommands||method=deleteTopic||errMsg={}||clusterDO={}||topicName={}", t.getMessage(), clusterDO, topicName, t);
             return ResultStatus.TOPIC_OPERATION_UNKNOWN_ERROR;
         } finally {
             if (zkUtils != null) {
@@ -108,13 +129,15 @@ public class TopicCommands {
                     Constant.DEFAULT_SESSION_TIMEOUT_UNIT_MS,
                     JaasUtils.isZkSecurityEnabled()
             );
-
             AdminUtils.changeTopicConfig(zkUtils, topicName, config);
         } catch (AdminOperationException e) {
+            LOGGER.error("class=TopicCommands||method=modifyTopicConfig||errMsg={}||clusterDO={}||topicName={}||config={}", e.getMessage(), clusterDO, topicName, config, e);
             return ResultStatus.TOPIC_OPERATION_UNKNOWN_TOPIC_PARTITION;
         } catch (InvalidConfigurationException e) {
+            LOGGER.error("class=TopicCommands||method=modifyTopicConfig||errMsg={}||clusterDO={}||topicName={}||config={}", e.getMessage(), clusterDO, topicName, config, e);
             return ResultStatus.TOPIC_OPERATION_TOPIC_CONFIG_ILLEGAL;
         } catch (Throwable t) {
+            LOGGER.error("class=TopicCommands||method=modifyTopicConfig||errMsg={}||clusterDO={}||topicName={}||config={}", t.getMessage(), clusterDO, topicName, config, t);
             return ResultStatus.TOPIC_OPERATION_UNKNOWN_ERROR;
         } finally {
             if (zkUtils != null) {
@@ -174,6 +197,8 @@ public class TopicCommands {
                     true
             );
         } catch (Throwable t) {
+            LOGGER.error("class=TopicCommands||method=expandTopic||errMsg={}||clusterDO={}||topicName={}||partitionNum={}||brokerIdList={}",
+                    t.getMessage(), clusterDO, topicName, partitionNum, JSON.toJSONString(brokerIdList), t);
             return ResultStatus.TOPIC_OPERATION_UNKNOWN_ERROR;
         } finally {
             if (zkUtils != null) {
@@ -31,4 +31,6 @@ public interface TopicThrottledMetricsDao {
     List<TopicThrottledMetricsDO> getAppIdThrottle(long clusterId, String appId, Date startTime, Date endTime);

     List<TopicThrottledMetricsDO> getLatestTopicThrottledMetrics(Long clusterId, Date afterTime);
+
+    int deleteBeforeTime(Date endTime);
 }
@@ -73,4 +73,9 @@ public class TopicThrottledMetricsDaoImpl implements TopicThrottledMetricsDao {
         }
         return new ArrayList<>(throttleMap.values());
     }
+
+    @Override
+    public int deleteBeforeTime(Date endTime) {
+        return sqlSession.delete("TopicThrottledMetricsDao.deleteBeforeTime", endTime);
+    }
 }
@@ -54,4 +54,9 @@
         AND gmt_create > #{afterTime}
     </select>
+
+    <delete id="deleteBeforeTime" parameterType="java.util.Date">
+        <![CDATA[
+            DELETE FROM topic_throttled_metrics WHERE gmt_create < #{endTime} LIMIT 3000
+        ]]>
+    </delete>
 </mapper>
@@ -76,6 +76,7 @@ public class AccountServiceImpl implements AccountService {
         } catch (Exception e) {
             LOGGER.error("create account failed, operate mysql failed, accountDO:{}.", accountDO, e);
         }
+        LOGGER.warn("class=AccountServiceImpl||method=createAccount||accountDO={}||msg=add account fail,{}!", accountDO, ResultStatus.MYSQL_ERROR.getMessage());
         return ResultStatus.MYSQL_ERROR;
     }

@@ -88,6 +89,7 @@ public class AccountServiceImpl implements AccountService {
         } catch (Exception e) {
             LOGGER.error("delete account failed, username:{}.", username, e);
         }
+        LOGGER.warn("class=AccountServiceImpl||method=deleteByName||username={}||msg=delete account fail,{}!", username, ResultStatus.MYSQL_ERROR.getMessage());
         return ResultStatus.MYSQL_ERROR;
     }

@@ -110,6 +112,7 @@ public class AccountServiceImpl implements AccountService {
         } catch (Exception e) {
             LOGGER.error("update account failed, accountDO:{}.", accountDO, e);
         }
+        LOGGER.warn("class=AccountServiceImpl||method=updateAccount||accountDO={}||msg=update account fail,{}!", accountDO, ResultStatus.MYSQL_ERROR.getMessage());
         return ResultStatus.MYSQL_ERROR;
     }

@@ -6,23 +6,24 @@ package com.xiaojukeji.kafka.manager.bpm.common;
  * @date 19/6/23
  */
 public enum OrderTypeEnum {
     APPLY_TOPIC (00, "Topic application", "applyTopicOrder"),
     DELETE_TOPIC (10, "Topic offlining", "deleteTopicOrder"),
+    THIRD_PART_DELETE_TOPIC (20, "third-party Topic offlining application", "thirdPartDeleteTopicOrder"),

     APPLY_APP (01, "app application", "applyAppOrder"),
     DELETE_APP (11, "app offlining", "deleteAppOrder"),

     APPLY_QUOTA (02, "quota application", "applyQuotaOrder"),
     APPLY_PARTITION (12, "partition application", "applyPartitionOrder"),

     APPLY_AUTHORITY (03, "authority application", "applyAuthorityOrder"),
     DELETE_AUTHORITY (13, "authority deletion", "deleteAuthorityOrder"),

     APPLY_CLUSTER (04, "cluster application", "applyClusterOrder"),
     DELETE_CLUSTER (14, "cluster offlining", "deleteClusterOrder"),

-    APPLY_EXPAND_CLUSTER(05, "cluster expansion", "modifyClusterOrder"),
-    APPLY_REDUCE_CLUSTER(15, "cluster reduction", "modifyClusterOrder"),
+    APPLY_EXPAND_CLUSTER (05, "cluster expansion", "modifyClusterOrder"),
+    APPLY_REDUCE_CLUSTER (15, "cluster reduction", "modifyClusterOrder"),

     ;

@@ -0,0 +1,69 @@
+package com.xiaojukeji.kafka.manager.bpm.common.entry.apply;
+
+import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
+
+/**
+ * @author zengqiao
+ * @date 20/12/2
+ */
+public class OrderExtensionThirdPartDeleteTopicDTO {
+    private Long clusterId;
+
+    private String topicName;
+
+    private String appId;
+
+    private String password;
+
+    public Long getClusterId() {
+        return clusterId;
+    }
+
+    public void setClusterId(Long clusterId) {
+        this.clusterId = clusterId;
+    }
+
+    public String getTopicName() {
+        return topicName;
+    }
+
+    public void setTopicName(String topicName) {
+        this.topicName = topicName;
+    }
+
+    public String getAppId() {
+        return appId;
+    }
+
+    public void setAppId(String appId) {
+        this.appId = appId;
+    }
+
+    public String getPassword() {
+        return password;
+    }
+
+    public void setPassword(String password) {
+        this.password = password;
+    }
+
+    @Override
+    public String toString() {
+        return "OrderExtensionThirdPartDeleteTopicDTO{" +
+                "clusterId=" + clusterId +
+                ", topicName='" + topicName + '\'' +
+                ", appId='" + appId + '\'' +
+                ", password='" + password + '\'' +
+                '}';
+    }
+
+    public boolean paramLegal() {
+        if (ValidateUtils.isNull(clusterId)
+                || ValidateUtils.isBlank(topicName)
+                || ValidateUtils.isBlank(appId)
+                || ValidateUtils.isBlank(password)) {
+            return false;
+        }
+        return true;
+    }
+}
@@ -3,11 +3,8 @@ package com.xiaojukeji.kafka.manager.bpm.component;
 import com.xiaojukeji.kafka.manager.bpm.common.OrderStatusEnum;
 import com.xiaojukeji.kafka.manager.common.entity.ResultStatus;
 import com.xiaojukeji.kafka.manager.common.entity.pojo.OrderDO;
-import com.xiaojukeji.kafka.manager.common.events.OrderApplyEvent;
-import com.xiaojukeji.kafka.manager.common.utils.SpringTool;
 import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
 import com.xiaojukeji.kafka.manager.dao.OrderDao;
-import com.xiaojukeji.kafka.manager.service.utils.ConfigUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -27,17 +24,12 @@ public class LocalStorageService extends AbstractOrderStorageService {
     @Autowired
     private OrderDao orderDao;

-    @Autowired
-    private ConfigUtils configUtils;
-
     @Override
     public ResultStatus directSaveHandledOrder(OrderDO orderDO) {
         try {
             if (orderDao.directSaveHandledOrder(orderDO) <= 0) {
                 return ResultStatus.MYSQL_ERROR;
             }
-            // No notification needed
-            // SpringTool.publish(new OrderApplyEvent(this, orderDO, configUtils.getIdc()));
             return ResultStatus.SUCCESS;
         } catch (Exception e) {
             LOGGER.error("add order failed, orderDO:{}.", orderDO, e);
@@ -52,7 +44,6 @@ public class LocalStorageService extends AbstractOrderStorageService {
                 return false;
             }

-            SpringTool.publish(new OrderApplyEvent(this, orderDO, configUtils.getIdc()));
             return true;
         } catch (Exception e) {
             LOGGER.error("add order failed, orderDO:{}.", orderDO, e);
@@ -261,6 +261,14 @@ public class OrderServiceImpl implements OrderService {
                 resultList.add(new OrderResult(id, Result.buildFrom(ResultStatus.ORDER_NOT_EXIST)));
                 continue;
             }
+            // Topic applications and topic partition applications do not support batch approval.
+            if (orderDO.getType().equals(OrderTypeEnum.APPLY_TOPIC.getCode())
+                    || orderDO.getType().equals(OrderTypeEnum.APPLY_PARTITION.getCode())) {
+                if (OrderStatusEnum.PASSED.getCode().equals(reqObj.getStatus())) {
+                    continue;
+                }
+            }
+
             orderDOList.add(orderDO);
         }
         // Sort by creation time
@@ -0,0 +1,164 @@
+package com.xiaojukeji.kafka.manager.bpm.order.impl;
+
+import com.alibaba.fastjson.JSONObject;
+import com.xiaojukeji.kafka.manager.bpm.common.OrderTypeEnum;
+import com.xiaojukeji.kafka.manager.bpm.common.entry.apply.OrderExtensionThirdPartDeleteTopicDTO;
+import com.xiaojukeji.kafka.manager.bpm.common.entry.detail.AbstractOrderDetailData;
+import com.xiaojukeji.kafka.manager.bpm.common.entry.detail.OrderDetailDeleteTopicDTO;
+import com.xiaojukeji.kafka.manager.bpm.common.handle.OrderHandleBaseDTO;
+import com.xiaojukeji.kafka.manager.bpm.order.AbstractTopicOrder;
+import com.xiaojukeji.kafka.manager.common.constant.Constant;
+import com.xiaojukeji.kafka.manager.common.entity.Result;
+import com.xiaojukeji.kafka.manager.common.entity.ResultStatus;
+import com.xiaojukeji.kafka.manager.common.entity.ao.topic.TopicConnection;
+import com.xiaojukeji.kafka.manager.common.entity.pojo.ClusterDO;
+import com.xiaojukeji.kafka.manager.common.entity.pojo.OrderDO;
+import com.xiaojukeji.kafka.manager.common.entity.pojo.TopicDO;
+import com.xiaojukeji.kafka.manager.common.entity.pojo.gateway.AppDO;
+import com.xiaojukeji.kafka.manager.common.utils.ListUtils;
+import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
+import com.xiaojukeji.kafka.manager.service.cache.LogicalClusterMetadataManager;
+import com.xiaojukeji.kafka.manager.service.cache.PhysicalClusterMetadataManager;
+import com.xiaojukeji.kafka.manager.service.service.AdminService;
+import com.xiaojukeji.kafka.manager.service.service.ClusterService;
+import com.xiaojukeji.kafka.manager.service.service.TopicManagerService;
+import com.xiaojukeji.kafka.manager.service.service.gateway.AppService;
+import com.xiaojukeji.kafka.manager.service.service.gateway.TopicConnectionService;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Component;
+
+import java.util.Date;
+import java.util.List;
+
+/**
+ * @author zengqiao
+ * @date 20/12/2
+ */
+@Component("thirdPartDeleteTopicOrder")
+public class ThirdPartDeleteTopicOrder extends AbstractTopicOrder {
+    @Autowired
+    private LogicalClusterMetadataManager logicalClusterMetadataManager;
+
+    @Autowired
+    private AppService appService;
+
+    @Autowired
+    private ClusterService clusterService;
+
+    @Autowired
+    private AdminService adminService;
+
+    @Autowired
+    private TopicManagerService topicManagerService;
+
+    @Autowired
+    private TopicConnectionService connectionService;
+
+    @Override
+    public AbstractOrderDetailData getOrderExtensionDetailData(String extensions) {
+        OrderDetailDeleteTopicDTO orderDetailDTO = new OrderDetailDeleteTopicDTO();
+        OrderExtensionThirdPartDeleteTopicDTO orderExtensionDTO = JSONObject.parseObject(
+                extensions,
+                OrderExtensionThirdPartDeleteTopicDTO.class);
+        orderDetailDTO.setTopicName(orderExtensionDTO.getTopicName());
+        ClusterDO clusterDO = clusterService.getById(orderExtensionDTO.getClusterId());
+        if (!ValidateUtils.isNull(clusterDO)) {
+            orderDetailDTO.setPhysicalClusterId(clusterDO.getId());
+            orderDetailDTO.setPhysicalClusterName(clusterDO.getClusterName());
+        }
+
+        List<TopicConnection> connectionDTOList = connectionService.getByTopicName(
+                clusterDO.getId(),
+                orderExtensionDTO.getTopicName(),
+                new Date(System.currentTimeMillis() - Constant.TOPIC_CONNECTION_LATEST_TIME_MS),
+                new Date());
+        orderDetailDTO.setConnectionList(connectionDTOList);
+
+        TopicDO topicDO = topicManagerService.getByTopicName(clusterDO.getId(), orderExtensionDTO.getTopicName());
+        if (ValidateUtils.isNull(topicDO)) {
+            return orderDetailDTO;
+        }
+
+        AppDO appDO = appService.getByAppId(topicDO.getAppId());
+        if (ValidateUtils.isNull(appDO)) {
+            return orderDetailDTO;
+        }
+        orderDetailDTO.setAppId(appDO.getAppId());
+        orderDetailDTO.setAppName(appDO.getName());
+        orderDetailDTO.setAppPrincipals(appDO.getPrincipals());
+        return orderDetailDTO;
+    }
+
+    @Override
+    public Result<String> checkExtensionFieldsAndGenerateTitle(String extensions) {
+        OrderExtensionThirdPartDeleteTopicDTO orderExtensionDTO = JSONObject.parseObject(
+                extensions,
+                OrderExtensionThirdPartDeleteTopicDTO.class);
+        if (!orderExtensionDTO.paramLegal()) {
+            return Result.buildFrom(ResultStatus.PARAM_ILLEGAL);
+        }
+        Long physicalClusterId = logicalClusterMetadataManager.getPhysicalClusterId(orderExtensionDTO.getClusterId(), true);
+        if (ValidateUtils.isNull(physicalClusterId)) {
+            return Result.buildFrom(ResultStatus.CLUSTER_NOT_EXIST);
+        }
+        if (!PhysicalClusterMetadataManager.isTopicExist(physicalClusterId, orderExtensionDTO.getTopicName())) {
+            return Result.buildFrom(ResultStatus.TOPIC_NOT_EXIST);
+        }
+        AppDO appDO = appService.getByAppId(orderExtensionDTO.getAppId());
+        if (ValidateUtils.isNull(appDO)) {
+            return Result.buildFrom(ResultStatus.APP_NOT_EXIST);
+        }
+        if (!appDO.getPassword().equals(orderExtensionDTO.getPassword())) {
+            return Result.buildFrom(ResultStatus.USER_WITHOUT_AUTHORITY);
+        }
+
+        String title = String.format(
+                "%s-%d-%s",
+                OrderTypeEnum.DELETE_TOPIC.getMessage(),
+                orderExtensionDTO.getClusterId(),
+                orderExtensionDTO.getTopicName()
+        );
+        return new Result<>(title);
+    }
+
+    @Override
+    public ResultStatus handleOrderDetail(OrderDO orderDO,
+                                          OrderHandleBaseDTO orderHandleBaseDTO,
+                                          String userName) {
+        OrderExtensionThirdPartDeleteTopicDTO extensionDTO = JSONObject.parseObject(orderDO.getExtensions(),
+                OrderExtensionThirdPartDeleteTopicDTO.class);
+        Long physicalClusterId = logicalClusterMetadataManager.getPhysicalClusterId(extensionDTO.getClusterId(), true);
+        if (ValidateUtils.isNull(physicalClusterId)) {
+            return ResultStatus.CLUSTER_NOT_EXIST;
+        }
+        ClusterDO clusterDO = clusterService.getById(physicalClusterId);
+        if (!PhysicalClusterMetadataManager.isTopicExistStrictly(physicalClusterId, extensionDTO.getTopicName())) {
+            return ResultStatus.TOPIC_NOT_EXIST;
+        }
+        if (connectionService.isExistConnection(
+                physicalClusterId,
+                extensionDTO.getTopicName(),
+                new Date(System.currentTimeMillis() - Constant.TOPIC_CONNECTION_LATEST_TIME_MS),
+                new Date())
+        ) {
+            return ResultStatus.OPERATION_FORBIDDEN;
+        }
+
+        // Check whether the applicant is among the app principals
+        AppDO appDO = appService.getByAppId(extensionDTO.getAppId());
+        if (ValidateUtils.isNull(appDO)) {
+            return ResultStatus.APP_NOT_EXIST;
+        }
+        if (!appDO.getPassword().equals(extensionDTO.getPassword())
+                || !ListUtils.string2StrList(appDO.getPrincipals()).contains(orderDO.getApplicant())) {
+            // Wrong password or the applicant is not an app principal, return an error
+            return ResultStatus.USER_WITHOUT_AUTHORITY;
+        }
+
+        ResultStatus resultStatus = adminService.deleteTopic(clusterDO, extensionDTO.getTopicName(), userName);
+        if (!ResultStatus.SUCCESS.equals(resultStatus)) {
+            return resultStatus;
+        }
+        return resultStatus;
+    }
+}
@@ -94,6 +94,7 @@ public class N9e extends AbstractAgent {
             );
             N9eResult zr = JSON.parseObject(response, N9eResult.class);
             if (!ValidateUtils.isBlank(zr.getErr())) {
+                LOGGER.warn("class=N9e||method=createTask||param={}||errMsg={}||msg=call create task fail", JsonUtils.toJSONString(param), zr.getErr());
                 return null;
             }
             return Long.valueOf(zr.getDat().toString());
@@ -110,7 +111,7 @@ public class N9e extends AbstractAgent {

         String response = null;
         try {
-            response = HttpUtils.postForString(
+            response = HttpUtils.putForString(
                     baseUrl + ACTION_TASK_URI.replace("{taskId}", taskId.toString()),
                     JSON.toJSONString(param),
                     buildHeader()
@@ -119,6 +120,7 @@ public class N9e extends AbstractAgent {
             if (ValidateUtils.isBlank(zr.getErr())) {
                 return true;
             }
+            LOGGER.warn("class=N9e||method=actionTask||param={}||errMsg={}||msg=call action task fail", JSON.toJSONString(param), zr.getErr());
             return false;
         } catch (Exception e) {
             LOGGER.error("action task failed, taskId:{}, action:{}.", taskId, action, e);
@@ -134,7 +136,7 @@ public class N9e extends AbstractAgent {

         String response = null;
         try {
-            response = HttpUtils.postForString(
+            response = HttpUtils.putForString(
                     baseUrl + ACTION_HOST_TASK_URI.replace("{taskId}", taskId.toString()),
                     JSON.toJSONString(param),
                     buildHeader()
@@ -143,6 +145,7 @@ public class N9e extends AbstractAgent {
             if (ValidateUtils.isBlank(zr.getErr())) {
                 return true;
             }
+            LOGGER.warn("class=N9e||method=actionHostTask||param={}||errMsg={}||msg=call action host task fail", JSON.toJSONString(param), zr.getErr());
             return false;
         } catch (Exception e) {
             LOGGER.error("action task failed, taskId:{} action:{} hostname:{}.", taskId, action, hostname, e);
@@ -265,6 +268,7 @@ public class N9e extends AbstractAgent {

             while ((line = bufferedReader.readLine()) != null) {
                 stringBuilder.append(line);
+                stringBuilder.append("\n");
             }
             return stringBuilder.toString();
         } catch (IOException e) {
@@ -2,6 +2,7 @@ package com.xiaojukeji.kafka.manager.kcm.component.storage.local;

 import com.xiaojukeji.kafka.manager.common.entity.Result;
 import com.xiaojukeji.kafka.manager.common.entity.ResultStatus;
+import org.springframework.beans.factory.annotation.Value;
 import org.springframework.stereotype.Service;
 import com.xiaojukeji.kafka.manager.kcm.component.storage.AbstractStorageService;
 import org.springframework.web.multipart.MultipartFile;
@@ -12,6 +13,9 @@ import org.springframework.web.multipart.MultipartFile;
  */
 @Service("storageService")
 public class Local extends AbstractStorageService {
+    @Value("${kcm.storage.base-url}")
+    private String baseUrl;
+
     @Override
     public boolean upload(String fileName, String fileMd5, MultipartFile uploadFile) {
         return false;
@@ -24,6 +28,6 @@ public class Local extends AbstractStorageService {

     @Override
     public String getDownloadBaseUrl() {
-        return "";
+        return baseUrl;
     }
 }
@@ -56,6 +56,7 @@ public class KafkaFileServiceImpl implements KafkaFileService {
             }
             return ResultStatus.SUCCESS;
         } catch (DuplicateKeyException e) {
+            LOGGER.error("class=KafkaFileServiceImpl||method=uploadKafkaFile||errMsg={}||kafkaFileDTO={}||username={}", e.getMessage(), kafkaFileDTO, username, e);
             return ResultStatus.RESOURCE_ALREADY_EXISTED;
         } catch (Exception e) {
             LOGGER.error("upload kafka file failed, kafkaFileDTO:{}.", kafkaFileDTO, e);
@@ -93,6 +94,7 @@ public class KafkaFileServiceImpl implements KafkaFileService {
                 return ResultStatus.MYSQL_ERROR;
             }
         } catch (DuplicateKeyException e) {
+            LOGGER.error("class=KafkaFileServiceImpl||method=modifyKafkaFile||errMsg={}||kafkaFileDTO={}||userName={}", e.getMessage(), kafkaFileDTO, userName, e);
             return ResultStatus.RESOURCE_NAME_DUPLICATED;
         } catch (Exception e) {
             LOGGER.error("modify kafka file failed, kafkaFileDTO:{}.", kafkaFileDTO, e);
@@ -24,7 +24,7 @@ public class ClusterHostTaskService extends AbstractClusterTaskService {
         CreationTaskData dto = new CreationTaskData();
         for (String hostname: clusterHostTaskDTO.getHostList()) {
             if (!NetUtils.hostnameLegal(hostname)) {
-                return Result.buildFrom(ResultStatus.PARAM_ILLEGAL);
+                return Result.buildFrom(ResultStatus.CLUSTER_TASK_HOST_LIST_ILLEGAL);
             }
         }
         dto.setHostList(clusterHostTaskDTO.getHostList());
@@ -19,12 +19,13 @@ p_kafka_server_properties_md5=${8} # server config MD5
 p_kafka_server_properties_url=${9} # server config file download URL

 #----------------------------------------Configuration------------------------------------------------#
-g_hostname=`hostname`
-g_base_dir='/home/km'
+g_base_dir='/home'
 g_cluster_task_dir=${g_base_dir}"/kafka_cluster_task/task_${p_task_id}" # deploy/upgrade path
 g_rollback_version=${g_cluster_task_dir}"/rollback_version" # rollback version
 g_new_kafka_package_name='' # final package name
 g_kafka_manager_addr='' # kafka-manager address
+g_local_ip=`ifconfig -a|grep inet|grep -v 127.0.0.1|grep -v inet6|awk '{print $2}'|tr -d "addr:"`
+g_hostname=${g_local_ip}

 #----------------------------------------Functions-----------------------------------------------------#

@@ -71,11 +72,11 @@ function check_and_init_env() {

 # Check and wait until all replicas in the cluster are in sync
 function check_and_wait_broker_stabled() {
-    under_replication_count=`curl -s -G -d "hostname="#{g_hostname} ${g_kafka_manager_addr}/api/v1/third-part/${p_cluster_id}/broker-stabled | python -m json.tool | grep true |wc -l`
+    under_replication_count=`curl -s -G -d "hostname="${g_hostname} ${g_kafka_manager_addr}/api/v1/third-part/${p_cluster_id}/broker-stabled | python -m json.tool | grep true |wc -l`
     while [ "$under_replication_count" -ne 1 ]; do
         ECHO_LOG "${under_replication_count} replicas are still out of sync, sleep 10s"
         sleep 10
-        under_replication_count=`curl -s ${g_kafka_manager_addr}/api/v1/${p_cluster_id}/overview | python -m json.tool | grep false |wc -l`
+        under_replication_count=`curl -s -G -d "hostname="${g_hostname} ${g_kafka_manager_addr}/api/v1/third-part/${p_cluster_id}/broker-stabled | python -m json.tool | grep true |wc -l`
     done
     ECHO_LOG "All cluster replicas are in sync, the cluster upgrade can proceed"
 }
@@ -137,6 +138,9 @@ function prepare_cluster_task_files() {
         exit 1
     fi

+    # listeners config: use the current machine's IP and append it as the last line of server.properties
+    echo "listeners=SASL_PLAINTEXT://${g_local_ip}:9093,PLAINTEXT://${g_local_ip}:9092" >> "${g_cluster_task_dir}/${p_kafka_package_name}/config/server.properties"
+
     # Write the MD5 info into the package
     echo "package_md5:${p_kafka_package_md5} server_properties_md5:${p_kafka_package_md5}" > "${g_cluster_task_dir}/${p_kafka_package_name}/package_and_properties.md5"
 }
|||||||
@@ -132,14 +132,12 @@ public class ThirdPartServiceImpl implements ThirdPartService {
|
|||||||
if (ValidateUtils.isNull(dto)) {
|
if (ValidateUtils.isNull(dto)) {
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
List<PartitionOffsetDTO> offsetDTOList = dto.getPartitionOffsetDTOList();
|
|
||||||
if (ValidateUtils.isEmptyList(offsetDTOList)) {
|
List<PartitionOffsetDTO> offsetDTOList = this.getPartitionOffsetDTOList(clusterDO, dto);
|
||||||
offsetDTOList = topicService.getPartitionOffsetList(
|
|
||||||
clusterDO, dto.getTopicName(), dto.getTimestamp());
|
|
||||||
}
|
|
||||||
if (ValidateUtils.isEmptyList(offsetDTOList)) {
|
if (ValidateUtils.isEmptyList(offsetDTOList)) {
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
OffsetLocationEnum offsetLocation = dto.getLocation().equals(
|
OffsetLocationEnum offsetLocation = dto.getLocation().equals(
|
||||||
OffsetLocationEnum.ZOOKEEPER.location) ? OffsetLocationEnum.ZOOKEEPER : OffsetLocationEnum.BROKER;
|
OffsetLocationEnum.ZOOKEEPER.location) ? OffsetLocationEnum.ZOOKEEPER : OffsetLocationEnum.BROKER;
|
||||||
ResultStatus result = checkConsumerGroupExist(clusterDO, dto.getTopicName(), dto.getConsumerGroup(), offsetLocation, dto.getCreateIfAbsent());
|
ResultStatus result = checkConsumerGroupExist(clusterDO, dto.getTopicName(), dto.getConsumerGroup(), offsetLocation, dto.getCreateIfAbsent());
|
||||||
@@ -160,6 +158,39 @@ public class ThirdPartServiceImpl implements ThirdPartService {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private List<PartitionOffsetDTO> getPartitionOffsetDTOList(ClusterDO clusterDO, OffsetResetDTO dto) {
|
||||||
|
List<PartitionOffsetDTO> offsetDTOList = dto.getPartitionOffsetDTOList();
|
||||||
|
if (!ValidateUtils.isEmptyList(offsetDTOList)) {
|
||||||
|
return offsetDTOList;
|
||||||
|
}
|
||||||
|
|
||||||
|
offsetDTOList = topicService.getPartitionOffsetList(clusterDO, dto.getTopicName(), dto.getTimestamp());
|
||||||
|
if (!ValidateUtils.isEmptyList(offsetDTOList)) {
|
||||||
|
return offsetDTOList;
|
||||||
|
}
|
||||||
|
|
||||||
|
Map<TopicPartition, Long> endOffsetMap = topicService.getPartitionOffset(clusterDO, dto.getTopicName(), OffsetPosEnum.END);
|
||||||
|
if (ValidateUtils.isEmptyMap(endOffsetMap)) {
|
||||||
|
return new ArrayList<>();
|
||||||
|
}
|
||||||
|
|
||||||
|
Map<TopicPartition, Long> beginOffsetMap = topicService.getPartitionOffset(clusterDO, dto.getTopicName(), OffsetPosEnum.BEGINNING);
|
||||||
|
if (ValidateUtils.isEmptyMap(beginOffsetMap)) {
|
||||||
|
return new ArrayList<>();
|
||||||
|
}
|
||||||
|
|
||||||
|
offsetDTOList = new ArrayList<>();
|
||||||
|
for (Map.Entry<TopicPartition, Long> entry: endOffsetMap.entrySet()) {
|
||||||
|
Long beginOffset = beginOffsetMap.get(entry.getKey());
|
||||||
|
if (ValidateUtils.isNull(beginOffset) || !beginOffset.equals(entry.getValue())) {
|
||||||
|
// offset 不相等, 表示还有数据, 则直接返回
|
||||||
|
return new ArrayList<>();
|
||||||
|
}
|
||||||
|
offsetDTOList.add(new PartitionOffsetDTO(entry.getKey().partition(), entry.getValue()));
|
||||||
|
}
|
||||||
|
return offsetDTOList;
|
||||||
|
}
|
||||||
|
|
||||||
private ResultStatus checkConsumerGroupExist(ClusterDO clusterDO,
|
private ResultStatus checkConsumerGroupExist(ClusterDO clusterDO,
|
||||||
String topicName,
|
String topicName,
|
||||||
String consumerGroup,
|
String consumerGroup,
|
||||||
|
|||||||
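The extracted getPartitionOffsetDTOList helper makes the reset-offset fallback chain explicit: use offsets supplied in the request; otherwise look them up by the request timestamp; otherwise, only when every partition's begin offset equals its end offset (the topic holds no unconsumed data), reset to the end offsets; in any remaining case return an empty list so the caller rejects the reset. A self-contained sketch of that final begin-vs-end check, with hypothetical names rather than the project's types:

    import java.util.*;

    class OffsetFallbackSketch {
        // Returns {partition, offset} pairs only when the topic is fully drained
        // (begin == end for every partition); otherwise an empty list, signalling
        // that the reset target must not be guessed.
        static List<long[]> resetToEndIfDrained(Map<Integer, Long> begin, Map<Integer, Long> end) {
            List<long[]> result = new ArrayList<>();
            for (Map.Entry<Integer, Long> entry : end.entrySet()) {
                Long b = begin.get(entry.getKey());
                if (b == null || !b.equals(entry.getValue())) {
                    return new ArrayList<>();   // unconsumed data remains
                }
                result.add(new long[]{entry.getKey(), entry.getValue()});
            }
            return result;
        }
    }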
@@ -1,4 +1,4 @@
-package com.xiaojukeji.kafka.manager.task.dispatch.metrics.store;
+package com.xiaojukeji.kafka.manager.task.dispatch.metrics.collect;

 import com.xiaojukeji.kafka.manager.common.constant.KafkaMetricsCollections;
 import com.xiaojukeji.kafka.manager.common.entity.metrics.TopicMetrics;

@@ -16,12 +16,12 @@ import org.springframework.beans.factory.annotation.Autowired;
 import java.util.*;

 /**
- * Topic community metrics storage
+ * Topic community metrics collection
  * @author zengqiao
  * @date 20/7/21
  */
-@CustomScheduled(name = "storeCommunityTopicMetrics", cron = "31 0/1 * * * ?", threadNum = 5)
-public class StoreCommunityTopicMetrics extends AbstractScheduledTask<ClusterDO> {
+@CustomScheduled(name = "collectAndPublishCommunityTopicMetrics", cron = "31 0/1 * * * ?", threadNum = 5)
+public class CollectAndPublishCommunityTopicMetrics extends AbstractScheduledTask<ClusterDO> {
     @Autowired
     private JmxService jmxService;
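The store→collect rename matches a split the rest of this commit makes explicit: the scheduled task now only collects and publishes, while persistence moves into listener beans (StoreCommunityTopicMetrics2DB and StoreTopicThrottledMetrics2DB further down) that react to collected-metrics events. A hedged sketch of that Spring publish/listen shape, with simplified types rather than the project's exact classes:

    import org.springframework.context.ApplicationEvent;
    import org.springframework.context.ApplicationEventPublisher;
    import org.springframework.context.ApplicationListener;
    import org.springframework.stereotype.Component;

    class MetricsCollectedEvent extends ApplicationEvent {
        final Object metrics;
        MetricsCollectedEvent(Object source, Object metrics) { super(source); this.metrics = metrics; }
    }

    @Component
    class Collector {
        private final ApplicationEventPublisher publisher;
        Collector(ApplicationEventPublisher publisher) { this.publisher = publisher; }

        void collect(Object metrics) {
            // storage becomes someone else's concern; listeners can be toggled independently
            publisher.publishEvent(new MetricsCollectedEvent(this, metrics));
        }
    }

    @Component
    class Store2DB implements ApplicationListener<MetricsCollectedEvent> {
        @Override
        public void onApplicationEvent(MetricsCollectedEvent event) {
            // persist event.metrics here
        }
    }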
@@ -1,5 +1,6 @@
 package com.xiaojukeji.kafka.manager.task.dispatch.metrics.delete;

+import com.xiaojukeji.kafka.manager.common.constant.Constant;
 import com.xiaojukeji.kafka.manager.common.constant.LogConstant;
 import com.xiaojukeji.kafka.manager.dao.*;
 import com.xiaojukeji.kafka.manager.service.utils.ConfigUtils;

@@ -19,27 +20,30 @@ import java.util.List;
  * @author zengqiao
  * @date 20/1/8
  */
-@CustomScheduled(name = "deleteMetrics", cron = "0 0/1 * * * ?", threadNum = 1)
+@CustomScheduled(name = "deleteMetrics", cron = "0 0/2 * * * ?", threadNum = 1)
 public class DeleteMetrics extends AbstractScheduledTask<EmptyEntry> {
     private final static Logger LOGGER = LoggerFactory.getLogger(LogConstant.SCHEDULED_TASK_LOGGER);

     @Autowired
-    private TopicMetricsDao topicMetricsDao;
+    private ConfigUtils configUtils;

     @Autowired
-    private TopicAppMetricsDao topicAppMetricsDao;
+    private TopicMetricsDao topicMetricsDao;

     @Autowired
-    private TopicRequestMetricsDao topicRequestMetricsDao;
+    private TopicAppMetricsDao topicAppMetricsDao;

     @Autowired
-    private BrokerMetricsDao brokerMetricsDao;
+    private TopicRequestMetricsDao topicRequestMetricsDao;

     @Autowired
-    private ClusterMetricsDao clusterMetricsDao;
+    private BrokerMetricsDao brokerMetricsDao;

     @Autowired
-    private ConfigUtils configUtils;
+    private ClusterMetricsDao clusterMetricsDao;
+
+    @Autowired
+    private TopicThrottledMetricsDao topicThrottledMetricsDao;

     @Override
     public List<EmptyEntry> listAllTasks() {

@@ -50,8 +54,8 @@ public class DeleteMetrics extends AbstractScheduledTask<EmptyEntry> {

     @Override
     public void processTask(EmptyEntry entryEntry) {
-        if (!"dev".equals(configUtils.getKafkaManagerEnv())) {
-            // skip directly in non-staging/online environments
+        if (Constant.INVALID_CODE.equals(configUtils.getMaxMetricsSaveDays())) {
+            // no data needs deleting
             return;
         }

@@ -75,6 +79,12 @@ public class DeleteMetrics extends AbstractScheduledTask<EmptyEntry> {
             LOGGER.error("delete topic request metrics failed.", e);
         }

+        try {
+            deleteThrottledMetrics();
+        } catch (Exception e) {
+            LOGGER.error("delete topic throttled metrics failed.", e);
+        }
+
         try {
             deleteBrokerMetrics();
         } catch (Exception e) {

@@ -90,27 +100,32 @@ public class DeleteMetrics extends AbstractScheduledTask<EmptyEntry> {
     }

     private void deleteTopicMetrics() {
-        Date endTime = new Date(System.currentTimeMillis() - 3 * 24 * 60 * 60 * 1000);
+        Date endTime = new Date(System.currentTimeMillis() - configUtils.getMaxMetricsSaveDays() * 24 * 60 * 60 * 1000);
         topicMetricsDao.deleteBeforeTime(endTime);
     }

     private void deleteTopicAppMetrics() {
-        Date endTime = new Date(System.currentTimeMillis() - 3 * 24 * 60 * 60 * 1000);
+        Date endTime = new Date(System.currentTimeMillis() - configUtils.getMaxMetricsSaveDays() * 24 * 60 * 60 * 1000);
         topicAppMetricsDao.deleteBeforeTime(endTime);
     }

     private void deleteTopicRequestMetrics() {
-        Date endTime = new Date(System.currentTimeMillis() - 3 * 24 * 60 * 60 * 1000);
+        Date endTime = new Date(System.currentTimeMillis() - configUtils.getMaxMetricsSaveDays() * 24 * 60 * 60 * 1000);
         topicRequestMetricsDao.deleteBeforeTime(endTime);
     }

+    private void deleteThrottledMetrics() {
+        Date endTime = new Date(System.currentTimeMillis() - configUtils.getMaxMetricsSaveDays() * 24 * 60 * 60 * 1000);
+        topicThrottledMetricsDao.deleteBeforeTime(endTime);
+    }
+
     private void deleteBrokerMetrics() {
-        Date endTime = new Date(System.currentTimeMillis() - 7 * 24 * 60 * 60 * 1000);
+        Date endTime = new Date(System.currentTimeMillis() - configUtils.getMaxMetricsSaveDays() * 24 * 60 * 60 * 1000);
         brokerMetricsDao.deleteBeforeTime(endTime);
     }

     private void deleteClusterMetrics() {
-        Date endTime = new Date(System.currentTimeMillis() - 7 * 24 * 60 * 60 * 1000);
+        Date endTime = new Date(System.currentTimeMillis() - configUtils.getMaxMetricsSaveDays() * 24 * 60 * 60 * 1000);
         clusterMetricsDao.deleteBeforeTime(endTime);
     }
 }
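Retention is now configuration-driven: each delete method derives its cutoff from configUtils.getMaxMetricsSaveDays() (surfaced as custom.store-metrics-task.save-days in the application.yml hunk below) instead of the old hard-coded 3- and 7-day windows, and the task bails out when the setting is the invalid sentinel. One hedged caution rather than a claim about the running system: days * 24 * 60 * 60 * 1000 is evaluated in int arithmetic, which overflows once the day count exceeds 24; a long literal keeps the subtraction safe for any retention length:

    import java.util.Date;

    class RetentionCutoff {
        // 86_400_000L forces long math; with int math, saveDays >= 25 would wrap negative.
        static Date cutoff(int saveDays) {
            return new Date(System.currentTimeMillis() - saveDays * 86_400_000L);
        }
    }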
@@ -23,6 +23,7 @@ import com.xiaojukeji.kafka.manager.task.component.CustomScheduled;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;

 import java.util.ArrayList;
 import java.util.List;

@@ -34,6 +35,7 @@ import java.util.Map;
  * @date 20/5/7
  */
 @CustomScheduled(name = "storeBrokerMetrics", cron = "21 0/1 * * * ?", threadNum = 2)
+@ConditionalOnProperty(prefix = "custom.store-metrics-task.community", name = "broker-metrics-enabled", havingValue = "true", matchIfMissing = true)
 public class StoreBrokerMetrics extends AbstractScheduledTask<ClusterDO> {
     private static final Logger LOGGER = LoggerFactory.getLogger(LogConstant.SCHEDULED_TASK_LOGGER);
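Each metrics task is now bean-gated with @ConditionalOnProperty, and the same pattern repeats below for the didi collectors and the two storage listeners. Because matchIfMissing = true, a task stays enabled until its flag is set to something other than "true"; to switch off broker metrics collection without a rebuild, the configuration below should presumably suffice:

    custom:
      store-metrics-task:
        community:
          broker-metrics-enabled: false   # the StoreBrokerMetrics bean is then never created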
@@ -16,6 +16,7 @@ import com.xiaojukeji.kafka.manager.task.component.CustomScheduled;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
 import org.springframework.stereotype.Component;

 import java.util.*;

@@ -25,6 +26,7 @@ import java.util.*;
  * @date 20/7/21
  */
 @CustomScheduled(name = "storeDiDiAppTopicMetrics", cron = "41 0/1 * * * ?", threadNum = 5)
+@ConditionalOnProperty(prefix = "custom.store-metrics-task.didi", name = "app-topic-metrics-enabled", havingValue = "true", matchIfMissing = true)
 public class StoreDiDiAppTopicMetrics extends AbstractScheduledTask<ClusterDO> {
     private final static Logger LOGGER = LoggerFactory.getLogger(LogConstant.SCHEDULED_TASK_LOGGER);
@@ -16,6 +16,7 @@ import com.xiaojukeji.kafka.manager.task.component.CustomScheduled;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;

 import java.util.*;

@@ -24,6 +25,7 @@ import java.util.*;
  * @date 20/7/21
  */
 @CustomScheduled(name = "storeDiDiTopicRequestTimeMetrics", cron = "51 0/1 * * * ?", threadNum = 5)
+@ConditionalOnProperty(prefix = "custom.store-metrics-task.didi", name = "topic-request-time-metrics-enabled", havingValue = "true", matchIfMissing = true)
 public class StoreDiDiTopicRequestTimeMetrics extends AbstractScheduledTask<ClusterDO> {
     private final static Logger LOGGER = LoggerFactory.getLogger(LogConstant.SCHEDULED_TASK_LOGGER);
@@ -69,6 +69,7 @@ public class AutoHandleTopicOrder extends AbstractScheduledTask<EmptyEntry> {
             return ;
         }

+        Integer maxPassedOrderNumPerTask = configService.getAutoPassedTopicApplyOrderNumPerTask();
         for (OrderDO orderDO: doList) {
             if (!OrderTypeEnum.APPLY_TOPIC.getCode().equals(orderDO.getType())) {
                 continue;

@@ -77,7 +78,11 @@ public class AutoHandleTopicOrder extends AbstractScheduledTask<EmptyEntry> {
                 if (!handleApplyTopicOrder(orderDO)) {
                     continue;
                 }
-                return;
+                maxPassedOrderNumPerTask -= 1;
+                if (maxPassedOrderNumPerTask <= 0) {
+                    return;
+                }
+                LOGGER.info("class=AutoHandleTopicOrder||method=processTask||msg=passed id:{}", orderDO.getId());
             } catch (Exception e) {
                 LOGGER.error("handle apply topic order failed, orderDO:{}.", orderDO, e);
             }
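Auto-approval of topic-apply orders used to stop after the first passed order per run; it now spends a budget read from configService.getAutoPassedTopicApplyOrderNumPerTask(), logging each approved order id along the way. The shape, reduced to its core (helpers as named in the diff):

    int budget = maxPassedOrderNumPerTask;   // per-run quota from configuration
    for (OrderDO orderDO : doList) {
        if (!handleApplyTopicOrder(orderDO)) {
            continue;                        // not auto-approvable; try the next order
        }
        if (--budget <= 0) {
            return;                          // quota spent; remaining orders wait for the next run
        }
    }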
@@ -1,4 +1,4 @@
-package com.xiaojukeji.kafka.manager.task.dispatch.metrics.store;
+package com.xiaojukeji.kafka.manager.task.listener;

 import com.xiaojukeji.kafka.manager.monitor.common.entry.bizenum.MonitorMetricNameEnum;
 import com.xiaojukeji.kafka.manager.common.constant.LogConstant;

@@ -11,6 +11,7 @@ import com.xiaojukeji.kafka.manager.service.utils.MetricsConvertUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
 import org.springframework.context.ApplicationListener;
 import org.springframework.stereotype.Component;

@@ -22,6 +23,7 @@ import java.util.List;
  * @date 20/9/1
  */
 @Component("storeCommunityTopicMetrics2DB")
+@ConditionalOnProperty(prefix = "custom.store-metrics-task.community", name = "topic-metrics-enabled", havingValue = "true", matchIfMissing = true)
 public class StoreCommunityTopicMetrics2DB implements ApplicationListener<TopicMetricsCollectedEvent> {
     private final static Logger LOGGER = LoggerFactory.getLogger(LogConstant.SCHEDULED_TASK_LOGGER);
@@ -11,6 +11,7 @@ import com.xiaojukeji.kafka.manager.task.common.TopicThrottledMetricsCollectedEv
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
 import org.springframework.context.ApplicationListener;
 import org.springframework.stereotype.Component;

@@ -21,6 +22,7 @@ import java.util.*;
  * @date 20/9/24
  */
 @Component("storeTopicThrottledMetrics2DB")
+@ConditionalOnProperty(prefix = "custom.store-metrics-task.didi", name = "topic-throttled-metrics", havingValue = "true", matchIfMissing = true)
 public class StoreTopicThrottledMetrics2DB implements ApplicationListener<TopicThrottledMetricsCollectedEvent> {
     private final static Logger LOGGER = LoggerFactory.getLogger(LogConstant.SCHEDULED_TASK_LOGGER);
@@ -31,9 +31,6 @@ public class FlushClusterMetadata {
         Set<Long> oldClusterIdSet = physicalClusterMetadataManager.getClusterIdSet();
         for (ClusterDO clusterDO: doList) {
             newClusterIdSet.add(clusterDO.getId());
-            if (oldClusterIdSet.contains(clusterDO.getId())) {
-                continue;
-            }

             // add the cluster
             physicalClusterMetadataManager.addNew(clusterDO);
kafka-manager-web/assembly.xml (new file, 44 lines)
@@ -0,0 +1,44 @@
+<assembly
+        xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0"
+        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+        xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd">
+    <id>bin</id>
+    <formats>
+        <format>dir</format>
+        <format>tar.gz</format>
+    </formats>
+    <fileSets>
+        <fileSet>
+            <includes>
+                <include>bin/*</include>
+            </includes>
+            <fileMode>0755</fileMode>
+        </fileSet>
+
+        <fileSet>
+            <directory>../docs/install_guide</directory>
+            <outputDirectory>install</outputDirectory>
+            <includes>
+                <include>*</include>
+            </includes>
+        </fileSet>
+
+        <fileSet>
+            <directory>src/main/resources/</directory>
+            <outputDirectory>conf</outputDirectory>
+            <includes>
+                <include>application.yml</include>
+                <include>logback-spring.xml</include>
+            </includes>
+        </fileSet>
+
+        <fileSet>
+            <directory>${project.build.directory}</directory>
+            <outputDirectory>libs</outputDirectory>
+            <includes>
+                <include>*.jar</include>
+            </includes>
+        </fileSet>
+
+    </fileSets>
+</assembly>
@@ -122,6 +122,26 @@
             </execution>
         </executions>
     </plugin>
+    <!--<plugin>-->
+        <!--<groupId>org.apache.maven.plugins</groupId>-->
+        <!--<artifactId>maven-assembly-plugin</artifactId>-->
+        <!--<executions>-->
+            <!--<execution>-->
+                <!--<id>make-assembly</id>-->
+                <!--<phase>package</phase>-->
+                <!--<goals>-->
+                    <!--<goal>single</goal>-->
+                <!--</goals>-->
+                <!--<configuration>-->
+                    <!--<finalName>kafka-manager-${project.version}</finalName>-->
+                    <!--<descriptors>-->
+                        <!--<descriptor>assembly.xml</descriptor>-->
+                    <!--</descriptors>-->
+                    <!--<tarLongFileMode>posix</tarLongFileMode>-->
+                <!--</configuration>-->
+            <!--</execution>-->
+        <!--</executions>-->
+    <!--</plugin>-->
     </plugins>
 </build>
 </project>
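Together, the descriptor and the (still commented-out) maven-assembly-plugin block describe a distribution bundle: bin/ scripts at mode 0755, the install guides under install/, application.yml and logback-spring.xml under conf/, and the built jars under libs/, emitted both as a directory and as a tar.gz. Assuming the plugin block is uncommented, the bundle would presumably fall out of an ordinary package run:

    mvn clean package   # expected to yield kafka-manager-<version>.tar.gz next to the exploded layout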
@@ -4,6 +4,7 @@ import com.alibaba.fastjson.JSONObject;
 import com.xiaojukeji.kafka.manager.common.annotations.ApiLevel;
 import com.xiaojukeji.kafka.manager.common.constant.ApiLevelContent;
 import com.xiaojukeji.kafka.manager.common.entity.Result;
+import com.xiaojukeji.kafka.manager.common.entity.pojo.gateway.TopicConnectionDO;
 import com.xiaojukeji.kafka.manager.common.utils.JsonUtils;
 import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
 import com.xiaojukeji.kafka.manager.service.service.gateway.TopicConnectionService;

@@ -15,6 +16,8 @@ import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.web.bind.annotation.*;

+import java.util.List;
+
 /**
  * @author zengqiao
  * @date 20/7/6

@@ -35,15 +38,22 @@ public class GatewayHeartbeatController {
     public Result receiveTopicConnections(@RequestParam("clusterId") Long clusterId,
                                           @RequestParam("brokerId") Integer brokerId,
                                           @RequestBody JSONObject jsonObject) {
-        try {
-            if (ValidateUtils.isNull(jsonObject) || jsonObject.isEmpty()) {
-                return Result.buildSuc();
-            }
-            topicConnectionService.batchAdd(JsonUtils.parseTopicConnections(clusterId, jsonObject));
+        if (ValidateUtils.isNull(jsonObject) || jsonObject.isEmpty()) {
+            LOGGER.info("class=GatewayHeartbeatController||method=receiveTopicConnections||clusterId={}||brokerId={}||msg=connections empty!", clusterId, brokerId);
             return Result.buildSuc();
-        } catch (Exception e) {
-            LOGGER.error("receive topic connections failed, clusterId:{} brokerId:{} req:{}", clusterId, brokerId, jsonObject, e);
         }
-        return Result.buildFailure("fail");
+
+        LOGGER.info("class=GatewayHeartbeatController||method=receiveTopicConnections||clusterId={}||brokerId={}||size={}||msg=receive connections", clusterId, brokerId, jsonObject.size());
+
+        List<TopicConnectionDO> doList = null;
+        try {
+            doList = JsonUtils.parseTopicConnections(clusterId, jsonObject);
+        } catch (Exception e) {
+            LOGGER.error("class=GatewayHeartbeatController||method=receiveTopicConnections||clusterId={}||brokerId={}||msg=parse data failed||exception={}", clusterId, brokerId, e.getMessage());
+            return Result.buildFailure("fail");
+        }
+
+        topicConnectionService.batchAdd(doList);
+        return Result.buildSuc();
     }
 }
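Behavior note: the heartbeat endpoint now acknowledges an empty payload with success (plus an info log), fails fast with "fail" when the body cannot be parsed, and no longer wraps the write in a catch-all, so batchAdd errors surface instead of collapsing into a generic failure. Reduced to its contract, as a sketch rather than the project's test code:

    // empty body       -> buildSuc()                       nothing to store
    // unparseable body -> buildFailure("fail")             rejected before any write
    // valid body       -> batchAdd(doList); buildSuc()     write errors now propagate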
@@ -14,6 +14,8 @@ import com.xiaojukeji.kafka.manager.service.service.gateway.GatewayConfigService
 import com.xiaojukeji.kafka.manager.common.constant.ApiPrefix;
 import io.swagger.annotations.Api;
 import io.swagger.annotations.ApiOperation;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.web.bind.annotation.*;

@@ -29,6 +31,9 @@ import java.util.Map;
 @RestController
 @RequestMapping(ApiPrefix.GATEWAY_API_V1_PREFIX)
 public class GatewayServiceDiscoveryController {
+
+    private final static Logger LOGGER = LoggerFactory.getLogger(GatewayHeartbeatController.class);
+
     @Autowired
     private GatewayConfigService gatewayConfigService;

@@ -38,6 +43,7 @@ public class GatewayServiceDiscoveryController {
     @ResponseBody
     public String getKafkaBootstrapServer(@RequestParam("clusterId") Long clusterId) {
         if (ValidateUtils.isNull(clusterId)) {
+            LOGGER.warn("class=GatewayServiceDiscoveryController||method=getKafkaBootstrapServer||msg=param clusterId is null!");
             return "";
         }
         GatewayConfigDO configDO = gatewayConfigService.getByTypeAndName(

@@ -45,6 +51,7 @@ public class GatewayServiceDiscoveryController {
                 String.valueOf(clusterId)
         );
         if (ValidateUtils.isNull(configDO)) {
+            LOGGER.info("class=GatewayServiceDiscoveryController||method=getKafkaBootstrapServer||msg=configDO is null!");
             return "";
         }
         return configDO.getValue();
@@ -9,8 +9,11 @@ import com.xiaojukeji.kafka.manager.common.entity.vo.common.AccountSummaryVO;
 import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
 import com.xiaojukeji.kafka.manager.common.utils.SpringTool;
 import com.xiaojukeji.kafka.manager.common.constant.ApiPrefix;
+import com.xiaojukeji.kafka.manager.web.api.versionone.gateway.GatewayHeartbeatController;
 import io.swagger.annotations.Api;
 import io.swagger.annotations.ApiOperation;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.web.bind.annotation.*;

@@ -25,6 +28,9 @@ import java.util.List;
 @RestController
 @RequestMapping(ApiPrefix.API_V1_NORMAL_PREFIX)
 public class NormalAccountController {
+
+    private final static Logger LOGGER = LoggerFactory.getLogger(NormalAccountController.class);
+
     @Autowired
     private AccountService accountService;

@@ -34,6 +40,8 @@ public class NormalAccountController {
     public Result<List<AccountSummaryVO>> searchOnJobStaffByKeyWord(@RequestParam("keyWord") String keyWord) {
         List<EnterpriseStaff> staffList = accountService.searchAccountByPrefix(keyWord);
         if (ValidateUtils.isEmptyList(staffList)) {
+            LOGGER.info("class=NormalAccountController||method=searchOnJobStaffByKeyWord||keyWord={}||msg=staffList is empty!"
+                    ,keyWord);
             return new Result<>();
         }
         List<AccountSummaryVO> voList = new ArrayList<>();
@@ -1,5 +1,6 @@
 package com.xiaojukeji.kafka.manager.web.api.versionone.normal;

+import com.xiaojukeji.kafka.manager.account.AccountService;
 import com.xiaojukeji.kafka.manager.common.annotations.ApiLevel;
 import com.xiaojukeji.kafka.manager.common.bizenum.TopicAuthorityEnum;
 import com.xiaojukeji.kafka.manager.common.constant.ApiLevelContent;

@@ -8,6 +9,7 @@ import com.xiaojukeji.kafka.manager.common.entity.Result;
 import com.xiaojukeji.kafka.manager.common.entity.ResultStatus;
 import com.xiaojukeji.kafka.manager.common.entity.ao.AppTopicDTO;
 import com.xiaojukeji.kafka.manager.common.entity.dto.normal.AppDTO;
+import com.xiaojukeji.kafka.manager.common.entity.pojo.gateway.AppDO;
 import com.xiaojukeji.kafka.manager.common.entity.vo.normal.QuotaVO;
 import com.xiaojukeji.kafka.manager.common.entity.vo.normal.app.AppTopicAuthorityVO;
 import com.xiaojukeji.kafka.manager.common.entity.vo.normal.app.AppTopicVO;

@@ -45,6 +47,9 @@ public class NormalAppController {
     @Autowired
     private AppService appService;

+    @Autowired
+    private AccountService accountService;
+
     @Autowired
     private QuotaService quotaService;

@@ -71,9 +76,16 @@ public class NormalAppController {
     @RequestMapping(value = "apps/{appId}/basic-info", method = RequestMethod.GET)
     @ResponseBody
     public Result<AppVO> getAppBasicInfo(@PathVariable String appId) {
-        return new Result<>(AppConverter.convert2AppVO(
-                appService.getByAppId(appId))
-        );
+        if (accountService.isAdminOrderHandler(SpringTool.getUserName())) {
+            return new Result<>(AppConverter.convert2AppVO(appService.getByAppId(appId)));
+        }
+
+        AppDO appDO = appService.getAppByUserAndId(appId, SpringTool.getUserName());
+        if (appDO == null) {
+            return Result.buildFrom(ResultStatus.USER_WITHOUT_AUTHORITY);
+        }
+
+        return new Result<>(AppConverter.convert2AppVO(appDO));
     }

     @ApiOperation(value = "App修改", notes = "")
@@ -79,6 +79,7 @@ public class NormalConsumerController {
                                           @RequestParam("location") String location,
                                           @RequestParam(value = "isPhysicalClusterId", required = false) Boolean isPhysicalClusterId) {
         if (ValidateUtils.isNull(location)) {
+
             return Result.buildFrom(ResultStatus.PARAM_ILLEGAL);
         }
         Long physicalClusterId = logicalClusterMetadataManager.getPhysicalClusterId(clusterId, isPhysicalClusterId);
@@ -4,6 +4,7 @@ import com.xiaojukeji.kafka.manager.common.constant.Constant;
 import com.xiaojukeji.kafka.manager.common.constant.KafkaMetricsCollections;
 import com.xiaojukeji.kafka.manager.common.entity.Result;
 import com.xiaojukeji.kafka.manager.common.entity.ResultStatus;
+import com.xiaojukeji.kafka.manager.common.entity.ao.topic.TopicConnection;
 import com.xiaojukeji.kafka.manager.common.entity.ao.topic.TopicPartitionDTO;
 import com.xiaojukeji.kafka.manager.common.entity.dto.normal.TopicDataSampleDTO;
 import com.xiaojukeji.kafka.manager.common.entity.metrics.BaseMetrics;

@@ -14,6 +15,7 @@ import com.xiaojukeji.kafka.manager.common.utils.SpringTool;
 import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
 import com.xiaojukeji.kafka.manager.common.entity.pojo.ClusterDO;
 import com.xiaojukeji.kafka.manager.common.entity.pojo.KafkaBillDO;
+import com.xiaojukeji.kafka.manager.common.utils.jmx.JmxAttributeEnum;
 import com.xiaojukeji.kafka.manager.service.cache.LogicalClusterMetadataManager;
 import com.xiaojukeji.kafka.manager.service.cache.PhysicalClusterMetadataManager;
 import com.xiaojukeji.kafka.manager.service.service.*;

@@ -134,18 +136,27 @@ public class NormalTopicController {
     public Result<List<TopicRequestTimeDetailVO>> getTopicRequestMetrics(
             @PathVariable Long clusterId,
             @PathVariable String topicName,
-            @RequestParam(value = "isPhysicalClusterId", required = false) Boolean isPhysicalClusterId) {
+            @RequestParam(value = "isPhysicalClusterId", required = false) Boolean isPhysicalClusterId,
+            @RequestParam(value = "percentile", required = false, defaultValue = "75thPercentile") String percentile) {
         Long physicalClusterId = logicalClusterMetadataManager.getPhysicalClusterId(clusterId, isPhysicalClusterId);
         if (ValidateUtils.isNull(physicalClusterId)) {
             return Result.buildFrom(ResultStatus.CLUSTER_NOT_EXIST);
         }
+
+        Boolean isPercentileLegal = Arrays.stream(JmxAttributeEnum.PERCENTILE_ATTRIBUTE.getAttribute())
+                .anyMatch(percentile::equals);
+
+        if (!isPercentileLegal) {
+            return Result.buildFrom(ResultStatus.PARAM_ILLEGAL);
+        }
+
         BaseMetrics metrics = topicService.getTopicMetricsFromJMX(
                 physicalClusterId,
                 topicName,
                 KafkaMetricsCollections.TOPIC_REQUEST_TIME_DETAIL_PAGE_METRICS,
                 false
         );
-        return new Result<>(TopicModelConverter.convert2TopicRequestTimeDetailVOList(metrics));
+        return new Result<>(TopicModelConverter.convert2TopicRequestTimeDetailVOList(metrics, percentile));
     }

     @ApiOperation(value = "Topic历史请求耗时信息", notes = "")

@@ -184,14 +195,26 @@ public class NormalTopicController {
             return Result.buildFrom(ResultStatus.CLUSTER_NOT_EXIST);
         }

-        return new Result<>(TopicModelConverter.convert2TopicConnectionVOList(
-                connectionService.getByTopicName(
-                        physicalClusterId,
-                        topicName,
-                        new Date(System.currentTimeMillis() - Constant.TOPIC_CONNECTION_LATEST_TIME_MS),
-                        new Date()
-                )
-        ));
+        List<TopicConnection> connections;
+
+        if (ValidateUtils.isBlank(appId)) {
+            connections = connectionService.getByTopicName(
+                    physicalClusterId,
+                    topicName,
+                    new Date(System.currentTimeMillis() - Constant.TOPIC_CONNECTION_LATEST_TIME_MS),
+                    new Date()
+            );
+        } else {
+            connections = connectionService.getByTopicName(
+                    physicalClusterId,
+                    topicName,
+                    appId,
+                    new Date(System.currentTimeMillis() - Constant.TOPIC_CONNECTION_LATEST_TIME_MS),
+                    new Date()
+            );
+        }
+
+        return new Result<>(TopicModelConverter.convert2TopicConnectionVOList(connections));
     }

     @ApiOperation(value = "Topic分区信息", notes = "")
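The request-time endpoint now accepts an optional percentile query parameter, defaulting to 75thPercentile and validated against JmxAttributeEnum.PERCENTILE_ATTRIBUTE (anything else returns PARAM_ILLEGAL). Assuming the usual Kafka JMX histogram suffixes are registered in that enum, a call would look roughly like this, with the path illustrative and only the parameter taken from the diff:

    GET <api-prefix>/{clusterId}/topics/{topicName}/request-time?percentile=99thPercentile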
@@ -70,6 +70,7 @@ public class RdTopicController {
         RdTopicBasicVO vo = new RdTopicBasicVO();
         CopyUtils.copyProperties(vo, result.getData());
         vo.setProperties(result.getData().getProperties());
+        vo.setRegionNameList(result.getData().getRegionNameList());
         return new Result<>(vo);
     }
 }
@@ -0,0 +1,29 @@
+package com.xiaojukeji.kafka.manager.web.config;
+
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.http.client.ClientHttpRequestFactory;
+import org.springframework.http.client.SimpleClientHttpRequestFactory;
+import org.springframework.web.client.RestTemplate;
+
+/**
+ * author: mrazkonglingxu
+ * Date: 2020/12/2
+ * Time: 10:48 AM
+ */
+@Configuration
+public class RestTemplateConfig {
+
+    @Bean
+    public ClientHttpRequestFactory simpleClientHttpRequestFactory() {
+        SimpleClientHttpRequestFactory factory = new SimpleClientHttpRequestFactory();
+        factory.setConnectTimeout(5000);
+        factory.setReadTimeout(5000);
+        return factory;
+    }
+
+    @Bean
+    public RestTemplate restTemplate(ClientHttpRequestFactory factory) {
+        return new RestTemplate(factory);
+    }
+}
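The new @Configuration exposes a shared RestTemplate whose connect and read timeouts are both 5 s, so outbound HTTP calls fail fast instead of pinning a worker thread indefinitely. A hedged usage sketch with a purely illustrative URL:

    import org.springframework.beans.factory.annotation.Autowired;
    import org.springframework.stereotype.Service;
    import org.springframework.web.client.RestTemplate;

    @Service
    class HealthProbe {
        @Autowired
        private RestTemplate restTemplate;   // the 5s/5s-timeout bean from RestTemplateConfig

        String ping() {
            // hypothetical endpoint; a response slower than the read timeout throws ResourceAccessException
            return restTemplate.getForObject("http://127.0.0.1:8004/ping", String.class);
        }
    }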
@@ -40,6 +40,7 @@ public class TopicModelConverter {
         if (!ValidateUtils.isNull(clusterDO)) {
             vo.setBootstrapServers(clusterDO.getBootstrapServers());
         }
+        vo.setRegionNameList(dto.getRegionNameList());
         return vo;
     }

@@ -107,6 +108,54 @@ public class TopicModelConverter {
         return Arrays.asList(produceVO, fetchVO);
     }

+    public static List<TopicRequestTimeDetailVO> convert2TopicRequestTimeDetailVOList(BaseMetrics metrics, String percentile) {
+        if (ValidateUtils.isNull(metrics)) {
+            return new ArrayList<>();
+        }
+        TopicRequestTimeDetailVO produceVO = new TopicRequestTimeDetailVO();
+        produceVO.setRequestTimeType("RequestProduceTime");
+        fillTopicProduceTime(produceVO, metrics, percentile);
+
+        TopicRequestTimeDetailVO fetchVO = new TopicRequestTimeDetailVO();
+        fetchVO.setRequestTimeType("RequestFetchTime");
+        fillTopicFetchTime(fetchVO, metrics, percentile);
+
+        TopicMetrics topicMetrics = (TopicMetrics) metrics;
+        if (!ValidateUtils.isEmptyList(topicMetrics.getBrokerMetricsList())) {
+            List<TopicBrokerRequestTimeVO> brokerProduceTimeList = new ArrayList<>();
+            List<TopicBrokerRequestTimeVO> brokerFetchTimeList = new ArrayList<>();
+            topicMetrics.getBrokerMetricsList().forEach(brokerMetrics -> {
+                TopicBrokerRequestTimeVO topicBrokerProduceReq = new TopicBrokerRequestTimeVO();
+                topicBrokerProduceReq.setClusterId(brokerMetrics.getClusterId());
+                topicBrokerProduceReq.setBrokerId(brokerMetrics.getBrokerId());
+
+                TopicRequestTimeDetailVO brokerProduceVO = new TopicRequestTimeDetailVO();
+                brokerProduceVO.setRequestTimeType("BrokerRequestProduceTime");
+                fillTopicProduceTime(brokerProduceVO, brokerMetrics, percentile);
+
+                topicBrokerProduceReq.setBrokerRequestTime(brokerProduceVO);
+
+                TopicBrokerRequestTimeVO topicBrokerFetchReq = new TopicBrokerRequestTimeVO();
+                topicBrokerFetchReq.setClusterId(brokerMetrics.getClusterId());
+                topicBrokerFetchReq.setBrokerId(brokerMetrics.getBrokerId());
+
+                TopicRequestTimeDetailVO brokerFetchVO = new TopicRequestTimeDetailVO();
+                brokerProduceVO.setRequestTimeType("BrokerRequestFetchTime");
+                fillTopicFetchTime(brokerFetchVO, brokerMetrics, percentile);
+
+                topicBrokerFetchReq.setBrokerRequestTime(brokerFetchVO);
+
+                brokerProduceTimeList.add(topicBrokerProduceReq);
+                brokerFetchTimeList.add(topicBrokerFetchReq);
+            });
+
+            produceVO.setBrokerRequestTimeList(brokerProduceTimeList);
+            fetchVO.setBrokerRequestTimeList(brokerFetchTimeList);
+        }
+
+        return Arrays.asList(produceVO, fetchVO);
+    }
+
     public static List<TopicConnectionVO> convert2TopicConnectionVOList(List<TopicConnection> connectionDTOList) {
         if (ValidateUtils.isNull(connectionDTOList)) {
             return new ArrayList<>();

@@ -224,4 +273,24 @@ public class TopicModelConverter {
         CopyUtils.copyProperties(topicBusinessInfoVO,topicBusinessInfo);
         return topicBusinessInfoVO;
     }
+
+    private static void fillTopicProduceTime(TopicRequestTimeDetailVO produceVO, BaseMetrics metrics, String thPercentile) {
+        produceVO.setRequestQueueTimeMs(metrics.getSpecifiedMetrics("ProduceRequestQueueTimeMs" + thPercentile));
+        produceVO.setResponseQueueTimeMs(metrics.getSpecifiedMetrics("ProduceResponseQueueTimeMs" + thPercentile));
+        produceVO.setResponseSendTimeMs(metrics.getSpecifiedMetrics("ProduceResponseSendTimeMs" + thPercentile));
+        produceVO.setLocalTimeMs(metrics.getSpecifiedMetrics("ProduceLocalTimeMs" + thPercentile));
+        produceVO.setThrottleTimeMs(metrics.getSpecifiedMetrics("ProduceThrottleTimeMs" + thPercentile));
+        produceVO.setRemoteTimeMs(metrics.getSpecifiedMetrics("ProduceRemoteTimeMs" + thPercentile));
+        produceVO.setTotalTimeMs(metrics.getSpecifiedMetrics("ProduceTotalTimeMs" + thPercentile));
+    }
+
+    private static void fillTopicFetchTime(TopicRequestTimeDetailVO fetchVO, BaseMetrics metrics, String thPercentile) {
+        fetchVO.setRequestQueueTimeMs(metrics.getSpecifiedMetrics("FetchConsumerRequestQueueTimeMs" + thPercentile));
+        fetchVO.setResponseQueueTimeMs(metrics.getSpecifiedMetrics("FetchConsumerResponseQueueTimeMs" + thPercentile));
+        fetchVO.setResponseSendTimeMs(metrics.getSpecifiedMetrics("FetchConsumerResponseSendTimeMs" + thPercentile));
+        fetchVO.setLocalTimeMs(metrics.getSpecifiedMetrics("FetchConsumerLocalTimeMs" + thPercentile));
+        fetchVO.setThrottleTimeMs(metrics.getSpecifiedMetrics("FetchConsumerThrottleTimeMs" + thPercentile));
+        fetchVO.setRemoteTimeMs(metrics.getSpecifiedMetrics("FetchConsumerRemoteTimeMs" + thPercentile));
+        fetchVO.setTotalTimeMs(metrics.getSpecifiedMetrics("FetchConsumerTotalTimeMs" + thPercentile));
+    }
 }
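The fill helpers assemble JMX attribute keys by plain concatenation, metric-family name plus percentile suffix, so "ProduceTotalTimeMs" + "99thPercentile" looks up "ProduceTotalTimeMs99thPercentile". (One hedged observation on the broker loop above: the fetch branch sets the request-time type on brokerProduceVO, where brokerFetchVO looks like the intended receiver.) A map-backed stand-in showing the lookup:

    import java.util.HashMap;
    import java.util.Map;

    class MetricsLookupSketch {
        private final Map<String, Object> metricsMap = new HashMap<>();

        Object getSpecifiedMetrics(String key) {
            return metricsMap.get(key);   // key is familyName + percentileSuffix
        }

        public static void main(String[] args) {
            MetricsLookupSketch m = new MetricsLookupSketch();
            m.metricsMap.put("ProduceTotalTimeMs99thPercentile", 12.5);
            System.out.println(m.getSpecifiedMetrics("ProduceTotalTimeMs" + "99thPercentile"));   // 12.5
        }
    }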
@@ -22,8 +22,8 @@ spring:
   active: dev
   servlet:
     multipart:
-      max-file-size: 60MB
-      max-request-size: 60MB
+      max-file-size: 100MB
+      max-request-size: 100MB

 logging:
   config: classpath:logback-spring.xml

@@ -32,24 +32,35 @@ custom:
   idc: cn
   jmx:
     max-conn: 10
+  store-metrics-task:
+    community:
+      broker-metrics-enabled: true
+      topic-metrics-enabled: true
+    didi:
+      app-topic-metrics-enabled: true
+      topic-request-time-metrics-enabled: true
+      topic-throttled-metrics: true
+    save-days: 7

 account:
   ldap:

 kcm:
   enabled: false
+  storage:
+    base-url: http://127.0.0.1
   n9e:
-    base-url: http://127.0.0.1:8080
+    base-url: http://127.0.0.1:8004
     user-token: 12345678
     timeout: 300
-    account: km
+    account: root
     script-file: kcm_script.sh

 monitor:
   enabled: false
   n9e:
     nid: 2
-    user-token: 123456
+    user-token: 1234567890
     mon:
       base-url: http://127.0.0.1:8032
     sink:
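The custom.store-metrics-task block is the configuration counterpart of the @ConditionalOnProperty gates added above, and save-days feeds DeleteMetrics through configUtils.getMaxMetricsSaveDays(). Shortening retention to three days, for instance, should presumably need nothing more than:

    custom:
      store-metrics-task:
        save-days: 3   # DeleteMetrics prunes metric rows older than this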
@@ -143,7 +143,7 @@
         <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
             <maxFileSize>100MB</maxFileSize>
         </timeBasedFileNamingAndTriggeringPolicy>
-        <maxHistory>5</maxHistory>
+        <maxHistory>3</maxHistory>
     </rollingPolicy>
 </appender>

@@ -159,7 +159,7 @@
         <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
             <maxFileSize>100MB</maxFileSize>
         </timeBasedFileNamingAndTriggeringPolicy>
-        <maxHistory>5</maxHistory>
+        <maxHistory>3</maxHistory>
     </rollingPolicy>
 </appender>