v2.8.0_e初始化

1、测试代码,开源用户尽量不要使用;
2、包含Kafka-HA的相关功能;
3、并非基于2.6.0拉的分支,是基于master分支的 commit-id: 462303fca0 拉的2.8.0_e的分支。出现这个情况的原因是v2.6.0的代码并不是最新的,2.x最新的代码是 462303fca0 这个commit对应的代码;
This commit is contained in:
zengqiao
2023-02-13 16:35:43 +08:00
parent 462303fca0
commit e81c0f3040
178 changed files with 9938 additions and 1674 deletions

View File

@@ -112,5 +112,15 @@
<artifactId>lombok</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>com.baomidou</groupId>
<artifactId>mybatis-plus-boot-starter</artifactId>
</dependency>
<dependency>
<groupId>org.hibernate.validator</groupId>
<artifactId>hibernate-validator</artifactId>
</dependency>
</dependencies>
</project>

View File

@@ -0,0 +1,21 @@
package com.xiaojukeji.kafka.manager.common.bizenum;
import lombok.Getter;

/**
 * Business type of a job log entry. Each constant pairs a numeric code
 * (the persisted discriminator) with a display message. Getters for
 * {@code code} and {@code msg} are generated by Lombok.
 */
@Getter
public enum JobLogBizTypEnum {

    /** Log written by an HA active/standby switch job. */
    HA_SWITCH_JOB_LOG(100, "HA-主备切换日志"),

    /** Fallback for unrecognized codes. */
    UNKNOWN(-1, "unknown"),

    ;

    /** Numeric identifier of the log business type. */
    private final int code;

    /** Human-readable message for this type. */
    private final String msg;

    JobLogBizTypEnum(int code, String msg) {
        this.code = code;
        this.msg = msg;
    }
}

View File

@@ -0,0 +1,42 @@
package com.xiaojukeji.kafka.manager.common.bizenum;
/**
* 任务动作
* @author zengqiao
* @date 20/4/26
*/
public enum TaskActionEnum {
UNKNOWN("unknown"),
START("start"),
PAUSE("pause"),
IGNORE("ignore"),
CANCEL("cancel"),
REDO("redo"),
KILL("kill"),
FORCE("force"),
ROLLBACK("rollback"),
;
private final String action;
TaskActionEnum(String action) {
this.action = action;
}
public String getAction() {
return action;
}
@Override
public String toString() {
return "ClusterTaskActionEnum{" +
"action='" + action + '\'' +
'}';
}
}

View File

@@ -1,10 +1,13 @@
package com.xiaojukeji.kafka.manager.common.bizenum;
import lombok.Getter;
/**
* 任务状态
* @author zengqiao
* @date 2017/6/29.
*/
@Getter
public enum TaskStatusEnum {
UNKNOWN( -1, "未知"),
@@ -15,6 +18,7 @@ public enum TaskStatusEnum {
RUNNING( 30, "运行中"),
KILLING( 31, "杀死中"),
RUNNING_IN_TIMEOUT( 32, "超时运行中"),
BLOCKED( 40, "暂停"),
@@ -30,31 +34,15 @@ public enum TaskStatusEnum {
;
private Integer code;
private final Integer code;
private String message;
private final String message;
TaskStatusEnum(Integer code, String message) {
this.code = code;
this.message = message;
}
public Integer getCode() {
return code;
}
public String getMessage() {
return message;
}
@Override
public String toString() {
return "TaskStatusEnum{" +
"code=" + code +
", message='" + message + '\'' +
'}';
}
public static Boolean isFinished(Integer code) {
return code >= FINISHED.getCode();
}

View File

@@ -17,9 +17,9 @@ public enum TopicAuthorityEnum {
OWNER(4, "可管理"),
;
private Integer code;
private final Integer code;
private String message;
private final String message;
TopicAuthorityEnum(Integer code, String message) {
this.code = code;
@@ -34,6 +34,16 @@ public enum TopicAuthorityEnum {
return message;
}
public static String getMsgByCode(Integer code) {
for (TopicAuthorityEnum authorityEnum: TopicAuthorityEnum.values()) {
if (authorityEnum.getCode().equals(code)) {
return authorityEnum.message;
}
}
return DENY.message;
}
@Override
public String toString() {
return "TopicAuthorityEnum{" +

View File

@@ -10,12 +10,11 @@ public enum GatewayConfigKeyEnum {
SD_APP_RATE("SD_APP_RATE", "SD_APP_RATE"),
SD_IP_RATE("SD_IP_RATE", "SD_IP_RATE"),
SD_SP_RATE("SD_SP_RATE", "SD_SP_RATE"),
;
private String configType;
private final String configType;
private String configName;
private final String configName;
GatewayConfigKeyEnum(String configType, String configName) {
this.configType = configType;

View File

@@ -0,0 +1,27 @@
package com.xiaojukeji.kafka.manager.common.bizenum.ha;
import lombok.Getter;

/**
 * Role of a resource inside an HA (active/standby) relation.
 * Getters for {@code code} and {@code msg} are generated by Lombok.
 *
 * @author zengqiao
 * @date 20/7/28
 */
@Getter
public enum HaRelationTypeEnum {

    /** Not part of any HA relation. */
    UNKNOWN(-1, "非高可用"),

    /** Standby side of the relation. */
    STANDBY(0, ""),

    /** Active side of the relation. */
    ACTIVE(1, ""),

    /** Both sides back each other up. */
    MUTUAL_BACKUP(2, "互备");

    /** Numeric relation-type code. */
    private final int code;

    /** Display message (empty for STANDBY/ACTIVE in the original source). */
    private final String msg;

    HaRelationTypeEnum(int code, String msg) {
        this.code = code;
        this.msg = msg;
    }
}

View File

@@ -0,0 +1,25 @@
package com.xiaojukeji.kafka.manager.common.bizenum.ha;
import lombok.Getter;

/**
 * Kind of resource an HA relation applies to.
 * Getters for {@code code} and {@code msg} are generated by Lombok.
 *
 * @author zengqiao
 * @date 20/7/28
 */
@Getter
public enum HaResTypeEnum {

    /** A whole physical cluster. */
    CLUSTER(0, "Cluster"),

    /** A single topic. */
    TOPIC(1, "Topic"),

    /** A Kafka user/principal. */
    KAFKA_USER(2, "KafkaUser"),

    ;

    /** Numeric resource-type code. */
    private final int code;

    /** Display name of the resource type. */
    private final String msg;

    HaResTypeEnum(int code, String msg) {
        this.code = code;
        this.msg = msg;
    }
}

View File

@@ -0,0 +1,75 @@
package com.xiaojukeji.kafka.manager.common.bizenum.ha;
/**
* @author zengqiao
* @date 20/7/28
*/
public enum HaStatusEnum {
UNKNOWN(-1, "未知状态"),
STABLE(HaStatusEnum.STABLE_CODE, "稳定状态"),
// SWITCHING(HaStatusEnum.SWITCHING_CODE, "切换中"),
SWITCHING_PREPARE(
HaStatusEnum.SWITCHING_PREPARE_CODE,
"主备切换--源集群[%s]--预处理(阻止当前主Topic写入)"),
SWITCHING_WAITING_IN_SYNC(
HaStatusEnum.SWITCHING_WAITING_IN_SYNC_CODE,
"主备切换--目标集群[%s]--等待主与备Topic数据同步完成"),
SWITCHING_CLOSE_OLD_STANDBY_TOPIC_FETCH(
HaStatusEnum.SWITCHING_CLOSE_OLD_STANDBY_TOPIC_FETCH_CODE,
"主备切换--目标集群[%s]--关闭旧的备Topic的副本同步"),
SWITCHING_OPEN_NEW_STANDBY_TOPIC_FETCH(
HaStatusEnum.SWITCHING_OPEN_NEW_STANDBY_TOPIC_FETCH_CODE,
"主备切换--源集群[%s]--开启新的备Topic的副本同步"),
SWITCHING_CLOSEOUT(
HaStatusEnum.SWITCHING_CLOSEOUT_CODE,
"主备切换--目标集群[%s]--收尾(允许新的主Topic写入)"),
;
public static final int UNKNOWN_CODE = -1;
public static final int STABLE_CODE = 0;
public static final int SWITCHING_CODE = 100;
public static final int SWITCHING_PREPARE_CODE = 101;
public static final int SWITCHING_WAITING_IN_SYNC_CODE = 102;
public static final int SWITCHING_CLOSE_OLD_STANDBY_TOPIC_FETCH_CODE = 103;
public static final int SWITCHING_OPEN_NEW_STANDBY_TOPIC_FETCH_CODE = 104;
public static final int SWITCHING_CLOSEOUT_CODE = 105;
private final int code;
private final String msg;
public int getCode() {
return code;
}
public String getMsg(String clusterName) {
if (this.code == UNKNOWN_CODE || this.code == STABLE_CODE) {
return this.msg;
}
return String.format(msg, clusterName);
}
HaStatusEnum(int code, String msg) {
this.code = code;
this.msg = msg;
}
public static Integer calProgress(Integer status) {
if (status == null || status == HaStatusEnum.STABLE_CODE || status == UNKNOWN_CODE) {
return 100;
}
// 最小进度为 1%
return Math.max(1, (status - 101) * 100 / 5);
}
}

View File

@@ -0,0 +1,44 @@
package com.xiaojukeji.kafka.manager.common.bizenum.ha.job;
public enum HaJobActionEnum {
/**
*
*/
START(1,"start"),
STOP(2, "stop"),
CANCEL(3,"cancel"),
CONTINUE(4,"continue"),
UNKNOWN(-1, "unknown");
HaJobActionEnum(int status, String value) {
this.status = status;
this.value = value;
}
private final int status;
private final String value;
public int getStatus() {
return status;
}
public String getValue() {
return value;
}
public static HaJobActionEnum valueOfStatus(int status) {
for (HaJobActionEnum statusEnum : HaJobActionEnum.values()) {
if (status == statusEnum.getStatus()) {
return statusEnum;
}
}
return HaJobActionEnum.UNKNOWN;
}
}

View File

@@ -0,0 +1,75 @@
package com.xiaojukeji.kafka.manager.common.bizenum.ha.job;

import com.xiaojukeji.kafka.manager.common.bizenum.TaskStatusEnum;

/**
 * Status of an HA job. Codes and messages are delegated to
 * {@link TaskStatusEnum} so the two enums stay numerically consistent.
 */
public enum HaJobStatusEnum {
    /** 执行中 (running). */
    RUNNING(TaskStatusEnum.RUNNING),
    RUNNING_IN_TIMEOUT(TaskStatusEnum.RUNNING_IN_TIMEOUT),
    SUCCESS(TaskStatusEnum.SUCCEED),
    FAILED(TaskStatusEnum.FAILED),
    UNKNOWN(TaskStatusEnum.UNKNOWN);

    /** Numeric code, borrowed from the wrapped TaskStatusEnum. */
    private final int status;

    /** Display message, borrowed from the wrapped TaskStatusEnum. */
    private final String value;

    HaJobStatusEnum(TaskStatusEnum delegate) {
        this.status = delegate.getCode();
        this.value = delegate.getMessage();
    }

    public int getStatus() {
        return this.status;
    }

    public String getValue() {
        return this.value;
    }

    /** Resolve a status code to its enum; unmatched codes map to UNKNOWN. */
    public static HaJobStatusEnum valueOfStatus(int status) {
        for (HaJobStatusEnum candidate : values()) {
            if (candidate.status == status) {
                return candidate;
            }
        }
        return UNKNOWN;
    }

    /**
     * Aggregate a batch of sub-job counts into one overall state.
     * Any unknown sub-job makes the whole batch UNKNOWN; otherwise the batch
     * only becomes SUCCESS/FAILED once nothing is left running.
     * (totalJobNum is accepted for signature compatibility but not used.)
     */
    public static HaJobStatusEnum getStatusBySubStatus(int totalJobNum,
                                                       int successJobNu,
                                                       int failedJobNu,
                                                       int runningJobNu,
                                                       int runningInTimeoutJobNu,
                                                       int unknownJobNu) {
        if (unknownJobNu > 0) {
            return UNKNOWN;
        }
        int unfinished = runningJobNu + runningInTimeoutJobNu + unknownJobNu;
        if (failedJobNu + unfinished == 0) {
            return SUCCESS;
        }
        if (unfinished == 0 && failedJobNu > 0) {
            return FAILED;
        }
        return runningInTimeoutJobNu > 0 ? RUNNING_IN_TIMEOUT : RUNNING;
    }

    /** True when the job is still in a running state (null-safe). */
    public static boolean isRunning(Integer jobStatus) {
        if (jobStatus == null) {
            return false;
        }
        int s = jobStatus;
        return s == RUNNING.status || s == RUNNING_IN_TIMEOUT.status;
    }

    /** True when the job has reached a terminal state (null-safe). */
    public static boolean isFinished(Integer jobStatus) {
        if (jobStatus == null) {
            return false;
        }
        int s = jobStatus;
        return s == SUCCESS.status || s == FAILED.status;
    }
}

View File

@@ -31,6 +31,8 @@ public class ConfigConstant {
public static final String KAFKA_CLUSTER_DO_CONFIG_KEY = "KAFKA_CLUSTER_DO_CONFIG";
public static final String HA_SWITCH_JOB_TIMEOUT_UNIT_SEC_CONFIG_PREFIX = "HA_SWITCH_JOB_TIMEOUT_UNIT_SEC_CONFIG_CLUSTER";
private ConfigConstant() {
}
}

View File

@@ -21,6 +21,32 @@ public class KafkaConstant {
public static final String INTERNAL_KEY = "INTERNAL";
public static final String BOOTSTRAP_SERVERS = "bootstrap.servers";
/**
* HA
*/
public static final String DIDI_KAFKA_ENABLE = "didi.kafka.enable";
public static final String DIDI_HA_REMOTE_CLUSTER = "didi.ha.remote.cluster";
// TODO 平台来管理配置,不需要底层来管理,因此可以删除该配置
public static final String DIDI_HA_SYNC_TOPIC_CONFIGS_ENABLED = "didi.ha.sync.topic.configs.enabled";
public static final String DIDI_HA_ACTIVE_CLUSTER = "didi.ha.active.cluster";
public static final String DIDI_HA_REMOTE_TOPIC = "didi.ha.remote.topic";
public static final String SECURITY_PROTOCOL = "security.protocol";
public static final String SASL_MECHANISM = "sasl.mechanism";
public static final String SASL_JAAS_CONFIG = "sasl.jaas.config";
public static final String NONE = "None";
private KafkaConstant() {
}
}

View File

@@ -0,0 +1,96 @@
package com.xiaojukeji.kafka.manager.common.constant;
/**
* 信息模版Constant
* @author zengqiao
* @date 22/03/03
*/
public class MsgConstant {
private MsgConstant() {
}
/**************************************************** Cluster ****************************************************/
public static String getClusterBizStr(Long clusterPhyId, String clusterName){
return String.format("集群ID:[%d] 集群名称:[%s]", clusterPhyId, clusterName);
}
public static String getClusterPhyNotExist(Long clusterPhyId) {
return String.format("集群ID:[%d] 不存在或者未加载", clusterPhyId);
}
/**************************************************** Broker ****************************************************/
public static String getBrokerNotExist(Long clusterPhyId, Integer brokerId) {
return String.format("集群ID:[%d] brokerId:[%d] 不存在或未存活", clusterPhyId, brokerId);
}
public static String getBrokerBizStr(Long clusterPhyId, Integer brokerId) {
return String.format("集群ID:[%d] brokerId:[%d]", clusterPhyId, brokerId);
}
/**************************************************** Topic ****************************************************/
public static String getTopicNotExist(Long clusterPhyId, String topicName) {
return String.format("集群ID:[%d] Topic名称:[%s] 不存在", clusterPhyId, topicName);
}
public static String getTopicBizStr(Long clusterPhyId, String topicName) {
return String.format("集群ID:[%d] Topic名称:[%s]", clusterPhyId, topicName);
}
public static String getTopicExtend(Long existPartitionNum, Long totalPartitionNum,String expandParam){
return String.format("新增分区, 从:[%d] 增加到:[%d], 详细参数信息:[%s]", existPartitionNum,totalPartitionNum,expandParam);
}
public static String getClusterTopicKey(Long clusterPhyId, String topicName) {
return String.format("%d@%s", clusterPhyId, topicName);
}
/**************************************************** Partition ****************************************************/
public static String getPartitionNotExist(Long clusterPhyId, String topicName) {
return String.format("集群ID:[%d] Topic名称:[%s] 存在非法的分区ID", clusterPhyId, topicName);
}
public static String getPartitionNotExist(Long clusterPhyId, String topicName, Integer partitionId) {
return String.format("集群ID:[%d] Topic名称:[%s] 分区Id:[%d] 不存在", clusterPhyId, topicName, partitionId);
}
/**************************************************** KafkaUser ****************************************************/
public static String getKafkaUserBizStr(Long clusterPhyId, String kafkaUser) {
return String.format("集群ID:[%d] kafkaUser:[%s]", clusterPhyId, kafkaUser);
}
public static String getKafkaUserNotExist(Long clusterPhyId, String kafkaUser) {
return String.format("集群ID:[%d] kafkaUser:[%s] 不存在", clusterPhyId, kafkaUser);
}
public static String getKafkaUserDuplicate(Long clusterPhyId, String kafkaUser) {
return String.format("集群ID:[%d] kafkaUser:[%s] 已存在", clusterPhyId, kafkaUser);
}
/**************************************************** ha-Cluster ****************************************************/
public static String getActiveClusterDuplicate(Long clusterPhyId, String clusterName) {
return String.format("集群ID:[%d] 主集群:[%s] 已存在", clusterPhyId, clusterName);
}
/**************************************************** reassign ****************************************************/
public static String getReassignJobBizStr(Long jobId, Long clusterPhyId) {
return String.format("任务Id:[%d] 集群ID:[%s]", jobId, clusterPhyId);
}
public static String getJobIdCanNotNull() {
return "jobId不允许为空";
}
public static String getJobNotExist(Long jobId) {
return String.format("jobId:[%d] 不存在", jobId);
}
}

View File

@@ -0,0 +1,28 @@
package com.xiaojukeji.kafka.manager.common.entity;

import com.xiaojukeji.kafka.manager.common.constant.Constant;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import lombok.ToString;

import java.io.Serializable;

/**
 * Base shape of an API result: a status code plus a message.
 * Accessors are generated by Lombok.
 */
@Data
@ToString
public class BaseResult implements Serializable {
    private static final long serialVersionUID = -5771016784021901099L;

    @ApiModelProperty(value = "信息", example = "成功")
    protected String message;

    @ApiModelProperty(value = "状态", example = "0")
    protected int code;

    /** True when {@code code} equals the success constant. */
    public boolean successful() {
        return Constant.SUCCESS.equals(this.code);
    }

    /** True when the call did not succeed. */
    public boolean failed() {
        return !this.successful();
    }
}

View File

@@ -1,21 +1,23 @@
package com.xiaojukeji.kafka.manager.common.entity;
import com.alibaba.fastjson.JSON;
import com.xiaojukeji.kafka.manager.common.constant.Constant;
import java.io.Serializable;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
/**
* @author huangyiminghappy@163.com
* @date 2019-07-08
*/
public class Result<T> implements Serializable {
private static final long serialVersionUID = -2772975319944108658L;
@Data
@ApiModel(description = "调用结果")
public class Result<T> extends BaseResult {
@ApiModelProperty(value = "数据")
protected T data;
private T data;
private String message;
private String tips;
private int code;
public Result() {
this.code = ResultStatus.SUCCESS.getCode();
this.message = ResultStatus.SUCCESS.getMessage();
}
public Result(T data) {
this.data = data;
@@ -23,10 +25,6 @@ public class Result<T> implements Serializable {
this.message = ResultStatus.SUCCESS.getMessage();
}
public Result() {
this(null);
}
public Result(Integer code, String message) {
this.message = message;
this.code = code;
@@ -38,48 +36,31 @@ public class Result<T> implements Serializable {
this.code = code;
}
public T getData()
{
return (T)this.data;
public static <T> Result<T> build(boolean succ) {
if (succ) {
return buildSuc();
}
return buildFail();
}
public void setData(T data)
{
this.data = data;
public static <T> Result<T> buildFail() {
Result<T> result = new Result<>();
result.setCode(ResultStatus.FAIL.getCode());
result.setMessage(ResultStatus.FAIL.getMessage());
return result;
}
public String getMessage()
{
return this.message;
}
public void setMessage(String message)
{
this.message = message;
}
public String getTips() {
return tips;
}
public void setTips(String tips) {
this.tips = tips;
}
public int getCode()
{
return this.code;
}
public void setCode(int code)
{
this.code = code;
}
@Override
public String toString()
{
return JSON.toJSONString(this);
public static <T> Result<T> build(boolean succ, T data) {
Result<T> result = new Result<>();
if (succ) {
result.setCode(ResultStatus.SUCCESS.getCode());
result.setMessage(ResultStatus.SUCCESS.getMessage());
result.setData(data);
} else {
result.setCode(ResultStatus.FAIL.getCode());
result.setMessage(ResultStatus.FAIL.getMessage());
}
return result;
}
public static <T> Result<T> buildSuc() {
@@ -97,14 +78,6 @@ public class Result<T> implements Serializable {
return result;
}
public static <T> Result<T> buildGatewayFailure(String message) {
Result<T> result = new Result<>();
result.setCode(ResultStatus.GATEWAY_INVALID_REQUEST.getCode());
result.setMessage(message);
result.setData(null);
return result;
}
public static <T> Result<T> buildFailure(String message) {
Result<T> result = new Result<>();
result.setCode(ResultStatus.FAIL.getCode());
@@ -113,10 +86,34 @@ public class Result<T> implements Serializable {
return result;
}
public static <T> Result<T> buildFrom(ResultStatus resultStatus) {
public static <T> Result<T> buildFailure(String message, T data) {
Result<T> result = new Result<>();
result.setCode(resultStatus.getCode());
result.setMessage(resultStatus.getMessage());
result.setCode(ResultStatus.FAIL.getCode());
result.setMessage(message);
result.setData(data);
return result;
}
public static <T> Result<T> buildFailure(ResultStatus rs) {
Result<T> result = new Result<>();
result.setCode(rs.getCode());
result.setMessage(rs.getMessage());
result.setData(null);
return result;
}
public static <T> Result<T> buildGatewayFailure(String message) {
Result<T> result = new Result<>();
result.setCode(ResultStatus.GATEWAY_INVALID_REQUEST.getCode());
result.setMessage(message);
result.setData(null);
return result;
}
public static <T> Result<T> buildFrom(ResultStatus rs) {
Result<T> result = new Result<>();
result.setCode(rs.getCode());
result.setMessage(rs.getMessage());
return result;
}
@@ -128,8 +125,46 @@ public class Result<T> implements Serializable {
return result;
}
public boolean failed() {
return !Constant.SUCCESS.equals(code);
public static <T> Result<T> buildFromRSAndMsg(ResultStatus resultStatus, String message) {
Result<T> result = new Result<>();
result.setCode(resultStatus.getCode());
result.setMessage(message);
result.setData(null);
return result;
}
public static <T> Result<T> buildFromRSAndData(ResultStatus rs, T data) {
Result<T> result = new Result<>();
result.setCode(rs.getCode());
result.setMessage(rs.getMessage());
result.setData(data);
return result;
}
public static <T, U> Result<T> buildFromIgnoreData(Result<U> anotherResult) {
Result<T> result = new Result<>();
result.setCode(anotherResult.getCode());
result.setMessage(anotherResult.getMessage());
return result;
}
public static <T> Result<T> buildParamIllegal(String msg) {
Result<T> result = new Result<>();
result.setCode(ResultStatus.PARAM_ILLEGAL.getCode());
result.setMessage(ResultStatus.PARAM_ILLEGAL.getMessage() + ":" + msg + ",请检查后再提交!");
return result;
}
public boolean hasData(){
return !failed() && this.data != null;
}
@Override
public String toString() {
return "Result{" +
"message='" + message + '\'' +
", code=" + code +
", data=" + data +
'}';
}
}

View File

@@ -23,6 +23,8 @@ public enum ResultStatus {
API_CALL_EXCEED_LIMIT(1403, "api call exceed limit"),
USER_WITHOUT_AUTHORITY(1404, "user without authority"),
CHANGE_ZOOKEEPER_FORBIDDEN(1405, "change zookeeper forbidden"),
HA_CLUSTER_DELETE_FORBIDDEN(1409, "先删除主topic才能删除该集群"),
HA_TOPIC_DELETE_FORBIDDEN(1410, "先解除高可用关系才能删除该topic"),
APP_OFFLINE_FORBIDDEN(1406, "先下线topic才能下线应用"),
@@ -76,6 +78,8 @@ public enum ResultStatus {
QUOTA_NOT_EXIST(7113, "quota not exist, please check clusterId, topicName and appId"),
CONSUMER_GROUP_NOT_EXIST(7114, "consumerGroup not exist"),
TOPIC_BIZ_DATA_NOT_EXIST(7115, "topic biz data not exist, please sync topic to db"),
SD_ZK_NOT_EXIST(7116, "SD_ZK未配置"),
// 资源已存在
RESOURCE_ALREADY_EXISTED(7200, "资源已经存在"),
@@ -88,6 +92,7 @@ public enum ResultStatus {
RESOURCE_ALREADY_USED(7400, "资源早已被使用"),
/**
* 因为外部系统的问题, 操作时引起的错误, [8000, 9000)
* ------------------------------------------------------------------------------------------
@@ -98,6 +103,7 @@ public enum ResultStatus {
ZOOKEEPER_READ_FAILED(8021, "zookeeper read failed"),
ZOOKEEPER_WRITE_FAILED(8022, "zookeeper write failed"),
ZOOKEEPER_DELETE_FAILED(8023, "zookeeper delete failed"),
ZOOKEEPER_OPERATE_FAILED(8024, "zookeeper operate failed"),
// 调用集群任务里面的agent失败
CALL_CLUSTER_TASK_AGENT_FAILED(8030, " call cluster task agent failed"),

View File

@@ -1,11 +1,14 @@
package com.xiaojukeji.kafka.manager.common.entity.ao;
import lombok.Data;
import java.util.Date;
/**
* @author zengqiao
* @date 20/4/23
*/
@Data
public class ClusterDetailDTO {
private Long clusterId;
@@ -41,141 +44,9 @@ public class ClusterDetailDTO {
private Integer regionNum;
public Long getClusterId() {
return clusterId;
}
private Integer haRelation;
public void setClusterId(Long clusterId) {
this.clusterId = clusterId;
}
public String getClusterName() {
return clusterName;
}
public void setClusterName(String clusterName) {
this.clusterName = clusterName;
}
public String getZookeeper() {
return zookeeper;
}
public void setZookeeper(String zookeeper) {
this.zookeeper = zookeeper;
}
public String getBootstrapServers() {
return bootstrapServers;
}
public void setBootstrapServers(String bootstrapServers) {
this.bootstrapServers = bootstrapServers;
}
public String getKafkaVersion() {
return kafkaVersion;
}
public void setKafkaVersion(String kafkaVersion) {
this.kafkaVersion = kafkaVersion;
}
public String getIdc() {
return idc;
}
public void setIdc(String idc) {
this.idc = idc;
}
public Integer getMode() {
return mode;
}
public void setMode(Integer mode) {
this.mode = mode;
}
public String getSecurityProperties() {
return securityProperties;
}
public void setSecurityProperties(String securityProperties) {
this.securityProperties = securityProperties;
}
public String getJmxProperties() {
return jmxProperties;
}
public void setJmxProperties(String jmxProperties) {
this.jmxProperties = jmxProperties;
}
public Integer getStatus() {
return status;
}
public void setStatus(Integer status) {
this.status = status;
}
public Date getGmtCreate() {
return gmtCreate;
}
public void setGmtCreate(Date gmtCreate) {
this.gmtCreate = gmtCreate;
}
public Date getGmtModify() {
return gmtModify;
}
public void setGmtModify(Date gmtModify) {
this.gmtModify = gmtModify;
}
public Integer getBrokerNum() {
return brokerNum;
}
public void setBrokerNum(Integer brokerNum) {
this.brokerNum = brokerNum;
}
public Integer getTopicNum() {
return topicNum;
}
public void setTopicNum(Integer topicNum) {
this.topicNum = topicNum;
}
public Integer getConsumerGroupNum() {
return consumerGroupNum;
}
public void setConsumerGroupNum(Integer consumerGroupNum) {
this.consumerGroupNum = consumerGroupNum;
}
public Integer getControllerId() {
return controllerId;
}
public void setControllerId(Integer controllerId) {
this.controllerId = controllerId;
}
public Integer getRegionNum() {
return regionNum;
}
public void setRegionNum(Integer regionNum) {
this.regionNum = regionNum;
}
private String mutualBackupClusterName;
@Override
public String toString() {
@@ -197,6 +68,8 @@ public class ClusterDetailDTO {
", consumerGroupNum=" + consumerGroupNum +
", controllerId=" + controllerId +
", regionNum=" + regionNum +
", haRelation=" + haRelation +
", mutualBackupClusterName='" + mutualBackupClusterName + '\'' +
'}';
}
}

View File

@@ -1,5 +1,7 @@
package com.xiaojukeji.kafka.manager.common.entity.ao;
import lombok.Data;
import java.util.List;
import java.util.Properties;
@@ -7,6 +9,7 @@ import java.util.Properties;
* @author zengqiao
* @date 20/6/10
*/
@Data
public class RdTopicBasic {
private Long clusterId;
@@ -26,77 +29,7 @@ public class RdTopicBasic {
private List<String> regionNameList;
public Long getClusterId() {
return clusterId;
}
public void setClusterId(Long clusterId) {
this.clusterId = clusterId;
}
public String getClusterName() {
return clusterName;
}
public void setClusterName(String clusterName) {
this.clusterName = clusterName;
}
public String getTopicName() {
return topicName;
}
public void setTopicName(String topicName) {
this.topicName = topicName;
}
public Long getRetentionTime() {
return retentionTime;
}
public void setRetentionTime(Long retentionTime) {
this.retentionTime = retentionTime;
}
public String getAppId() {
return appId;
}
public void setAppId(String appId) {
this.appId = appId;
}
public String getAppName() {
return appName;
}
public void setAppName(String appName) {
this.appName = appName;
}
public Properties getProperties() {
return properties;
}
public void setProperties(Properties properties) {
this.properties = properties;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public List<String> getRegionNameList() {
return regionNameList;
}
public void setRegionNameList(List<String> regionNameList) {
this.regionNameList = regionNameList;
}
private Integer haRelation;
@Override
public String toString() {
@@ -109,7 +42,8 @@ public class RdTopicBasic {
", appName='" + appName + '\'' +
", properties=" + properties +
", description='" + description + '\'' +
", regionNameList='" + regionNameList + '\'' +
", regionNameList=" + regionNameList +
", haRelation=" + haRelation +
'}';
}
}

View File

@@ -0,0 +1,54 @@
package com.xiaojukeji.kafka.manager.common.entity.ao.ha;

import com.xiaojukeji.kafka.manager.common.bizenum.ha.HaStatusEnum;
import lombok.Data;

import java.util.HashMap;
import java.util.Map;

/**
 * Aggregated switch state for a set of active topics during an HA switch.
 */
@Data
public class HaSwitchTopic {
    /**
     * 是否完成 — whether every merged sub-switch has completed.
     */
    private boolean finished;

    /**
     * 每一个Topic的状态 — switch status per active topic (HaStatusEnum codes).
     */
    private Map<String, Integer> activeTopicSwitchStatusMap;

    public HaSwitchTopic(boolean finished) {
        this.finished = finished;
        this.activeTopicSwitchStatusMap = new HashMap<>();
    }

    /** Merge another result; overall "finished" stays true only if both are. */
    public void addHaSwitchTopic(HaSwitchTopic haSwitchTopic) {
        this.finished = this.finished && haSwitchTopic.finished;
    }

    public boolean isFinished() {
        return this.finished;
    }

    /** Record (or overwrite) the switch status of one active topic. */
    public void addActiveTopicStatus(String activeTopicName, Integer status) {
        this.activeTopicSwitchStatusMap.put(activeTopicName, status);
    }

    /** A topic's switch is finished once its status is back to STABLE. */
    public boolean isActiveTopicSwitchFinished(String activeTopicName) {
        Integer status = this.activeTopicSwitchStatusMap.get(activeTopicName);
        return status != null && status.intValue() == HaStatusEnum.STABLE.getCode();
    }

    @Override
    public String toString() {
        return "HaSwitchTopic{" +
                "finished=" + finished +
                ", activeTopicSwitchStatusMap=" + activeTopicSwitchStatusMap +
                '}';
    }
}

View File

@@ -0,0 +1,28 @@
package com.xiaojukeji.kafka.manager.common.entity.ao.ha.job;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
// DTO describing one HA switch sub-job (a topic moving between an active and a
// standby cluster). Lombok generates getters/setters/equals/hashCode plus the
// no-arg and all-args constructors.
@Data
@NoArgsConstructor
@AllArgsConstructor
@ApiModel(description = "Job详情")
public class HaJobDetail {
// Topic being switched.
@ApiModelProperty(value = "Topic名称")
private String topicName;
// ID of the active-side physical cluster.
@ApiModelProperty(value="主集群ID")
private Long activeClusterPhyId;
// ID of the standby-side physical cluster.
@ApiModelProperty(value="备集群ID")
private Long standbyClusterPhyId;
// Summed replication lag — presumably across the topic's partitions; confirm with the producer of this value.
@ApiModelProperty(value="Lag和")
private Long sumLag;
// Status code — looks like it maps to the HA job status enum; TODO confirm.
@ApiModelProperty(value="状态")
private Integer status;
}

View File

@@ -0,0 +1,16 @@
package com.xiaojukeji.kafka.manager.common.entity.ao.ha.job;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
// DTO carrying one HA-job log line for the API layer. Lombok generates
// accessors plus the no-arg and all-args constructors.
@Data
@NoArgsConstructor
@AllArgsConstructor
@ApiModel(description = "Job日志")
public class HaJobLog {
// Raw log text of the job step.
@ApiModelProperty(value = "日志信息")
private String log;
}

View File

@@ -0,0 +1,70 @@
package com.xiaojukeji.kafka.manager.common.entity.ao.ha.job;

import com.xiaojukeji.kafka.manager.common.bizenum.ha.job.HaJobStatusEnum;
import lombok.Data;
import lombok.NoArgsConstructor;

import java.util.List;

/**
 * Aggregated state of a group of HA sub-jobs: per-status counts, the derived
 * overall status, and a progress percentage.
 */
@Data
@NoArgsConstructor
public class HaJobState {
    /**
     * Overall status code.
     * @see com.xiaojukeji.kafka.manager.common.bizenum.ha.job.HaJobStatusEnum
     */
    private int status;

    /** Total number of sub-jobs. */
    private int total;

    /** Count of succeeded sub-jobs. */
    private int success;

    /** Count of failed sub-jobs. */
    private int failed;

    /** Count of running sub-jobs. */
    private int doing;

    /** Count of sub-jobs running past their timeout. */
    private int doingInTimeout;

    /** Count of sub-jobs in an unrecognized state. */
    private int unknown;

    /** Progress percentage supplied by the caller. */
    private Integer progress;

    /**
     * 按照状态,直接进行聚合 — aggregate a list of raw status codes into counts
     * and derive the overall status.
     */
    public HaJobState(List<Integer> jobStatusList, Integer progress) {
        this.total = jobStatusList.size();
        // Counters start at their int default of 0 and are bumped per status.
        for (Integer jobStatus : jobStatusList) {
            if (jobStatus == HaJobStatusEnum.SUCCESS.getStatus()) {
                this.success++;
            } else if (jobStatus == HaJobStatusEnum.FAILED.getStatus()) {
                this.failed++;
            } else if (jobStatus == HaJobStatusEnum.RUNNING.getStatus()) {
                this.doing++;
            } else if (jobStatus == HaJobStatusEnum.RUNNING_IN_TIMEOUT.getStatus()) {
                this.doingInTimeout++;
            } else {
                this.unknown++;
            }
        }
        this.status = HaJobStatusEnum.getStatusBySubStatus(this.total, this.success, this.failed, this.doing, this.doingInTimeout, this.unknown).getStatus();
        this.progress = progress;
    }

    /** Build a state where every one of {@code doingSize} sub-jobs is running. */
    public HaJobState(Integer doingSize, Integer progress) {
        this.total = doingSize;
        this.doing = doingSize;
        // success/failed/doingInTimeout/unknown stay at their default of 0.
        this.progress = progress;
    }
}

View File

@@ -0,0 +1,12 @@
package com.xiaojukeji.kafka.manager.common.entity.ao.ha.job;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
// Extend-data payload attached to an HA sub-job; currently only carries the
// summed lag. Lombok generates accessors plus both constructors.
@Data
@NoArgsConstructor
@AllArgsConstructor
public class HaSubJobExtendData {
// Summed replication lag — presumably across the sub-job topic's partitions; confirm with the writer of this field.
private Long sumLag;
}

View File

@@ -1,11 +1,14 @@
package com.xiaojukeji.kafka.manager.common.entity.ao.topic;
import lombok.Data;
import java.util.List;
/**
* @author arthur
* @date 2018/09/03
*/
@Data
public class TopicBasicDTO {
private Long clusterId;
@@ -39,133 +42,7 @@ public class TopicBasicDTO {
private Long retentionBytes;
public Long getClusterId() {
return clusterId;
}
public void setClusterId(Long clusterId) {
this.clusterId = clusterId;
}
public String getAppId() {
return appId;
}
public void setAppId(String appId) {
this.appId = appId;
}
public String getAppName() {
return appName;
}
public void setAppName(String appName) {
this.appName = appName;
}
public String getPrincipals() {
return principals;
}
public void setPrincipals(String principals) {
this.principals = principals;
}
public String getTopicName() {
return topicName;
}
public void setTopicName(String topicName) {
this.topicName = topicName;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public List<String> getRegionNameList() {
return regionNameList;
}
public void setRegionNameList(List<String> regionNameList) {
this.regionNameList = regionNameList;
}
public Integer getScore() {
return score;
}
public void setScore(Integer score) {
this.score = score;
}
public String getTopicCodeC() {
return topicCodeC;
}
public void setTopicCodeC(String topicCodeC) {
this.topicCodeC = topicCodeC;
}
public Integer getPartitionNum() {
return partitionNum;
}
public void setPartitionNum(Integer partitionNum) {
this.partitionNum = partitionNum;
}
public Integer getReplicaNum() {
return replicaNum;
}
public void setReplicaNum(Integer replicaNum) {
this.replicaNum = replicaNum;
}
public Integer getBrokerNum() {
return brokerNum;
}
public void setBrokerNum(Integer brokerNum) {
this.brokerNum = brokerNum;
}
public Long getModifyTime() {
return modifyTime;
}
public void setModifyTime(Long modifyTime) {
this.modifyTime = modifyTime;
}
public Long getCreateTime() {
return createTime;
}
public void setCreateTime(Long createTime) {
this.createTime = createTime;
}
public Long getRetentionTime() {
return retentionTime;
}
public void setRetentionTime(Long retentionTime) {
this.retentionTime = retentionTime;
}
public Long getRetentionBytes() {
return retentionBytes;
}
public void setRetentionBytes(Long retentionBytes) {
this.retentionBytes = retentionBytes;
}
private Integer haRelation;
@Override
public String toString() {
@@ -186,6 +63,7 @@ public class TopicBasicDTO {
", createTime=" + createTime +
", retentionTime=" + retentionTime +
", retentionBytes=" + retentionBytes +
", haRelation=" + haRelation +
'}';
}
}

View File

@@ -1,10 +1,13 @@
package com.xiaojukeji.kafka.manager.common.entity.ao.topic;
import lombok.Data;
/**
* Topic概览信息
* @author zengqiao
* @date 20/5/14
*/
@Data
public class TopicOverview {
private Long clusterId;
@@ -32,109 +35,7 @@ public class TopicOverview {
private Long logicalClusterId;
public Long getClusterId() {
return clusterId;
}
public void setClusterId(Long clusterId) {
this.clusterId = clusterId;
}
public String getTopicName() {
return topicName;
}
public void setTopicName(String topicName) {
this.topicName = topicName;
}
public Integer getReplicaNum() {
return replicaNum;
}
public void setReplicaNum(Integer replicaNum) {
this.replicaNum = replicaNum;
}
public Integer getPartitionNum() {
return partitionNum;
}
public void setPartitionNum(Integer partitionNum) {
this.partitionNum = partitionNum;
}
public Long getRetentionTime() {
return retentionTime;
}
public void setRetentionTime(Long retentionTime) {
this.retentionTime = retentionTime;
}
public Object getByteIn() {
return byteIn;
}
public void setByteIn(Object byteIn) {
this.byteIn = byteIn;
}
public Object getByteOut() {
return byteOut;
}
public void setByteOut(Object byteOut) {
this.byteOut = byteOut;
}
public Object getProduceRequest() {
return produceRequest;
}
public void setProduceRequest(Object produceRequest) {
this.produceRequest = produceRequest;
}
public String getAppName() {
return appName;
}
public void setAppName(String appName) {
this.appName = appName;
}
public String getAppId() {
return appId;
}
public void setAppId(String appId) {
this.appId = appId;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public Long getUpdateTime() {
return updateTime;
}
public void setUpdateTime(Long updateTime) {
this.updateTime = updateTime;
}
public Long getLogicalClusterId() {
return logicalClusterId;
}
public void setLogicalClusterId(Long logicalClusterId) {
this.logicalClusterId = logicalClusterId;
}
private Integer haRelation;
@Override
public String toString() {
@@ -152,6 +53,7 @@ public class TopicOverview {
", description='" + description + '\'' +
", updateTime=" + updateTime +
", logicalClusterId=" + logicalClusterId +
", haRelation=" + haRelation +
'}';
}
}

View File

@@ -0,0 +1,26 @@
package com.xiaojukeji.kafka.manager.common.entity.dto.ha;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import javax.validation.constraints.NotBlank;
/**
 * Request body carrying an action (e.g. "force") to apply to an
 * active/standby (HA) switch job.
 */
@Data
@ApiModel(description="主备切换任务操作")
public class ASSwitchJobActionDTO {
/**
 * Action keyword; must match one of the values of
 * @see com.xiaojukeji.kafka.manager.common.bizenum.TaskActionEnum
 */
@NotBlank(message = "action不允许为空")
@ApiModelProperty(value = "动作, force")
private String action;
}

View File

@@ -0,0 +1,31 @@
package com.xiaojukeji.kafka.manager.common.entity.dto.ha;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import javax.validation.constraints.NotNull;
import java.util.List;
/**
 * Request body for creating an active/standby (HA) switch job
 * between two physical clusters.
 */
@Data
@ApiModel(description="主备切换任务")
public class ASSwitchJobDTO {
// Whether the switch applies to every topic of the cluster pair.
@NotNull(message = "all不允许为NULL")
@ApiModelProperty(value = "所有Topic")
private Boolean all;
// Whether the selection must be expanded to all topics related to the involved KafkaUsers.
@NotNull(message = "mustContainAllKafkaUserTopics不允许为NULL")
@ApiModelProperty(value = "是否需要包含KafkaUser关联的所有Topic")
private Boolean mustContainAllKafkaUserTopics;
// ID of the currently-active physical cluster.
@NotNull(message = "activeClusterPhyId不允许为NULL")
@ApiModelProperty(value="主集群ID")
private Long activeClusterPhyId;
// ID of the standby physical cluster to switch to.
@NotNull(message = "standbyClusterPhyId不允许为NULL")
@ApiModelProperty(value="备集群ID")
private Long standbyClusterPhyId;
// Explicit topic list; presumably ignored when all == true — TODO confirm with the service layer.
// NOTE(review): missing @ApiModelProperty, so this field is undocumented in Swagger.
@NotNull(message = "topicNameList不允许为NULL")
private List<String> topicNameList;
}

View File

@@ -0,0 +1,51 @@
package com.xiaojukeji.kafka.manager.common.entity.dto.op.topic;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import javax.validation.constraints.NotNull;
import java.util.List;
/**
 * Request body for binding / unbinding the HA (active/standby)
 * relation of topics between two clusters.
 *
 * @author huangyiminghappy@163.com, zengqiao
 * @date 2022-06-29
 */
@Data
@JsonIgnoreProperties(ignoreUnknown = true)
@ApiModel(description = "Topic高可用关联|解绑")
public class HaTopicRelationDTO {
// ID of the active (primary) cluster.
@NotNull(message = "主集群id不能为空")
@ApiModelProperty(value = "主集群id")
private Long activeClusterId;
// ID of the standby cluster.
@NotNull(message = "备集群id不能为空")
@ApiModelProperty(value = "备集群id")
private Long standbyClusterId;
// Whether the operation applies to all topics (then topicNames may be empty).
@NotNull(message = "是否应用于所有topic")
@ApiModelProperty(value = "是否应用于所有topic")
private Boolean all;
// Topics to bind/unbind; required when all == false.
@ApiModelProperty(value = "需要关联|解绑的topic名称列表")
private List<String> topicNames;
@Override
public String toString() {
    // Fixed: previous version emitted "HaTopicRelationDTO{, activeClusterId=..."
    // (stray leading comma, first field had no label).
    return "HaTopicRelationDTO{" +
            "activeClusterId=" + activeClusterId +
            ", standbyClusterId=" + standbyClusterId +
            ", all=" + all +
            ", topicNames=" + topicNames +
            '}';
}
/**
 * Cross-field validation beyond bean validation: when the operation is not
 * applied to all topics, a non-empty topic list is required.
 * Null-safe: a null {@code all} is treated as {@code false}, so this no
 * longer throws NPE if bean validation was bypassed.
 *
 * @return true when the parameter combination is usable
 */
public boolean paramLegal() {
    return Boolean.TRUE.equals(all) || !ValidateUtils.isEmptyList(topicNames);
}
}

View File

@@ -0,0 +1,24 @@
package com.xiaojukeji.kafka.manager.common.entity.dto.rd;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import javax.validation.constraints.NotNull;
import java.util.List;
/**
 * Request body for querying the topics related to an App (KafkaUser)
 * on one physical cluster.
 *
 * @author zengqiao
 * @date 20/5/4
 */
@Data
@ApiModel(description="App关联Topic信息")
public class AppRelateTopicsDTO {
// Physical cluster to query.
@NotNull(message = "clusterPhyId不允许为NULL")
@ApiModelProperty(value="物理集群ID")
private Long clusterPhyId;
// Topic names used to filter the result set.
@NotNull(message = "filterTopicNameList不允许为NULL")
@ApiModelProperty(value="过滤的Topic列表")
private List<String> filterTopicNameList;
}

View File

@@ -4,11 +4,13 @@ import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
/**
* @author zengqiao
* @date 20/4/23
*/
@Data
@ApiModel(description = "集群接入&修改")
@JsonIgnoreProperties(ignoreUnknown = true)
public class ClusterDTO {
@@ -33,60 +35,21 @@ public class ClusterDTO {
@ApiModelProperty(value="Jmx配置")
private String jmxProperties;
public Long getClusterId() {
return clusterId;
}
@ApiModelProperty(value="主集群Id")
private Long activeClusterId;
public void setClusterId(Long clusterId) {
this.clusterId = clusterId;
}
@ApiModelProperty(value="是否高可用")
private boolean isHa;
public String getClusterName() {
return clusterName;
}
public void setClusterName(String clusterName) {
this.clusterName = clusterName;
}
public String getZookeeper() {
return zookeeper;
}
public void setZookeeper(String zookeeper) {
this.zookeeper = zookeeper;
}
public String getBootstrapServers() {
return bootstrapServers;
}
public void setBootstrapServers(String bootstrapServers) {
this.bootstrapServers = bootstrapServers;
}
public String getIdc() {
return idc;
}
public void setIdc(String idc) {
this.idc = idc;
}
public String getSecurityProperties() {
return securityProperties;
}
public void setSecurityProperties(String securityProperties) {
this.securityProperties = securityProperties;
}
public String getJmxProperties() {
return jmxProperties;
}
public void setJmxProperties(String jmxProperties) {
this.jmxProperties = jmxProperties;
public boolean legal() {
if (ValidateUtils.isNull(clusterName)
|| ValidateUtils.isNull(zookeeper)
|| ValidateUtils.isNull(idc)
|| ValidateUtils.isNull(bootstrapServers)
|| (isHa && ValidateUtils.isNull(activeClusterId))) {
return false;
}
return true;
}
@Override
@@ -99,16 +62,8 @@ public class ClusterDTO {
", idc='" + idc + '\'' +
", securityProperties='" + securityProperties + '\'' +
", jmxProperties='" + jmxProperties + '\'' +
", activeClusterId=" + activeClusterId +
", isHa=" + isHa +
'}';
}
public boolean legal() {
if (ValidateUtils.isNull(clusterName)
|| ValidateUtils.isNull(zookeeper)
|| ValidateUtils.isNull(idc)
|| ValidateUtils.isNull(bootstrapServers)) {
return false;
}
return true;
}
}

View File

@@ -0,0 +1,24 @@
package com.xiaojukeji.kafka.manager.common.entity.pagination;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
/**
 * Pagination metadata attached to paged responses
 * (total record count, current page number, page size).
 */
@Data
@ApiModel(description = "分页信息")
public class Pagination {
@ApiModelProperty(value = "总记录数", example = "100")
private long total;
// NOTE(review): example says "0" — page numbering appears to be 0-based; confirm with callers.
@ApiModelProperty(value = "当前页码", example = "0")
private long pageNo;
@ApiModelProperty(value = "单页大小", example = "10")
private long pageSize;
/**
 * @param total    total number of records matching the query
 * @param pageNo   current page number
 * @param pageSize number of records per page
 */
public Pagination(long total, long pageNo, long pageSize) {
this.total = total;
this.pageNo = pageNo;
this.pageSize = pageSize;
}
}

View File

@@ -0,0 +1,17 @@
package com.xiaojukeji.kafka.manager.common.entity.pagination;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import java.util.List;
/**
 * Generic paged-response envelope: one page of business data
 * plus its {@link Pagination} metadata.
 *
 * @param <T> element type of the business data list
 */
@Data
@ApiModel(description = "分页数据")
public class PaginationData<T> {
// The records of the current page.
@ApiModelProperty(value = "业务数据")
private List<T> bizData;
// Paging metadata (total / pageNo / pageSize).
@ApiModelProperty(value = "分页信息")
private Pagination pagination;
}

View File

@@ -0,0 +1,30 @@
package com.xiaojukeji.kafka.manager.common.entity.pojo;
import lombok.Data;
import java.io.Serializable;
import java.util.Date;
/**
 * Common base for database entities: primary key plus
 * create/modify timestamps, shared by all DO subclasses.
 *
 * @author zengqiao
 * @date 21/07/19
 */
@Data
public class BaseDO implements Serializable {
private static final long serialVersionUID = 8782560709154468485L;
/**
 * 主键ID (primary key)
 */
protected Long id;
/**
 * 创建时间 (row creation time)
 */
protected Date createTime;
/**
 * 更新时间 (row last-modified time)
 */
protected Date modifyTime;
}

View File

@@ -1,11 +1,18 @@
package com.xiaojukeji.kafka.manager.common.entity.pojo;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.ToString;
import java.util.Date;
/**
* @author zengqiao
* @date 20/6/29
*/
@Data
@ToString
@NoArgsConstructor
public class LogicalClusterDO {
private Long id;
@@ -27,99 +34,17 @@ public class LogicalClusterDO {
private Date gmtModify;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
public LogicalClusterDO(String name,
String identification,
Integer mode,
String appId,
Long clusterId,
String regionList) {
this.name = name;
}
public String getIdentification() {
return identification;
}
public void setIdentification(String identification) {
this.identification = identification;
}
public Integer getMode() {
return mode;
}
public void setMode(Integer mode) {
this.mode = mode;
}
public String getAppId() {
return appId;
}
public void setAppId(String appId) {
this.appId = appId;
}
public Long getClusterId() {
return clusterId;
}
public void setClusterId(Long clusterId) {
this.clusterId = clusterId;
}
public String getRegionList() {
return regionList;
}
public void setRegionList(String regionList) {
this.regionList = regionList;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public Date getGmtCreate() {
return gmtCreate;
}
public void setGmtCreate(Date gmtCreate) {
this.gmtCreate = gmtCreate;
}
public Date getGmtModify() {
return gmtModify;
}
public void setGmtModify(Date gmtModify) {
this.gmtModify = gmtModify;
}
@Override
public String toString() {
return "LogicalClusterDO{" +
"id=" + id +
", name='" + name + '\'' +
", identification='" + identification + '\'' +
", mode=" + mode +
", appId='" + appId + '\'' +
", clusterId=" + clusterId +
", regionList='" + regionList + '\'' +
", description='" + description + '\'' +
", gmtCreate=" + gmtCreate +
", gmtModify=" + gmtModify +
'}';
}
}

View File

@@ -1,7 +1,14 @@
package com.xiaojukeji.kafka.manager.common.entity.pojo;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.ToString;
import java.util.Date;
@Data
@ToString
@NoArgsConstructor
public class RegionDO implements Comparable<RegionDO> {
private Long id;
@@ -25,111 +32,13 @@ public class RegionDO implements Comparable<RegionDO> {
private String description;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public Integer getStatus() {
return status;
}
public void setStatus(Integer status) {
public RegionDO(Integer status, String name, Long clusterId, String brokerList) {
this.status = status;
}
public Date getGmtCreate() {
return gmtCreate;
}
public void setGmtCreate(Date gmtCreate) {
this.gmtCreate = gmtCreate;
}
public Date getGmtModify() {
return gmtModify;
}
public void setGmtModify(Date gmtModify) {
this.gmtModify = gmtModify;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Long getClusterId() {
return clusterId;
}
public void setClusterId(Long clusterId) {
this.clusterId = clusterId;
}
public String getBrokerList() {
return brokerList;
}
public void setBrokerList(String brokerList) {
this.brokerList = brokerList;
}
public Long getCapacity() {
return capacity;
}
public void setCapacity(Long capacity) {
this.capacity = capacity;
}
public Long getRealUsed() {
return realUsed;
}
public void setRealUsed(Long realUsed) {
this.realUsed = realUsed;
}
public Long getEstimateUsed() {
return estimateUsed;
}
public void setEstimateUsed(Long estimateUsed) {
this.estimateUsed = estimateUsed;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
@Override
public String toString() {
return "RegionDO{" +
"id=" + id +
", status=" + status +
", gmtCreate=" + gmtCreate +
", gmtModify=" + gmtModify +
", name='" + name + '\'' +
", clusterId=" + clusterId +
", brokerList='" + brokerList + '\'' +
", capacity=" + capacity +
", realUsed=" + realUsed +
", estimateUsed=" + estimateUsed +
", description='" + description + '\'' +
'}';
}
@Override
public int compareTo(RegionDO regionDO) {
return this.id.compareTo(regionDO.id);

View File

@@ -2,6 +2,8 @@ package com.xiaojukeji.kafka.manager.common.entity.pojo;
import com.xiaojukeji.kafka.manager.common.entity.dto.op.topic.TopicCreationDTO;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.Date;
@@ -9,6 +11,8 @@ import java.util.Date;
* @author zengqiao
* @date 20/4/24
*/
@Data
@NoArgsConstructor
public class TopicDO {
private Long id;
@@ -26,70 +30,14 @@ public class TopicDO {
private Long peakBytesIn;
public String getAppId() {
return appId;
}
public void setAppId(String appId) {
public TopicDO(String appId, Long clusterId, String topicName, String description, Long peakBytesIn) {
this.appId = appId;
}
public Long getClusterId() {
return clusterId;
}
public void setClusterId(Long clusterId) {
this.clusterId = clusterId;
}
public String getTopicName() {
return topicName;
}
public void setTopicName(String topicName) {
this.topicName = topicName;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public Long getPeakBytesIn() {
return peakBytesIn;
}
public void setPeakBytesIn(Long peakBytesIn) {
this.peakBytesIn = peakBytesIn;
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public Date getGmtCreate() {
return gmtCreate;
}
public void setGmtCreate(Date gmtCreate) {
this.gmtCreate = gmtCreate;
}
public Date getGmtModify() {
return gmtModify;
}
public void setGmtModify(Date gmtModify) {
this.gmtModify = gmtModify;
}
public static TopicDO buildFrom(TopicCreationDTO dto) {
TopicDO topicDO = new TopicDO();
topicDO.setAppId(dto.getAppId());

View File

@@ -0,0 +1,69 @@
package com.xiaojukeji.kafka.manager.common.entity.pojo.ha;
import com.baomidou.mybatisplus.annotation.TableName;
import com.xiaojukeji.kafka.manager.common.entity.pojo.BaseDO;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
 * HA active/standby relation table (ha_active_standby_relation).
 * One row records that a resource on the active cluster is mirrored
 * by a resource on the standby cluster.
 * NOTE(review): @Data on a BaseDO subclass without
 * @EqualsAndHashCode(callSuper = ...) means equals/hashCode ignore the
 * inherited id/createTime/modifyTime fields — confirm this is intended.
 */
@Data
@NoArgsConstructor
@AllArgsConstructor
@TableName("ha_active_standby_relation")
public class HaASRelationDO extends BaseDO {
/**
 * 主集群ID (active cluster ID)
 */
private Long activeClusterPhyId;
/**
 * 主集群资源名称 (resource name on the active cluster)
 */
private String activeResName;
/**
 * 备集群ID (standby cluster ID)
 */
private Long standbyClusterPhyId;
/**
 * 备集群资源名称 (resource name on the standby cluster)
 */
private String standbyResName;
/**
 * 资源类型 (resource type code)
 */
private Integer resType;
/**
 * 主备状态 (active/standby status code)
 */
private Integer status;
/**
 * 主备关系中的唯一性字段 (uniqueness key of the relation)
 */
private String uniqueField;
// Partial constructor — presumably used only for status updates keyed by id;
// NOTE(review): leaves uniqueField (and all other fields) null — confirm callers never persist it as a new row.
public HaASRelationDO(Long id, Integer status) {
this.id = id;
this.status = status;
}
// Full constructor for inserting a new relation; derives uniqueField from the identifying columns.
public HaASRelationDO(Long activeClusterPhyId, String activeResName, Long standbyClusterPhyId, String standbyResName, Integer resType, Integer status) {
this.activeClusterPhyId = activeClusterPhyId;
this.activeResName = activeResName;
this.standbyClusterPhyId = standbyClusterPhyId;
this.standbyResName = standbyResName;
this.resType = resType;
this.status = status;
// Unique per (active resource, standby resource, type) pair; note the pair ordering matters,
// so two resources may also hold the mirrored relation (active-active), each being the other's standby.
this.uniqueField = String.format("%d_%s||%d_%s||%d", activeClusterPhyId, activeResName, standbyClusterPhyId, standbyResName, resType);
}
}

View File

@@ -0,0 +1,42 @@
package com.xiaojukeji.kafka.manager.common.entity.pojo.ha;
import com.baomidou.mybatisplus.annotation.TableName;
import com.xiaojukeji.kafka.manager.common.entity.pojo.BaseDO;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
 * HA active/standby switch job table (ha_active_standby_switch_job).
 * One row per cluster-level switch job; per-topic work is tracked in
 * the sub-job table.
 */
@Data
@NoArgsConstructor
@TableName("ha_active_standby_switch_job")
public class HaASSwitchJobDO extends BaseDO {
/**
 * 主集群ID (active cluster ID)
 */
private Long activeClusterPhyId;
/**
 * 备集群ID (standby cluster ID)
 */
private Long standbyClusterPhyId;
/**
 * 主备状态 (job status code)
 */
private Integer jobStatus;
/**
 * 操作人 (operator who triggered the switch)
 */
private String operator;
// Convenience constructor for inserting a new job; id/timestamps come from BaseDO / the DB.
public HaASSwitchJobDO(Long activeClusterPhyId, Long standbyClusterPhyId, Integer jobStatus, String operator) {
this.activeClusterPhyId = activeClusterPhyId;
this.standbyClusterPhyId = standbyClusterPhyId;
this.jobStatus = jobStatus;
this.operator = operator;
}
}

View File

@@ -0,0 +1,67 @@
package com.xiaojukeji.kafka.manager.common.entity.pojo.ha;
import com.baomidou.mybatisplus.annotation.TableName;
import com.xiaojukeji.kafka.manager.common.entity.pojo.BaseDO;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
 * HA active/standby switch sub-job table (ha_active_standby_switch_sub_job).
 * One row per resource being switched, belonging to a parent
 * {@link HaASSwitchJobDO} via jobId.
 */
@Data
@NoArgsConstructor
@TableName("ha_active_standby_switch_sub_job")
public class HaASSwitchSubJobDO extends BaseDO {
/**
 * 任务ID (parent job ID)
 */
private Long jobId;
/**
 * 主集群ID (active cluster ID)
 */
private Long activeClusterPhyId;
/**
 * 主集群资源名称 (resource name on the active cluster)
 */
private String activeResName;
/**
 * 备集群ID (standby cluster ID)
 */
private Long standbyClusterPhyId;
/**
 * 备集群资源名称 (resource name on the standby cluster)
 */
private String standbyResName;
/**
 * 资源类型 (resource type code)
 */
private Integer resType;
/**
 * 任务状态 (sub-job status code)
 */
private Integer jobStatus;
/**
 * 扩展数据 (JSON-serialized extension data — presumably; confirm serialization format)
 * @see com.xiaojukeji.kafka.manager.common.entity.ao.ha.job.HaSubJobExtendData
 */
private String extendData;
// Convenience constructor for inserting a new sub-job row.
public HaASSwitchSubJobDO(Long jobId, Long activeClusterPhyId, String activeResName, Long standbyClusterPhyId, String standbyResName, Integer resType, Integer jobStatus, String extendData) {
this.jobId = jobId;
this.activeClusterPhyId = activeClusterPhyId;
this.activeResName = activeResName;
this.standbyClusterPhyId = standbyClusterPhyId;
this.standbyResName = standbyResName;
this.resType = resType;
this.jobStatus = jobStatus;
this.extendData = extendData;
}
}

View File

@@ -0,0 +1,50 @@
package com.xiaojukeji.kafka.manager.common.entity.pojo.ha;
import com.baomidou.mybatisplus.annotation.TableName;
import com.xiaojukeji.kafka.manager.common.entity.pojo.BaseDO;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.Date;
/**
 * Job log table (job_log): one timestamped log line for a business job,
 * identified by a (bizType, bizKeyword) pair.
 */
@Data
@NoArgsConstructor
@TableName("job_log")
public class JobLogDO extends BaseDO {
/**
 * 业务类型 (business type code, e.g. JobLogBizTypEnum values)
 */
private Integer bizType;
/**
 * 业务关键字 (business keyword identifying the job instance)
 */
private String bizKeyword;
/**
 * 打印时间 (time the log line was emitted)
 */
private Date printTime;
/**
 * 内容 (log content)
 */
private String content;
// Template constructor: fixes the (bizType, bizKeyword) pair; content filled later via setAndCopyNew.
public JobLogDO(Integer bizType, String bizKeyword) {
this.bizType = bizType;
this.bizKeyword = bizKeyword;
}
// Full constructor for a complete log line.
public JobLogDO(Integer bizType, String bizKeyword, Date printTime, String content) {
this.bizType = bizType;
this.bizKeyword = bizKeyword;
this.printTime = printTime;
this.content = content;
}
// Returns a NEW JobLogDO sharing this instance's bizType/bizKeyword with the given
// time and content; the receiver is not mutated despite the "set" in the name.
public JobLogDO setAndCopyNew(Date printTime, String content) {
return new JobLogDO(this.bizType, this.bizKeyword, printTime, content);
}
}

View File

@@ -2,12 +2,14 @@ package com.xiaojukeji.kafka.manager.common.entity.vo.common;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
/**
* Topic信息
* @author zengqiao
* @date 19/4/1
*/
@Data
@ApiModel(description = "Topic信息概览")
public class TopicOverviewVO {
@ApiModelProperty(value = "集群ID")
@@ -49,109 +51,8 @@ public class TopicOverviewVO {
@ApiModelProperty(value = "逻辑集群id")
private Long logicalClusterId;
public Long getClusterId() {
return clusterId;
}
public void setClusterId(Long clusterId) {
this.clusterId = clusterId;
}
public String getTopicName() {
return topicName;
}
public void setTopicName(String topicName) {
this.topicName = topicName;
}
public Integer getReplicaNum() {
return replicaNum;
}
public void setReplicaNum(Integer replicaNum) {
this.replicaNum = replicaNum;
}
public Integer getPartitionNum() {
return partitionNum;
}
public void setPartitionNum(Integer partitionNum) {
this.partitionNum = partitionNum;
}
public Long getRetentionTime() {
return retentionTime;
}
public void setRetentionTime(Long retentionTime) {
this.retentionTime = retentionTime;
}
public Object getByteIn() {
return byteIn;
}
public void setByteIn(Object byteIn) {
this.byteIn = byteIn;
}
public Object getByteOut() {
return byteOut;
}
public void setByteOut(Object byteOut) {
this.byteOut = byteOut;
}
public Object getProduceRequest() {
return produceRequest;
}
public void setProduceRequest(Object produceRequest) {
this.produceRequest = produceRequest;
}
public String getAppName() {
return appName;
}
public void setAppName(String appName) {
this.appName = appName;
}
public String getAppId() {
return appId;
}
public void setAppId(String appId) {
this.appId = appId;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public Long getUpdateTime() {
return updateTime;
}
public void setUpdateTime(Long updateTime) {
this.updateTime = updateTime;
}
public Long getLogicalClusterId() {
return logicalClusterId;
}
public void setLogicalClusterId(Long logicalClusterId) {
this.logicalClusterId = logicalClusterId;
}
@ApiModelProperty(value = "高可用关系1:主topic, 0:备topic , 其他:非高可用topic")
private Integer haRelation;
@Override
public String toString() {
@@ -169,6 +70,7 @@ public class TopicOverviewVO {
", description='" + description + '\'' +
", updateTime=" + updateTime +
", logicalClusterId=" + logicalClusterId +
", haRelation=" + haRelation +
'}';
}
}

View File

@@ -0,0 +1,34 @@
package com.xiaojukeji.kafka.manager.common.entity.vo.ha;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
/**
 * View object: a topic of an HA cluster together with its ACL counts
 * and its active/standby relation.
 *
 * @author zengqiao
 * @date 20/4/29
 */
@Data
@ApiModel(description="HA集群-Topic信息")
public class HaClusterTopicVO {
// Cluster the query was issued against.
@ApiModelProperty(value="当前查询的集群ID")
private Long clusterId;
@ApiModelProperty(value="Topic名称")
private String topicName;
// Number of produce ACLs on the topic.
@ApiModelProperty(value="生产Acl数量")
private Integer produceAclNum;
// Number of consume ACLs on the topic.
@ApiModelProperty(value="消费Acl数量")
private Integer consumeAclNum;
@ApiModelProperty(value="主集群ID")
private Long activeClusterId;
@ApiModelProperty(value="备集群ID")
private Long standbyClusterId;
// Active/standby status code of the relation.
@ApiModelProperty(value="主备状态")
private Integer status;
}

View File

@@ -0,0 +1,48 @@
package com.xiaojukeji.kafka.manager.common.entity.vo.ha;
import com.xiaojukeji.kafka.manager.common.entity.vo.rd.cluster.ClusterBaseVO;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
/**
 * View object: cluster summary enriched with HA (active/standby) state,
 * extending the base cluster fields from {@link ClusterBaseVO}.
 *
 * @author zengqiao
 * @date 20/4/29
 */
@Data
@ApiModel(description="HA集群-集群信息")
public class HaClusterVO extends ClusterBaseVO {
@ApiModelProperty(value="broker数量")
private Integer brokerNum;
@ApiModelProperty(value="topic数量")
private Integer topicNum;
@ApiModelProperty(value="消费组数")
private Integer consumerGroupNum;
@ApiModelProperty(value="region数")
private Integer regionNum;
@ApiModelProperty(value="ControllerID")
private Integer controllerId;
/**
 * HA status code of this cluster.
 * @see com.xiaojukeji.kafka.manager.common.bizenum.ha.HaStatusEnum
 */
@ApiModelProperty(value="主备状态")
private Integer haStatus;
@ApiModelProperty(value="主topic数")
private Long activeTopicCount;
@ApiModelProperty(value="备topic数")
private Long standbyTopicCount;
// Nested VO describing the paired (standby) cluster — self-referential by design.
@ApiModelProperty(value="备集群信息")
private HaClusterVO haClusterVO;
// ID of the in-flight switch job, if any — presumably null when no switch is running; confirm.
@ApiModelProperty(value="切换任务id")
private Long haASSwitchJobId;
}

View File

@@ -0,0 +1,37 @@
package com.xiaojukeji.kafka.manager.common.entity.vo.ha.job;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
 * View object: per-topic detail row of an HA switch job.
 */
@Data
@NoArgsConstructor
@AllArgsConstructor
@ApiModel(description = "Job详情")
public class HaJobDetailVO {
@ApiModelProperty(value = "Topic名称")
private String topicName;
@ApiModelProperty(value="主物理集群ID")
private Long activeClusterPhyId;
@ApiModelProperty(value="主物理集群名称")
private String activeClusterPhyName;
@ApiModelProperty(value="备物理集群ID")
private Long standbyClusterPhyId;
@ApiModelProperty(value="备物理集群名称")
private String standbyClusterPhyName;
// Sum of consumer lag for the topic during the switch.
@ApiModelProperty(value="Lag和")
private Long sumLag;
// Sub-job status code.
@ApiModelProperty(value="状态")
private Integer status;
// Configured timeout — field name suggests seconds; confirm unit against the config source.
@ApiModelProperty(value="超时时间配置")
private Long timeoutUnitSecConfig;
}

View File

@@ -0,0 +1,46 @@
package com.xiaojukeji.kafka.manager.common.entity.vo.ha.job;
import com.xiaojukeji.kafka.manager.common.entity.ao.ha.job.HaJobState;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
 * View object: aggregate state counters of an HA switch job
 * (the "...Nu" suffix means "number"; names are part of the JSON contract).
 */
@Data
@NoArgsConstructor
@AllArgsConstructor
@ApiModel(description = "Job状态")
public class HaJobStateVO {
@ApiModelProperty(value = "任务总数")
private Integer jobNu;
@ApiModelProperty(value = "运行中的任务数")
private Integer runningNu;
@ApiModelProperty(value = "超时运行中的任务数")
private Integer runningInTimeoutNu;
@ApiModelProperty(value = "准备好待运行的任务数")
private Integer waitingNu;
@ApiModelProperty(value = "运行成功的任务数")
private Integer successNu;
@ApiModelProperty(value = "运行失败的任务数")
private Integer failedNu;
@ApiModelProperty(value = "进度,[0 - 100]")
private Integer progress;
// Maps the internal HaJobState aggregate onto this VO.
// NOTE(review): waitingNu is hard-coded to 0 — HaJobState apparently has no "waiting" counter; confirm.
public HaJobStateVO(HaJobState jobState) {
this.jobNu = jobState.getTotal();
this.runningNu = jobState.getDoing();
this.runningInTimeoutNu = jobState.getDoingInTimeout();
this.waitingNu = 0;
this.successNu = jobState.getSuccess();
this.failedNu = jobState.getFailed();
this.progress = jobState.getProgress();
}
}

View File

@@ -0,0 +1,26 @@
package com.xiaojukeji.kafka.manager.common.entity.vo.normal.topic;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
/**
 * View object: HA role of one topic on one physical cluster.
 *
 * @author zengqiao
 * @date 20/4/8
 */
@Data
@ApiModel(value = "集群的topic高可用状态")
public class HaClusterTopicHaStatusVO {
@ApiModelProperty(value = "物理集群ID")
private Long clusterId;
@ApiModelProperty(value = "物理集群名称")
private String clusterName;
@ApiModelProperty(value = "Topic名称")
private String topicName;
// HA role code: 1 = active topic, 0 = standby topic, anything else = not HA-managed (per the Swagger text below).
@ApiModelProperty(value = "高可用关系1:主topic, 0:备topic , 其他:非高可用topic")
private Integer haRelation;
}

View File

@@ -2,6 +2,7 @@ package com.xiaojukeji.kafka.manager.common.entity.vo.normal.topic;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import java.util.List;
@@ -10,6 +11,7 @@ import java.util.List;
* @author zengqiao
* @date 19/4/1
*/
@Data
@ApiModel(description = "Topic基本信息")
public class TopicBasicVO {
@ApiModelProperty(value = "集群id")
@@ -57,125 +59,8 @@ public class TopicBasicVO {
@ApiModelProperty(value = "所属region")
private List<String> regionNameList;
public Long getClusterId() {
return clusterId;
}
public void setClusterId(Long clusterId) {
this.clusterId = clusterId;
}
public String getAppId() {
return appId;
}
public void setAppId(String appId) {
this.appId = appId;
}
public String getAppName() {
return appName;
}
public void setAppName(String appName) {
this.appName = appName;
}
public Integer getPartitionNum() {
return partitionNum;
}
public void setPartitionNum(Integer partitionNum) {
this.partitionNum = partitionNum;
}
public Integer getReplicaNum() {
return replicaNum;
}
public void setReplicaNum(Integer replicaNum) {
this.replicaNum = replicaNum;
}
public String getPrincipals() {
return principals;
}
public void setPrincipals(String principals) {
this.principals = principals;
}
public Long getRetentionTime() {
return retentionTime;
}
public void setRetentionTime(Long retentionTime) {
this.retentionTime = retentionTime;
}
public Long getRetentionBytes() {
return retentionBytes;
}
public void setRetentionBytes(Long retentionBytes) {
this.retentionBytes = retentionBytes;
}
public Long getCreateTime() {
return createTime;
}
public void setCreateTime(Long createTime) {
this.createTime = createTime;
}
public Long getModifyTime() {
return modifyTime;
}
public void setModifyTime(Long modifyTime) {
this.modifyTime = modifyTime;
}
public Integer getScore() {
return score;
}
public void setScore(Integer score) {
this.score = score;
}
public String getTopicCodeC() {
return topicCodeC;
}
public void setTopicCodeC(String topicCodeC) {
this.topicCodeC = topicCodeC;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public String getBootstrapServers() {
return bootstrapServers;
}
public void setBootstrapServers(String bootstrapServers) {
this.bootstrapServers = bootstrapServers;
}
public List<String> getRegionNameList() {
return regionNameList;
}
public void setRegionNameList(List<String> regionNameList) {
this.regionNameList = regionNameList;
}
@ApiModelProperty(value = "高可用关系1:主topic, 0:备topic , 其他:非主备topic")
private Integer haRelation;
@Override
public String toString() {
@@ -195,6 +80,7 @@ public class TopicBasicVO {
", description='" + description + '\'' +
", bootstrapServers='" + bootstrapServers + '\'' +
", regionNameList=" + regionNameList +
", haRelation=" + haRelation +
'}';
}
}

View File

@@ -0,0 +1,26 @@
package com.xiaojukeji.kafka.manager.common.entity.vo.normal.topic;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
/**
 * View object: a topic with its HA role on a physical cluster.
 * NOTE(review): field-for-field identical to HaClusterTopicHaStatusVO — consider consolidating.
 *
 * @author zengqiao
 * @date 20/4/8
 */
@Data
@ApiModel(value = "Topic信息")
public class TopicHaVO {
@ApiModelProperty(value = "物理集群ID")
private Long clusterId;
@ApiModelProperty(value = "物理集群名称")
private String clusterName;
@ApiModelProperty(value = "Topic名称")
private String topicName;
// HA role code: 1 = active topic, 0 = standby topic, anything else = not HA-managed (per the Swagger text below).
@ApiModelProperty(value = "高可用关系1:主topic, 0:备topic , 其他:非高可用topic")
private Integer haRelation;
}

View File

@@ -2,6 +2,7 @@ package com.xiaojukeji.kafka.manager.common.entity.vo.rd;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import java.util.List;
import java.util.Properties;
@@ -10,6 +11,7 @@ import java.util.Properties;
* @author zengqiao
* @date 20/6/10
*/
@Data
@ApiModel(description = "Topic基本信息(RD视角)")
public class RdTopicBasicVO {
@ApiModelProperty(value = "集群ID")
@@ -39,77 +41,8 @@ public class RdTopicBasicVO {
@ApiModelProperty(value = "所属region")
private List<String> regionNameList;
public Long getClusterId() {
return clusterId;
}
public void setClusterId(Long clusterId) {
this.clusterId = clusterId;
}
public String getClusterName() {
return clusterName;
}
public void setClusterName(String clusterName) {
this.clusterName = clusterName;
}
public String getTopicName() {
return topicName;
}
public void setTopicName(String topicName) {
this.topicName = topicName;
}
public Long getRetentionTime() {
return retentionTime;
}
public void setRetentionTime(Long retentionTime) {
this.retentionTime = retentionTime;
}
public String getAppId() {
return appId;
}
public void setAppId(String appId) {
this.appId = appId;
}
public String getAppName() {
return appName;
}
public void setAppName(String appName) {
this.appName = appName;
}
public Properties getProperties() {
return properties;
}
public void setProperties(Properties properties) {
this.properties = properties;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public List<String> getRegionNameList() {
return regionNameList;
}
public void setRegionNameList(List<String> regionNameList) {
this.regionNameList = regionNameList;
}
@ApiModelProperty(value = "高可用关系1:主topic, 0:备topic , 其他:非主备topic")
private Integer haRelation;
@Override
public String toString() {
@@ -122,7 +55,8 @@ public class RdTopicBasicVO {
", appName='" + appName + '\'' +
", properties=" + properties +
", description='" + description + '\'' +
", regionNameList='" + regionNameList + '\'' +
", regionNameList=" + regionNameList +
", haRelation=" + haRelation +
'}';
}
}

View File

@@ -0,0 +1,30 @@
package com.xiaojukeji.kafka.manager.common.entity.vo.rd.app;

import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;

import java.util.List;

/**
 * App-to-topic association view for one kafkaUser on one physical cluster:
 * which topics are selected, which are not, and which have no HA pairing yet.
 * Accessors are generated by Lombok {@code @Data}.
 *
 * @author zengqiao
 * @date 20/5/4
 */
@Data
@ApiModel(description="App关联Topic信息")
public class AppRelateTopicsVO {
    @ApiModelProperty(value="物理集群ID")
    private Long clusterPhyId;

    @ApiModelProperty(value="kafkaUser")
    private String kafkaUser;

    // Topics currently associated (selected) for this kafkaUser
    @ApiModelProperty(value="选中的Topic列表")
    private List<String> selectedTopicNameList;

    // Topics visible but not selected
    @ApiModelProperty(value="未选中的Topic列表")
    private List<String> notSelectTopicNameList;

    // Topics for which no HA (primary/standby) relation has been established
    @ApiModelProperty(value="未建立HA的Topic列表")
    private List<String> notHaTopicNameList;
}

View File

@@ -2,11 +2,13 @@ package com.xiaojukeji.kafka.manager.common.entity.vo.rd.cluster;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
/**
* @author zengqiao
* @date 20/4/23
*/
@Data
@ApiModel(description="集群信息")
public class ClusterDetailVO extends ClusterBaseVO {
@ApiModelProperty(value="Broker数")
@@ -24,45 +26,11 @@ public class ClusterDetailVO extends ClusterBaseVO {
@ApiModelProperty(value="Region数")
private Integer regionNum;
public Integer getBrokerNum() {
return brokerNum;
}
@ApiModelProperty(value = "高可用关系1:主, 0:备 , 其他:非高可用")
private Integer haRelation;
public void setBrokerNum(Integer brokerNum) {
this.brokerNum = brokerNum;
}
public Integer getTopicNum() {
return topicNum;
}
public void setTopicNum(Integer topicNum) {
this.topicNum = topicNum;
}
public Integer getConsumerGroupNum() {
return consumerGroupNum;
}
public void setConsumerGroupNum(Integer consumerGroupNum) {
this.consumerGroupNum = consumerGroupNum;
}
public Integer getControllerId() {
return controllerId;
}
public void setControllerId(Integer controllerId) {
this.controllerId = controllerId;
}
public Integer getRegionNum() {
return regionNum;
}
public void setRegionNum(Integer regionNum) {
this.regionNum = regionNum;
}
@ApiModelProperty(value = "互备集群名称")
private String mutualBackupClusterName;
@Override
public String toString() {
@@ -72,6 +40,8 @@ public class ClusterDetailVO extends ClusterBaseVO {
", consumerGroupNum=" + consumerGroupNum +
", controllerId=" + controllerId +
", regionNum=" + regionNum +
"} " + super.toString();
", haRelation=" + haRelation +
", mutualBackupClusterName='" + mutualBackupClusterName + '\'' +
'}';
}
}

View File

@@ -0,0 +1,30 @@
package com.xiaojukeji.kafka.manager.common.entity.vo.rd.job;

import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;

import java.util.Date;

/**
 * A single log line produced by a job (e.g. an HA primary/standby switch job).
 * Accessors are generated by Lombok {@code @Data}.
 */
@Data
@NoArgsConstructor
@AllArgsConstructor
@ApiModel(description = "Job日志")
public class JobLogVO {
    // NOTE(review): protected (not private) — sibling classes in this package
    // (e.g. JobMulLogVO) read this field directly when computing the log cursor.
    @ApiModelProperty(value = "日志ID")
    protected Long id;

    // Business type of the job the log belongs to (e.g. HA switch job)
    @ApiModelProperty(value = "业务类型")
    private Integer bizType;

    // Business keyword used to correlate logs with a job instance
    @ApiModelProperty(value = "业务关键字")
    private String bizKeyword;

    @ApiModelProperty(value = "打印时间")
    private Date printTime;

    @ApiModelProperty(value = "内容")
    private String content;
}

View File

@@ -0,0 +1,31 @@
package com.xiaojukeji.kafka.manager.common.entity.vo.rd.job;

import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;

import java.util.ArrayList;
import java.util.List;

/**
 * Page of job logs plus the cursor ("end log id") the client should pass
 * on its next poll to fetch only newer log lines.
 */
@Data
@NoArgsConstructor
@AllArgsConstructor
@ApiModel(description = "Job日志")
public class JobMulLogVO {
    // Cursor for the next poll: one past the largest id returned on this page
    @ApiModelProperty(value = "末尾日志ID")
    private Long endLogId;

    @ApiModelProperty(value = "日志信息")
    private List<JobLogVO> logList;

    /**
     * @param logList    logs of the current page; {@code null} is treated as empty
     * @param startLogId cursor the caller queried with; echoed back when the page is empty
     */
    public JobMulLogVO(List<JobLogVO> logList, Long startLogId) {
        this.logList = logList == null ? new ArrayList<>() : logList;

        // Next cursor: one past the largest returned log id, or the original
        // start cursor when this page is empty. max()/orElse() replaces the
        // previous unchecked Optional.get().
        this.endLogId = this.logList.stream()
                .map(elem -> elem.id)
                .max(Long::compare)
                .map(maxId -> maxId + 1)
                .orElse(startLogId);
    }
}

View File

@@ -0,0 +1,404 @@
package com.xiaojukeji.kafka.manager.common.utils;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.TypeReference;
import com.alibaba.fastjson.serializer.SerializerFeature;
import com.google.common.collect.*;
import org.apache.commons.collections.CollectionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeanUtils;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.lang.reflect.Type;
import java.util.*;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Consumer;
import java.util.function.Function;
/**
 * JSON / bean / collection / primitive conversion helpers.
 * All methods are static and null-tolerant: failed conversions generally
 * return {@code null} (or an empty collection) rather than throw.
 */
public class ConvertUtil {
    private static final Logger LOGGER = LoggerFactory.getLogger(ConvertUtil.class);

    /** Utility class: no instances. */
    private ConvertUtil(){}

    /**
     * Parses JSON into the requested type; plain classes take the class-based
     * path, parameterized types fall through to fastjson's generic parser.
     */
    @SuppressWarnings("unchecked")
    public static <T> T toObj(String json, Type resultType) {
        if (resultType instanceof Class) {
            Class<T> clazz = (Class<T>) resultType;
            return str2ObjByJson(json, clazz);
        }
        return JSON.parseObject(json, resultType);
    }

    /** Parses JSON into an instance of {@code tgtClass}. */
    public static <T> T str2ObjByJson(String srcStr, Class<T> tgtClass) {
        return JSON.parseObject(srcStr, tgtClass);
    }

    /** Parses JSON into a parameterized type described by {@code tt}. */
    public static <T> T str2ObjByJson(String srcStr, TypeReference<T> tt) {
        return JSON.parseObject(srcStr, tt);
    }

    /**
     * Serializes an object to JSON. Strings are returned unchanged and
     * {@code null} yields {@code null}.
     */
    public static String obj2Json(Object srcObj) {
        if (srcObj == null) {
            return null;
        }
        if (srcObj instanceof String) {
            return (String) srcObj;
        }
        return JSON.toJSONString(srcObj);
    }

    /** Serializes to JSON with fastjson's "$ref" circular-reference detection disabled. */
    public static String obj2JsonWithIgnoreCircularReferenceDetect(Object srcObj) {
        return JSON.toJSONString(srcObj, SerializerFeature.DisableCircularReferenceDetect);
    }

    /** Parses a JSON array into a {@code List<T>}. */
    public static <T> List<T> str2ObjArrayByJson(String srcStr, Class<T> tgtClass) {
        return JSON.parseArray(srcStr, tgtClass);
    }

    /** Converts between bean types via a JSON round trip (also acts as a deep copy). */
    public static <T> T obj2ObjByJSON(Object srcObj, Class<T> tgtClass) {
        return JSON.parseObject(JSON.toJSONString(srcObj), tgtClass);
    }

    /**
     * Joins the elements' string forms with {@code separator}; null/empty
     * input yields "".
     * Fix: the previous version deleted only the LAST character, which left
     * garbage behind whenever the separator was longer than one character.
     */
    public static String list2String(List<?> list, String separator) {
        if (list == null || list.isEmpty()) {
            return "";
        }
        StringBuilder sb = new StringBuilder();
        for (Object item : list) {
            if (sb.length() > 0) {
                sb.append(separator);
            }
            sb.append(item);
        }
        return sb.toString();
    }

    /** Indexes the list by {@code mapper}; on duplicate keys the later element wins. */
    public static <K, V> Map<K, V> list2Map(List<V> list, Function<? super V, ? extends K> mapper) {
        Map<K, V> map = Maps.newHashMap();
        if (CollectionUtils.isNotEmpty(list)) {
            for (V v : list) {
                map.put(mapper.apply(v), v);
            }
        }
        return map;
    }

    /** Parallel variant of {@link #list2Map(List, Function)} backed by a ConcurrentHashMap. */
    public static <K, V> Map<K, V> list2MapParallel(List<V> list, Function<? super V, ? extends K> mapper) {
        Map<K, V> map = new ConcurrentHashMap<>();
        if (CollectionUtils.isNotEmpty(list)) {
            list.parallelStream().forEach(v -> map.put(mapper.apply(v), v));
        }
        return map;
    }

    /** Builds a map with independently derived keys and values; later duplicates win. */
    public static <K, V, O> Map<K, V> list2Map(List<O> list, Function<? super O, ? extends K> keyMapper,
                                               Function<? super O, ? extends V> valueMapper) {
        Map<K, V> map = Maps.newHashMap();
        if (CollectionUtils.isNotEmpty(list)) {
            for (O o : list) {
                map.put(keyMapper.apply(o), valueMapper.apply(o));
            }
        }
        return map;
    }

    /** Groups the elements into a Multimap keyed by {@code mapper} (duplicates preserved). */
    public static <K, V> Multimap<K, V> list2MulMap(List<V> list, Function<? super V, ? extends K> mapper) {
        Multimap<K, V> multimap = ArrayListMultimap.create();
        if (CollectionUtils.isNotEmpty(list)) {
            for (V v : list) {
                multimap.put(mapper.apply(v), v);
            }
        }
        return multimap;
    }

    /** Multimap variant with independently derived keys and values. */
    public static <K, V, O> Multimap<K, V> list2MulMap(List<O> list, Function<? super O, ? extends K> keyMapper,
                                                       Function<? super O, ? extends V> valueMapper) {
        Multimap<K, V> multimap = ArrayListMultimap.create();
        if (CollectionUtils.isNotEmpty(list)) {
            for (O o : list) {
                multimap.put(keyMapper.apply(o), valueMapper.apply(o));
            }
        }
        return multimap;
    }

    /** Like {@link #list2MulMap} but exposed as a {@code Map<K, List<V>>} view. */
    public static <K, V, O> Map<K, List<V>> list2MapOfList(List<O> list, Function<? super O, ? extends K> keyMapper,
                                                           Function<? super O, ? extends V> valueMapper) {
        ArrayListMultimap<K, V> multimap = ArrayListMultimap.create();
        if (CollectionUtils.isNotEmpty(list)) {
            for (O o : list) {
                multimap.put(keyMapper.apply(o), valueMapper.apply(o));
            }
        }
        return Multimaps.asMap(multimap);
    }

    /** Projects the list through {@code mapper} into a set (deduplicates). */
    public static <K, V> Set<K> list2Set(List<V> list, Function<? super V, ? extends K> mapper) {
        Set<K> set = Sets.newHashSet();
        if (CollectionUtils.isNotEmpty(list)) {
            for (V v : list) {
                set.add(mapper.apply(v));
            }
        }
        return set;
    }

    /** Converts each element to {@code tClass} via bean copy; failed conversions are dropped. */
    public static <T> Set<T> set2Set(Set<? extends Object> set, Class<T> tClass) {
        if (CollectionUtils.isEmpty(set)) {
            return new HashSet<>();
        }
        Set<T> result = new HashSet<>();
        for (Object o : set) {
            T t = obj2Obj(o, tClass);
            if (t != null) {
                result.add(t);
            }
        }
        return result;
    }

    /** Converts each element to {@code tClass}; failed conversions are dropped. */
    public static <T> List<T> list2List(List<? extends Object> list, Class<T> tClass) {
        return list2List(list, tClass, (t) -> {
        });
    }

    /** Converts each element and applies {@code consumer} as a post-processing hook. */
    public static <T> List<T> list2List(List<? extends Object> list, Class<T> tClass, Consumer<T> consumer) {
        if (CollectionUtils.isEmpty(list)) {
            return Lists.newArrayList();
        }
        List<T> result = Lists.newArrayList();
        for (Object object : list) {
            T t = obj2Obj(object, tClass, consumer);
            if (t != null) {
                result.add(t);
            }
        }
        return result;
    }

    /**
     * Bean-copy conversion.
     *
     * @param srcObj   source object
     * @param tgtClass target class; must have an accessible no-arg constructor
     * @param <T>      target type
     * @return the converted object, or {@code null} on failure / null input
     */
    public static <T> T obj2Obj(final Object srcObj, Class<T> tgtClass) {
        return obj2Obj(srcObj, tgtClass, (t) -> {
        });
    }

    /** Bean-copy conversion with a post-processing hook applied to the new instance. */
    public static <T> T obj2Obj(final Object srcObj, Class<T> tgtClass, Consumer<T> consumer) {
        if (srcObj == null) {
            return null;
        }
        T tgt = null;
        try {
            // getDeclaredConstructor().newInstance() replaces the deprecated Class.newInstance()
            tgt = tgtClass.getDeclaredConstructor().newInstance();
            BeanUtils.copyProperties(srcObj, tgt);
            consumer.accept(tgt);
        } catch (Exception e) {
            // pass the throwable as well so the stack trace is not lost
            LOGGER.warn("class=ConvertUtil||method=obj2Obj||msg={}", e.getMessage(), e);
        }
        return tgt;
    }

    /** Flattens the list of maps into one; later maps overwrite earlier keys. */
    public static <K, V> Map<K, V> mergeMapList(List<Map<K, V>> mapList) {
        Map<K, V> result = Maps.newHashMap();
        for (Map<K, V> map : mapList) {
            result.putAll(map);
        }
        return result;
    }

    /**
     * Reflectively dumps the object's declared fields into a map.
     * (Method name keeps its historical capitalization for backward compatibility.)
     */
    public static Map<String, Object> Obj2Map(Object obj) {
        if (null == obj) {
            return null;
        }
        Map<String, Object> map = new HashMap<>();
        Field[] fields = obj.getClass().getDeclaredFields();
        for (Field field : fields) {
            field.setAccessible(true);
            try {
                map.put(field.getName(), field.get(obj));
            } catch (IllegalAccessException e) {
                LOGGER.warn("class=ConvertUtil||method=Obj2Map||msg={}", e.getMessage(), e);
            }
        }
        return map;
    }

    /** Reflectively populates a new {@code clz} instance from the map; static/final fields are skipped. */
    public static Object map2Obj(Map<String, Object> map, Class<?> clz) {
        Object obj = null;
        try {
            // getDeclaredConstructor().newInstance() replaces the deprecated Class.newInstance()
            obj = clz.getDeclaredConstructor().newInstance();
            Field[] declaredFields = obj.getClass().getDeclaredFields();
            for (Field field : declaredFields) {
                int mod = field.getModifiers();
                if (Modifier.isStatic(mod) || Modifier.isFinal(mod)) {
                    continue;
                }
                field.setAccessible(true);
                field.set(obj, map.get(field.getName()));
            }
        } catch (Exception e) {
            LOGGER.warn("class=ConvertUtil||method=map2Obj||msg={}", e.getMessage(), e);
        }
        return obj;
    }

    /** Returns a copy of the map ordered by value, descending (LinkedHashMap keeps the order). */
    public static Map<String, Double> sortMapByValue(Map<String, Double> map) {
        List<Entry<String, Double>> data = new ArrayList<>(map.entrySet());
        // Double.compare instead of the subtraction idiom (clearer and NaN-safe)
        data.sort((o1, o2) -> Double.compare(o2.getValue(), o1.getValue()));
        Map<String, Double> result = Maps.newLinkedHashMap();
        for (Entry<String, Double> next : data) {
            result.put(next.getKey(), next.getValue());
        }
        return result;
    }

    /** Recursively flattens nested JSON objects into dot-separated keys ("a.b" -> value). */
    public static Map<String, Object> directFlatObject(JSONObject obj) {
        Map<String, Object> ret = new HashMap<>();
        if (obj == null) {
            return ret;
        }
        for (Entry<String, Object> entry : obj.entrySet()) {
            String key = entry.getKey();
            Object o = entry.getValue();
            if (o instanceof JSONObject) {
                Map<String, Object> m = directFlatObject((JSONObject) o);
                for (Entry<String, Object> e : m.entrySet()) {
                    ret.put(key + "." + e.getKey(), e.getValue());
                }
            } else {
                ret.put(key, o);
            }
        }
        return ret;
    }

    /** Parses a long; returns {@code null} for absent (per ValidateUtils.isNull) or malformed input. */
    public static Long string2Long(String s) {
        if (ValidateUtils.isNull(s)) {
            return null;
        }
        try {
            return Long.parseLong(s);
        } catch (NumberFormatException e) {
            // malformed number -> null by contract
        }
        return null;
    }

    /** Parses a float; returns {@code null} for absent (per ValidateUtils.isNull) or malformed input. */
    public static Float string2Float(String s) {
        if (ValidateUtils.isNull(s)) {
            return null;
        }
        try {
            return Float.parseFloat(s);
        } catch (NumberFormatException e) {
            // malformed number -> null by contract
        }
        return null;
    }

    /** Formats a float; returns {@code null} for absent input. */
    public static String float2String(Float f) {
        if (ValidateUtils.isNull(f)) {
            return null;
        }
        // String.valueOf never throws for a non-null Float; the old try/catch was dead code
        return String.valueOf(f);
    }

    /** Parses an int; returns {@code null} for null or malformed input. */
    public static Integer string2Integer(String s) {
        if (null == s) {
            return null;
        }
        try {
            return Integer.parseInt(s);
        } catch (NumberFormatException e) {
            // malformed number -> null by contract
        }
        return null;
    }

    /** Parses a double; returns {@code null} for null or malformed input. */
    public static Double string2Double(String s) {
        if (null == s) {
            return null;
        }
        try {
            return Double.parseDouble(s);
        } catch (NumberFormatException e) {
            // malformed number -> null by contract
        }
        return null;
    }

    /** Truncates to long; returns {@code null} for null input. */
    public static Long double2Long(Double d) {
        // Double.longValue() cannot throw, so the old try/catch was dead code
        return d == null ? null : d.longValue();
    }

    /** Truncates to int; returns {@code null} for null input. */
    public static Integer double2Int(Double d) {
        return d == null ? null : d.intValue();
    }

    /**
     * Truncates to long; returns {@code null} for null input.
     * (Method name keeps its historical capitalization for backward compatibility.)
     */
    public static Long Float2Long(Float f) {
        return f == null ? null : f.longValue();
    }
}

View File

@@ -15,6 +15,7 @@ import java.util.concurrent.ConcurrentHashMap;
* @author huangyiminghappy@163.com
* @date 2019/3/15
*/
@Deprecated
public class CopyUtils {
@SuppressWarnings({"unchecked", "rawtypes"})

View File

@@ -40,6 +40,14 @@ public class FutureUtil<T> {
return futureUtil;
}
    /**
     * Submits the callable straight to the backing executor, bypassing the
     * batch-and-wait helpers of this class; the caller owns the returned future.
     */
    public Future<T> directSubmitTask(Callable<T> callable) {
        return executor.submit(callable);
    }
    /**
     * Submits the runnable straight to the backing executor.
     * NOTE(review): submit(Runnable) yields a future whose get() returns null,
     * so the unchecked cast to Future&lt;T&gt; is only safe for callers that
     * ignore the result value.
     */
    public Future<T> directSubmitTask(Runnable runnable) {
        return (Future<T>) executor.submit(runnable);
    }
/**
* 必须配合 waitExecute使用 否则容易会撑爆内存
*/

View File

@@ -8,6 +8,8 @@ package com.xiaojukeji.kafka.manager.common.zookeeper;
public class ZkPathUtil {
private static final String ZOOKEEPER_SEPARATOR = "/";
public static final String CLUSTER_ID_NODE = ZOOKEEPER_SEPARATOR + "cluster/id";
public static final String BROKER_ROOT_NODE = ZOOKEEPER_SEPARATOR + "brokers";
public static final String CONTROLLER_ROOT_NODE = ZOOKEEPER_SEPARATOR + "controller";