Mirror of https://github.com/didi/KnowStreaming.git, synced 2026-01-09 00:14:30 +08:00
kafka-manager 2.0
@@ -0,0 +1,32 @@
package com.xiaojukeji.kafka.manager.kcm;

import com.xiaojukeji.kafka.manager.common.entity.pojo.ClusterTaskDO;
import com.xiaojukeji.kafka.manager.kcm.common.entry.ao.ClusterTaskStatus;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.ResultStatus;
import com.xiaojukeji.kafka.manager.kcm.common.entry.dto.AbstractClusterTaskDTO;
import com.xiaojukeji.kafka.manager.kcm.common.bizenum.ClusterTaskStateEnum;

import java.util.List;

/**
 * @author zengqiao
 * @date 20/5/19
 */
public interface ClusterTaskService {
    Result createTask(AbstractClusterTaskDTO dto, String operator);

    ClusterTaskDO getById(Long taskId);

    List<ClusterTaskDO> listAll();

    ResultStatus executeTask(Long taskId, String action, String hostname);

    Result<String> getTaskLog(Long taskId, String hostname);

    Result<ClusterTaskStatus> getTaskStatus(Long taskId);

    ClusterTaskStateEnum getTaskState(Long agentTaskId);

    int updateTaskState(Long taskId, Integer taskStatus);
}
@@ -0,0 +1,30 @@
package com.xiaojukeji.kafka.manager.kcm;

import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.ResultStatus;
import com.xiaojukeji.kafka.manager.common.entity.dto.normal.KafkaFileDTO;
import com.xiaojukeji.kafka.manager.common.entity.pojo.KafkaFileDO;

import java.util.List;

/**
 * @author zhongyuankai
 * @date 2020/5/7
 */
public interface KafkaFileService {
    ResultStatus uploadKafkaFile(KafkaFileDTO kafkaFileDTO, String userName);

    ResultStatus modifyKafkaFile(KafkaFileDTO kafkaFileDTO, String userName);

    ResultStatus deleteKafkaFile(Long id);

    List<KafkaFileDO> getKafkaFiles();

    KafkaFileDO getFileById(Long id);

    KafkaFileDO getFileByFileName(String fileName);

    Result<String> downloadKafkaConfigFile(Long fileId);

    String getDownloadBaseUrl();
}
@@ -0,0 +1,49 @@
package com.xiaojukeji.kafka.manager.kcm.common;

import com.xiaojukeji.kafka.manager.common.entity.pojo.ClusterTaskDO;
import com.xiaojukeji.kafka.manager.common.utils.ListUtils;
import com.xiaojukeji.kafka.manager.kcm.common.entry.ClusterTaskConstant;
import com.xiaojukeji.kafka.manager.kcm.common.entry.ao.CreationTaskData;

/**
 * @author zengqiao
 * @date 20/9/8
 */
public class Converters {
    public static ClusterTaskDO convert2ClusterTaskDO(Long agentId,
                                                      CreationTaskData creationTaskDTO,
                                                      String operator) {
        ClusterTaskDO clusterTaskDO = new ClusterTaskDO();
        clusterTaskDO.setUuid(creationTaskDTO.getUuid());
        clusterTaskDO.setClusterId(creationTaskDTO.getClusterId());
        clusterTaskDO.setTaskType(creationTaskDTO.getTaskType());
        clusterTaskDO.setKafkaPackage(creationTaskDTO.getKafkaPackageName());
        clusterTaskDO.setKafkaPackageMd5(creationTaskDTO.getKafkaPackageMd5());
        clusterTaskDO.setServerProperties(creationTaskDTO.getServerPropertiesName());
        clusterTaskDO.setServerPropertiesMd5(creationTaskDTO.getServerPropertiesMd5());
        clusterTaskDO.setAgentTaskId(agentId);
        clusterTaskDO.setAgentRollbackTaskId(ClusterTaskConstant.INVALID_AGENT_TASK_ID);
        clusterTaskDO.setHostList(ListUtils.strList2String(creationTaskDTO.getHostList()));
        clusterTaskDO.setPauseHostList(ListUtils.strList2String(creationTaskDTO.getPauseList()));
        clusterTaskDO.setRollbackHostList("");
        clusterTaskDO.setRollbackPauseHostList("");
        clusterTaskDO.setOperator(operator);
        return clusterTaskDO;
    }

    public static CreationTaskData convert2CreationTaskData(ClusterTaskDO clusterTaskDO) {
        CreationTaskData creationTaskData = new CreationTaskData();
        creationTaskData.setUuid(clusterTaskDO.getUuid());
        creationTaskData.setClusterId(clusterTaskDO.getClusterId());
        creationTaskData.setHostList(ListUtils.string2StrList(clusterTaskDO.getRollbackHostList()));
        creationTaskData.setPauseList(ListUtils.string2StrList(clusterTaskDO.getRollbackPauseHostList()));
        creationTaskData.setTaskType(ClusterTaskConstant.CLUSTER_ROLLBACK);
        creationTaskData.setKafkaPackageName("");
        creationTaskData.setKafkaPackageMd5("");
        creationTaskData.setKafkaPackageUrl("");
        creationTaskData.setServerPropertiesName("");
        creationTaskData.setServerPropertiesMd5("");
        creationTaskData.setServerPropertiesUrl("");
        return creationTaskData;
    }
}
@@ -0,0 +1,39 @@
package com.xiaojukeji.kafka.manager.kcm.common.bizenum;

/**
 * Task action
 * @author zengqiao
 * @date 20/4/26
 */
public enum ClusterTaskActionEnum {
    START(0, "start"),
    PAUSE(1, "pause"),
    IGNORE(2, "ignore"),
    CANCEL(3, "cancel"),
    ROLLBACK(4, "rollback"),
    ;

    private Integer code;

    private String message;

    ClusterTaskActionEnum(Integer code, String message) {
        this.code = code;
        this.message = message;
    }

    public Integer getCode() {
        return code;
    }

    public String getMessage() {
        return message;
    }

    @Override
    public String toString() {
        return "TaskActionEnum{" +
                "code=" + code +
                ", message='" + message + '\'' +
                '}';
    }
}
@@ -0,0 +1,47 @@
package com.xiaojukeji.kafka.manager.kcm.common.bizenum;

import com.xiaojukeji.kafka.manager.common.bizenum.TaskStatusEnum;

/**
 * @author zengqiao
 * @date 20/9/7
 */
public enum ClusterTaskStateEnum {
    RUNNING(TaskStatusEnum.RUNNING),
    BLOCKED(TaskStatusEnum.BLOCKED),
    FINISHED(TaskStatusEnum.FINISHED),
    ;

    private Integer code;

    private String message;

    ClusterTaskStateEnum(TaskStatusEnum statusEnum) {
        this.code = statusEnum.getCode();
        this.message = statusEnum.getMessage();
    }

    public Integer getCode() {
        return code;
    }

    public void setCode(Integer code) {
        this.code = code;
    }

    public String getMessage() {
        return message;
    }

    public void setMessage(String message) {
        this.message = message;
    }

    @Override
    public String toString() {
        return "ClusterTaskStateEnum{" +
                "code=" + code +
                ", message='" + message + '\'' +
                '}';
    }
}
@@ -0,0 +1,53 @@
package com.xiaojukeji.kafka.manager.kcm.common.bizenum;

import com.xiaojukeji.kafka.manager.common.bizenum.TaskStatusEnum;

/**
 * @author zengqiao
 * @date 20/9/7
 */
public enum ClusterTaskSubStateEnum {
    WAITING(TaskStatusEnum.WAITING),
    RUNNING(TaskStatusEnum.RUNNING),
    FAILED(TaskStatusEnum.FAILED),
    SUCCEED(TaskStatusEnum.SUCCEED),
    TIMEOUT(TaskStatusEnum.TIMEOUT),
    CANCELED(TaskStatusEnum.CANCELED),
    IGNORED(TaskStatusEnum.IGNORED),
    KILLING(TaskStatusEnum.KILLING),
    KILL_FAILED(TaskStatusEnum.KILL_FAILED),
    ;

    private Integer code;

    private String message;

    ClusterTaskSubStateEnum(TaskStatusEnum statusEnum) {
        this.code = statusEnum.getCode();
        this.message = statusEnum.getMessage();
    }

    public Integer getCode() {
        return code;
    }

    public void setCode(Integer code) {
        this.code = code;
    }

    public String getMessage() {
        return message;
    }

    public void setMessage(String message) {
        this.message = message;
    }

    @Override
    public String toString() {
        return "ClusterTaskSubState{" +
                "code=" + code +
                ", message='" + message + '\'' +
                '}';
    }
}
@@ -0,0 +1,110 @@
package com.xiaojukeji.kafka.manager.kcm.common.bizenum;

import com.xiaojukeji.kafka.manager.kcm.common.entry.ClusterTaskConstant;

/**
 * Cluster task type
 * @author zengqiao
 * @date 20/5/20
 */
public enum ClusterTaskTypeEnum {
    ROLE_UPGRADE(
            ClusterTaskConstant.UPGRADE,
            ClusterTaskConstant.CLUSTER_ROLE_UPGRADE,
            "集群升级(按角色)",
            ClusterTaskConstant.CLUSTER_ROLE_BEAN_NAME
    ),

    HOST_UPGRADE(
            ClusterTaskConstant.UPGRADE,
            ClusterTaskConstant.CLUSTER_HOST_UPGRADE,
            "集群升级(按主机)",
            ClusterTaskConstant.CLUSTER_HOST_BEAN_NAME
    ),

    DEPLOY(
            ClusterTaskConstant.DEPLOY,
            ClusterTaskConstant.CLUSTER_HOST_DEPLOY,
            "集群部署",
            ClusterTaskConstant.CLUSTER_HOST_BEAN_NAME
    ),

    EXPAND(
            ClusterTaskConstant.DEPLOY,
            ClusterTaskConstant.CLUSTER_HOST_EXPAND,
            "集群扩容",
            ClusterTaskConstant.CLUSTER_HOST_BEAN_NAME),

    ROLLBACK(
            ClusterTaskConstant.ROLLBACK,
            ClusterTaskConstant.CLUSTER_ROLLBACK,
            "集群回滚",
            ""),
    ;

    private int way;

    private String name;

    private String message;

    private String beanName;

    ClusterTaskTypeEnum(int way, String name, String message, String beanName) {
        this.way = way;
        this.name = name;
        this.message = message;
        this.beanName = beanName;
    }

    public int getWay() {
        return way;
    }

    public void setWay(int way) {
        this.way = way;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getMessage() {
        return message;
    }

    public void setMessage(String message) {
        this.message = message;
    }

    public String getBeanName() {
        return beanName;
    }

    public void setBeanName(String beanName) {
        this.beanName = beanName;
    }

    @Override
    public String toString() {
        return "ClusterTaskTypeEnum{" +
                "way=" + way +
                ", name='" + name + '\'' +
                ", message='" + message + '\'' +
                ", beanName='" + beanName + '\'' +
                '}';
    }

    public static ClusterTaskTypeEnum getByName(String name) {
        for (ClusterTaskTypeEnum elem: ClusterTaskTypeEnum.values()) {
            if (elem.getName().equals(name)) {
                return elem;
            }
        }
        return null;
    }
}
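ClusterTaskTypeEnum doubles as a dispatch table: getByName resolves the taskType string sent by a caller, and beanName names the Spring bean that handles that kind of task (ClusterTaskServiceImpl further down looks it up via SpringTool.getBean). A minimal sketch of that lookup, not part of this commit, using a hypothetical taskType value and an exception that stands in for the real error handling:

    // Illustrative only: resolve a taskType string to its handler bean name.
    ClusterTaskTypeEnum typeEnum = ClusterTaskTypeEnum.getByName("host_deploy");
    if (typeEnum == null) {
        // unknown taskType -> the service layer would return PARAM_ILLEGAL here
        throw new IllegalArgumentException("unsupported taskType");
    }
    String beanName = typeEnum.getBeanName();   // "clusterHostTaskService"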
@@ -0,0 +1,23 @@
package com.xiaojukeji.kafka.manager.kcm.common.entry;

/**
 * Constants for upgrade and deployment tasks
 * @author zengqiao
 * @date 20/5/20
 */
public class ClusterTaskConstant {
    public static final Long INVALID_AGENT_TASK_ID = -1L;

    public static final String CLUSTER_ROLE_UPGRADE = "role_upgrade";
    public static final String CLUSTER_HOST_UPGRADE = "host_upgrade";
    public static final String CLUSTER_HOST_DEPLOY = "host_deploy";
    public static final String CLUSTER_HOST_EXPAND = "host_expand";
    public static final String CLUSTER_ROLLBACK = "rollback";

    public static final String CLUSTER_ROLE_BEAN_NAME = "clusterRoleTaskService";
    public static final String CLUSTER_HOST_BEAN_NAME = "clusterHostTaskService";

    public static final Integer UPGRADE = 0;
    public static final Integer DEPLOY = 1;
    public static final Integer ROLLBACK = 2;
}
@@ -0,0 +1,84 @@
package com.xiaojukeji.kafka.manager.kcm.common.entry.ao;

import com.xiaojukeji.kafka.manager.kcm.common.bizenum.ClusterTaskStateEnum;

import java.util.List;

/**
 * @author zengqiao
 * @date 20/5/19
 */
public class ClusterTaskStatus {
    private Long taskId;

    private Long clusterId;

    private Boolean rollback;

    private ClusterTaskStateEnum status;

    private List<ClusterTaskSubStatus> subStatusList;

    public ClusterTaskStatus(Long taskId,
                             Long clusterId,
                             Boolean rollback,
                             ClusterTaskStateEnum status,
                             List<ClusterTaskSubStatus> subStatusList) {
        this.taskId = taskId;
        this.clusterId = clusterId;
        this.rollback = rollback;
        this.status = status;
        this.subStatusList = subStatusList;
    }

    public Long getTaskId() {
        return taskId;
    }

    public void setTaskId(Long taskId) {
        this.taskId = taskId;
    }

    public Long getClusterId() {
        return clusterId;
    }

    public void setClusterId(Long clusterId) {
        this.clusterId = clusterId;
    }

    public Boolean getRollback() {
        return rollback;
    }

    public void setRollback(Boolean rollback) {
        this.rollback = rollback;
    }

    public ClusterTaskStateEnum getStatus() {
        return status;
    }

    public void setStatus(ClusterTaskStateEnum status) {
        this.status = status;
    }

    public List<ClusterTaskSubStatus> getSubStatusList() {
        return subStatusList;
    }

    public void setSubStatusList(List<ClusterTaskSubStatus> subStatusList) {
        this.subStatusList = subStatusList;
    }

    @Override
    public String toString() {
        return "ClusterTaskStatus{" +
                "taskId=" + taskId +
                ", clusterId=" + clusterId +
                ", rollback=" + rollback +
                ", status=" + status +
                ", subStatusList=" + subStatusList +
                '}';
    }
}
@@ -0,0 +1,62 @@
package com.xiaojukeji.kafka.manager.kcm.common.entry.ao;

import com.xiaojukeji.kafka.manager.common.bizenum.KafkaBrokerRoleEnum;
import com.xiaojukeji.kafka.manager.kcm.common.bizenum.ClusterTaskSubStateEnum;

import java.util.List;

/**
 * @author zengqiao
 * @date 20/4/26
 */
public class ClusterTaskSubStatus {
    private Integer groupNum;

    private String hostname;

    private ClusterTaskSubStateEnum status;

    private List<KafkaBrokerRoleEnum> roleList;

    public Integer getGroupNum() {
        return groupNum;
    }

    public void setGroupNum(Integer groupNum) {
        this.groupNum = groupNum;
    }

    public String getHostname() {
        return hostname;
    }

    public void setHostname(String hostname) {
        this.hostname = hostname;
    }

    public ClusterTaskSubStateEnum getStatus() {
        return status;
    }

    public void setStatus(ClusterTaskSubStateEnum status) {
        this.status = status;
    }

    public List<KafkaBrokerRoleEnum> getRoleList() {
        return roleList;
    }

    public void setRoleList(List<KafkaBrokerRoleEnum> roleList) {
        this.roleList = roleList;
    }

    @Override
    public String toString() {
        return "ClusterTaskSubStatus{" +
                "groupNum=" + groupNum +
                ", hostname='" + hostname + '\'' +
                ", status=" + status +
                ", roleList=" + roleList +
                '}';
    }
}
@@ -0,0 +1,136 @@
package com.xiaojukeji.kafka.manager.kcm.common.entry.ao;

import java.util.List;

/**
 * @author zengqiao
 * @date 20/5/20
 */
public class CreationTaskData {
    private String uuid;

    private Long clusterId;

    private List<String> hostList;

    private List<String> pauseList;

    private String taskType;

    private String kafkaPackageName;

    private String kafkaPackageMd5;

    private String kafkaPackageUrl;

    private String serverPropertiesName;

    private String serverPropertiesMd5;

    private String serverPropertiesUrl;

    public String getUuid() {
        return uuid;
    }

    public void setUuid(String uuid) {
        this.uuid = uuid;
    }

    public Long getClusterId() {
        return clusterId;
    }

    public void setClusterId(Long clusterId) {
        this.clusterId = clusterId;
    }

    public List<String> getHostList() {
        return hostList;
    }

    public void setHostList(List<String> hostList) {
        this.hostList = hostList;
    }

    public List<String> getPauseList() {
        return pauseList;
    }

    public void setPauseList(List<String> pauseList) {
        this.pauseList = pauseList;
    }

    public String getTaskType() {
        return taskType;
    }

    public void setTaskType(String taskType) {
        this.taskType = taskType;
    }

    public String getKafkaPackageName() {
        return kafkaPackageName;
    }

    public void setKafkaPackageName(String kafkaPackageName) {
        this.kafkaPackageName = kafkaPackageName;
    }

    public String getKafkaPackageMd5() {
        return kafkaPackageMd5;
    }

    public void setKafkaPackageMd5(String kafkaPackageMd5) {
        this.kafkaPackageMd5 = kafkaPackageMd5;
    }

    public String getKafkaPackageUrl() {
        return kafkaPackageUrl;
    }

    public void setKafkaPackageUrl(String kafkaPackageUrl) {
        this.kafkaPackageUrl = kafkaPackageUrl;
    }

    public String getServerPropertiesName() {
        return serverPropertiesName;
    }

    public void setServerPropertiesName(String serverPropertiesName) {
        this.serverPropertiesName = serverPropertiesName;
    }

    public String getServerPropertiesMd5() {
        return serverPropertiesMd5;
    }

    public void setServerPropertiesMd5(String serverPropertiesMd5) {
        this.serverPropertiesMd5 = serverPropertiesMd5;
    }

    public String getServerPropertiesUrl() {
        return serverPropertiesUrl;
    }

    public void setServerPropertiesUrl(String serverPropertiesUrl) {
        this.serverPropertiesUrl = serverPropertiesUrl;
    }

    @Override
    public String toString() {
        return "CreationTaskDTO{" +
                "uuid='" + uuid + '\'' +
                ", clusterId=" + clusterId +
                ", hostList=" + hostList +
                ", pauseList=" + pauseList +
                ", taskType='" + taskType + '\'' +
                ", kafkaPackageName='" + kafkaPackageName + '\'' +
                ", kafkaPackageMd5='" + kafkaPackageMd5 + '\'' +
                ", kafkaPackageUrl='" + kafkaPackageUrl + '\'' +
                ", serverPropertiesName='" + serverPropertiesName + '\'' +
                ", serverPropertiesMd5='" + serverPropertiesMd5 + '\'' +
                ", serverPropertiesUrl='" + serverPropertiesUrl + '\'' +
                '}';
    }
}
@@ -0,0 +1,143 @@
package com.xiaojukeji.kafka.manager.kcm.common.entry.dto;

import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;

import com.xiaojukeji.kafka.manager.kcm.common.entry.ClusterTaskConstant;

import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;

import java.util.List;
import java.util.Map;

/**
 * @author zengqiao
 * @date 20/4/21
 */
@JsonTypeInfo(
        use = JsonTypeInfo.Id.NAME,
        include = JsonTypeInfo.As.PROPERTY,
        property = AbstractClusterTaskDTO.TASK_TYPE_PROPERTY_FIELD_NAME,
        visible = true
)
@JsonSubTypes({
        @JsonSubTypes.Type(value = ClusterRoleTaskDTO.class, name = ClusterTaskConstant.CLUSTER_ROLE_UPGRADE),
        @JsonSubTypes.Type(value = ClusterHostTaskDTO.class, name = ClusterTaskConstant.CLUSTER_HOST_UPGRADE),
        @JsonSubTypes.Type(value = ClusterHostTaskDTO.class, name = ClusterTaskConstant.CLUSTER_HOST_DEPLOY),
        @JsonSubTypes.Type(value = ClusterHostTaskDTO.class, name = ClusterTaskConstant.CLUSTER_HOST_EXPAND),
})
@JsonIgnoreProperties(ignoreUnknown = true)
@ApiModel(description="集群任务")
public abstract class AbstractClusterTaskDTO {
    /**
     * The taskType field must be present on AbstractClusterTaskDTO objects
     */
    public static final String TASK_TYPE_PROPERTY_FIELD_NAME = "taskType";

    @ApiModelProperty(value="集群ID")
    protected Long clusterId;

    @ApiModelProperty(value="任务类型")
    protected String taskType;

    @ApiModelProperty(value="Kafka包名称")
    protected String kafkaPackageName;

    @ApiModelProperty(value="Kafka包Md5")
    protected String kafkaPackageMd5;

    @ApiModelProperty(value="配置名称")
    protected String serverPropertiesName;

    @ApiModelProperty(value="配置Md5")
    protected String serverPropertiesMd5;

    @JsonIgnore
    private Map<String, List<String>> kafkaRoleBrokerHostMap;

    @JsonIgnore
    private String kafkaFileBaseUrl;

    public Long getClusterId() {
        return clusterId;
    }

    public void setClusterId(Long clusterId) {
        this.clusterId = clusterId;
    }

    public String getTaskType() {
        return taskType;
    }

    public void setTaskType(String taskType) {
        this.taskType = taskType;
    }

    public String getKafkaPackageName() {
        return kafkaPackageName;
    }

    public void setKafkaPackageName(String kafkaPackageName) {
        this.kafkaPackageName = kafkaPackageName;
    }

    public String getKafkaPackageMd5() {
        return kafkaPackageMd5;
    }

    public void setKafkaPackageMd5(String kafkaPackageMd5) {
        this.kafkaPackageMd5 = kafkaPackageMd5;
    }

    public String getServerPropertiesName() {
        return serverPropertiesName;
    }

    public void setServerPropertiesName(String serverPropertiesName) {
        this.serverPropertiesName = serverPropertiesName;
    }

    public String getServerPropertiesMd5() {
        return serverPropertiesMd5;
    }

    public void setServerPropertiesMd5(String serverPropertiesMd5) {
        this.serverPropertiesMd5 = serverPropertiesMd5;
    }

    public Map<String, List<String>> getKafkaRoleBrokerHostMap() {
        return kafkaRoleBrokerHostMap;
    }

    public void setKafkaRoleBrokerHostMap(Map<String, List<String>> kafkaRoleBrokerHostMap) {
        this.kafkaRoleBrokerHostMap = kafkaRoleBrokerHostMap;
    }

    public String getKafkaFileBaseUrl() {
        return kafkaFileBaseUrl;
    }

    public void setKafkaFileBaseUrl(String kafkaFileBaseUrl) {
        this.kafkaFileBaseUrl = kafkaFileBaseUrl;
    }

    @Override
    public String toString() {
        return "AbstractClusterTaskDTO{" +
                "clusterId=" + clusterId +
                ", taskType='" + taskType + '\'' +
                ", kafkaPackageName='" + kafkaPackageName + '\'' +
                ", kafkaPackageMd5='" + kafkaPackageMd5 + '\'' +
                ", serverPropertiesName='" + serverPropertiesName + '\'' +
                ", serverPropertiesMd5='" + serverPropertiesMd5 + '\'' +
                ", kafkaRoleBrokerHostMap=" + kafkaRoleBrokerHostMap +
                ", kafkaFileBaseUrl='" + kafkaFileBaseUrl + '\'' +
                '}';
    }

    public abstract boolean paramLegal();
}
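The @JsonTypeInfo/@JsonSubTypes setup above makes Jackson pick the concrete DTO class from the taskType property, while visible = true keeps that property bound on the resulting object. A minimal sketch of the binding, not part of this commit; the ObjectMapper usage is standard Jackson and the JSON sample values are hypothetical:

    // Illustrative only; assumes com.fasterxml.jackson.databind.ObjectMapper on the classpath.
    static void demoPolymorphicBinding() throws Exception {
        com.fasterxml.jackson.databind.ObjectMapper mapper = new com.fasterxml.jackson.databind.ObjectMapper();
        String json = "{\"taskType\":\"host_deploy\",\"clusterId\":1,\"hostList\":[\"10.0.0.1\"]}";
        AbstractClusterTaskDTO dto = mapper.readValue(json, AbstractClusterTaskDTO.class);
        // "host_deploy" is mapped to ClusterHostTaskDTO in @JsonSubTypes, and taskType stays populated.
        boolean isHostTask = dto instanceof ClusterHostTaskDTO;   // true
        String taskType = dto.getTaskType();                      // "host_deploy"
    }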
@@ -0,0 +1,54 @@
package com.xiaojukeji.kafka.manager.kcm.common.entry.dto;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;

import java.util.List;

/**
 * @author zengqiao
 * @date 20/5/20
 */
@JsonIgnoreProperties(ignoreUnknown = true)
@ApiModel(description="集群[部署|升级|扩容]任务")
public class ClusterHostTaskDTO extends AbstractClusterTaskDTO {
    @ApiModelProperty(value="主机列表")
    private List<String> hostList;

    public List<String> getHostList() {
        return hostList;
    }

    public void setHostList(List<String> hostList) {
        this.hostList = hostList;
    }

    @Override
    public String toString() {
        return "ClusterHostTaskDTO{" +
                "hostList=" + hostList +
                ", clusterId=" + clusterId +
                ", taskType='" + taskType + '\'' +
                ", kafkaPackageName='" + kafkaPackageName + '\'' +
                ", kafkaPackageMd5='" + kafkaPackageMd5 + '\'' +
                ", serverPropertiesName='" + serverPropertiesName + '\'' +
                ", serverPropertiesMd5='" + serverPropertiesMd5 + '\'' +
                '}';
    }

    @Override
    public boolean paramLegal() {
        if (ValidateUtils.isNull(clusterId)
                || ValidateUtils.isBlank(taskType)
                || ValidateUtils.isBlank(kafkaPackageName)
                || ValidateUtils.isBlank(kafkaPackageMd5)
                || ValidateUtils.isBlank(serverPropertiesName)
                || ValidateUtils.isBlank(serverPropertiesMd5)
                || ValidateUtils.isEmptyList(hostList)) {
            return false;
        }
        return true;
    }
}
@@ -0,0 +1,53 @@
package com.xiaojukeji.kafka.manager.kcm.common.entry.dto;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;

import java.util.List;

/**
 * @author zengqiao
 * @date 20/5/20
 */
@JsonIgnoreProperties(ignoreUnknown = true)
@ApiModel(description="集群升级任务")
public class ClusterRoleTaskDTO extends AbstractClusterTaskDTO {
    @ApiModelProperty(value="升级顺序")
    private List<String> upgradeSequenceList;

    @ApiModelProperty(value="忽略的主机")
    private List<String> ignoreList;

    public List<String> getUpgradeSequenceList() {
        return upgradeSequenceList;
    }

    public void setUpgradeSequenceList(List<String> upgradeSequenceList) {
        this.upgradeSequenceList = upgradeSequenceList;
    }

    public List<String> getIgnoreList() {
        return ignoreList;
    }

    public void setIgnoreList(List<String> ignoreList) {
        this.ignoreList = ignoreList;
    }

    @Override
    public boolean paramLegal() {
        if (ValidateUtils.isNull(clusterId)
                || ValidateUtils.isBlank(taskType)
                || ValidateUtils.isBlank(kafkaPackageName)
                || ValidateUtils.isBlank(kafkaPackageMd5)
                || ValidateUtils.isBlank(serverPropertiesName)
                || ValidateUtils.isBlank(serverPropertiesMd5)
                || ValidateUtils.isEmptyList(upgradeSequenceList)
                || ValidateUtils.isNull(ignoreList)) {
            return false;
        }
        return true;
    }
}
@@ -0,0 +1,64 @@
package com.xiaojukeji.kafka.manager.kcm.common.entry.dto;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;

/**
 * @author zengqiao
 * @date 20/4/27
 */
@JsonIgnoreProperties(ignoreUnknown = true)
@ApiModel(description="集群操作")
public class ClusterTaskActionDTO {
    @ApiModelProperty(value="任务ID")
    private Long taskId;

    @ApiModelProperty(value="动作")
    private String action;

    @ApiModelProperty(value="主机")
    private String hostname;

    public Long getTaskId() {
        return taskId;
    }

    public void setTaskId(Long taskId) {
        this.taskId = taskId;
    }

    public String getAction() {
        return action;
    }

    public void setAction(String action) {
        this.action = action;
    }

    public String getHostname() {
        return hostname;
    }

    public void setHostname(String hostname) {
        this.hostname = hostname;
    }

    @Override
    public String toString() {
        return "ClusterTaskActionDTO{" +
                "taskId=" + taskId +
                ", action='" + action + '\'' +
                ", hostname='" + hostname + '\'' +
                '}';
    }

    public boolean paramLegal() {
        if (ValidateUtils.isNull(taskId)
                || ValidateUtils.isBlank(action)) {
            return false;
        }
        return true;
    }
}
@@ -0,0 +1,45 @@
package com.xiaojukeji.kafka.manager.kcm.component.agent;

import com.xiaojukeji.kafka.manager.kcm.common.bizenum.ClusterTaskStateEnum;
import com.xiaojukeji.kafka.manager.kcm.common.bizenum.ClusterTaskSubStateEnum;
import com.xiaojukeji.kafka.manager.kcm.common.entry.ao.CreationTaskData;

import java.util.Map;

/**
 * Abstract agent
 * @author zengqiao
 * @date 20/4/26
 */
public abstract class AbstractAgent {
    /**
     * Create a task
     */
    public abstract Long createTask(CreationTaskData dto);

    /**
     * Act on a task
     */
    public abstract Boolean actionTask(Long taskId, String action);

    /**
     * Act on a task for a single host
     */
    public abstract Boolean actionHostTask(Long taskId, String action, String hostname);

    /**
     * Get the task state
     */
    public abstract ClusterTaskStateEnum getTaskState(Long agentTaskId);

    /**
     * Get the task result
     */
    public abstract Map<String, ClusterTaskSubStateEnum> getTaskResult(Long taskId);

    /**
     * Get the task log
     */
    public abstract String getTaskLog(Long agentTaskId, String hostname);
}
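AbstractAgent is the contract between the task service and the deployment backend; the N9e class that follows is registered as the "abstractAgent" bean and injected into ClusterTaskServiceImpl. A minimal sketch of how a caller composes these methods, not part of this commit; the agent and creationTaskData arguments and the hostname are hypothetical:

    // Illustrative only: submit a task, then poll its overall and per-host state.
    static void demoAgentUsage(AbstractAgent agent, CreationTaskData creationTaskData) {
        Long agentTaskId = agent.createTask(creationTaskData);                     // submit the task
        if (agentTaskId == null) {
            return;                                                                // agent call failed
        }
        ClusterTaskStateEnum state = agent.getTaskState(agentTaskId);              // overall task state
        java.util.Map<String, ClusterTaskSubStateEnum> perHost =
                agent.getTaskResult(agentTaskId);                                  // per-host sub-states
        String stdout = agent.getTaskLog(agentTaskId, "broker-01");                // stdout of one host
    }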
@@ -0,0 +1,225 @@
package com.xiaojukeji.kafka.manager.kcm.component.agent.n9e;

import com.alibaba.fastjson.JSON;
import com.xiaojukeji.kafka.manager.kcm.common.entry.ao.CreationTaskData;
import com.xiaojukeji.kafka.manager.common.utils.HttpUtils;
import com.xiaojukeji.kafka.manager.common.utils.JsonUtils;
import com.xiaojukeji.kafka.manager.common.utils.ListUtils;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.kcm.common.bizenum.ClusterTaskStateEnum;
import com.xiaojukeji.kafka.manager.kcm.common.bizenum.ClusterTaskSubStateEnum;
import com.xiaojukeji.kafka.manager.kcm.component.agent.AbstractAgent;
import com.xiaojukeji.kafka.manager.kcm.component.agent.n9e.entry.N9eResult;
import com.xiaojukeji.kafka.manager.kcm.component.agent.n9e.entry.N9eTaskResultDTO;
import com.xiaojukeji.kafka.manager.kcm.component.agent.n9e.entry.N9eTaskStatusEnum;
import com.xiaojukeji.kafka.manager.kcm.component.agent.n9e.entry.N9eTaskStdoutDTO;
import org.springframework.beans.factory.annotation.Value;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * @author zengqiao
 * @date 20/9/3
 */
@Service("abstractAgent")
public class N9e extends AbstractAgent {
    private static final Logger LOGGER = LoggerFactory.getLogger(N9e.class);

    @Value("${agent.n9e.base-url}")
    private String baseUrl;

    @Value("${agent.n9e.username}")
    private String username;

    @Value("${agent.n9e.user-token}")
    private String userToken;

    @Value("${agent.n9e.tpl-id}")
    private Integer tplId;

    @Value("${agent.n9e.timeout}")
    private Integer timeout;

    /**
     * Concurrency of 1: hosts are processed sequentially
     */
    private static final Integer BATCH = 1;

    /**
     * Failure tolerance is 0
     */
    private static final Integer TOLERANCE = 0;

    private static final String CREATE_TASK_URI = "/api/job-ce/tasks";

    private static final String ACTION_TASK_URI = "/api/job-ce/task/{taskId}/action";

    private static final String ACTION_HOST_TASK_URI = "/api/job-ce/task/{taskId}/host";

    private static final String TASK_STATE_URI = "/api/job-ce/task/{taskId}/state";

    private static final String TASK_SUB_STATE_URI = "/api/job-ce/task/{taskId}/result";

    private static final String TASK_STD_LOG_URI = "/api/job-ce/task/{taskId}/stdout.json";

    @Override
    public Long createTask(CreationTaskData dto) {
        StringBuilder sb = new StringBuilder();
        sb.append(dto.getKafkaPackageName()).append(",,").append(dto.getKafkaPackageMd5()).append(",,");
        sb.append(dto.getServerPropertiesName()).append(",,").append(dto.getServerPropertiesMd5());

        Map<String, Object> param = new HashMap<>();
        param.put("tpl_id", tplId);
        param.put("batch", BATCH);
        param.put("tolerance", TOLERANCE);
        param.put("timeout", timeout);
        param.put("hosts", dto.getHostList());
        param.put("pause", ListUtils.strList2String(dto.getPauseList()));
        param.put("action", "pause");
        param.put("args", sb.toString());

        String response = null;
        try {
            response = HttpUtils.postForString(
                    baseUrl + CREATE_TASK_URI,
                    JsonUtils.toJSONString(param),
                    buildHeader()
            );
            N9eResult zr = JSON.parseObject(response, N9eResult.class);
            if (!ValidateUtils.isBlank(zr.getErr())) {
                return null;
            }
            return Long.valueOf(zr.getDat().toString());
        } catch (Exception e) {
            LOGGER.error("create task failed, dto:{}.", dto, e);
        }
        return null;
    }

    @Override
    public Boolean actionTask(Long taskId, String action) {
        Map<String, Object> param = new HashMap<>(1);
        param.put("action", action);

        String response = null;
        try {
            response = HttpUtils.postForString(
                    baseUrl + ACTION_TASK_URI.replace("{taskId}", taskId.toString()),
                    JSON.toJSONString(param),
                    buildHeader()
            );
            N9eResult zr = JSON.parseObject(response, N9eResult.class);
            if (ValidateUtils.isBlank(zr.getErr())) {
                return true;
            }
            return false;
        } catch (Exception e) {
            LOGGER.error("action task failed, taskId:{}, action:{}.", taskId, action, e);
        }
        return false;
    }

    @Override
    public Boolean actionHostTask(Long taskId, String action, String hostname) {
        Map<String, Object> param = new HashMap<>(3);
        param.put("action", action);
        param.put("hostname", hostname);

        String response = null;
        try {
            response = HttpUtils.postForString(
                    baseUrl + ACTION_HOST_TASK_URI.replace("{taskId}", taskId.toString()),
                    JSON.toJSONString(param),
                    buildHeader()
            );
            N9eResult zr = JSON.parseObject(response, N9eResult.class);
            if (ValidateUtils.isBlank(zr.getErr())) {
                return true;
            }
            return false;
        } catch (Exception e) {
            LOGGER.error("action task failed, taskId:{}, action:{}, hostname:{}.", taskId, action, hostname, e);
        }
        return false;
    }

    @Override
    public ClusterTaskStateEnum getTaskState(Long agentTaskId) {
        String response = null;
        try {
            // Fetch the overall state of the task
            response = HttpUtils.get(
                    baseUrl + TASK_STATE_URI.replace("{taskId}", agentTaskId.toString()), null
            );
            N9eResult n9eResult = JSON.parseObject(response, N9eResult.class);
            if (!ValidateUtils.isBlank(n9eResult.getErr())) {
                LOGGER.error("get response result failed, agentTaskId:{} response:{}.", agentTaskId, response);
                return null;
            }
            String state = JSON.parseObject(JSON.toJSONString(n9eResult.getDat()), String.class);
            N9eTaskStatusEnum n9eTaskStatusEnum = N9eTaskStatusEnum.getByMessage(state);
            if (ValidateUtils.isNull(n9eTaskStatusEnum)) {
                LOGGER.error("get task status failed, agentTaskId:{} state:{}.", agentTaskId, state);
                return null;
            }
            return n9eTaskStatusEnum.getStatus();
        } catch (Exception e) {
            LOGGER.error("get task status failed, agentTaskId:{} response:{}.", agentTaskId, response, e);
        }
        return null;
    }

    @Override
    public Map<String, ClusterTaskSubStateEnum> getTaskResult(Long agentTaskId) {
        String response = null;
        try {
            // Fetch the state of each sub-task
            response = HttpUtils.get(baseUrl + TASK_SUB_STATE_URI.replace("{taskId}", agentTaskId.toString()), null);
            N9eResult n9eResult = JSON.parseObject(response, N9eResult.class);

            N9eTaskResultDTO n9eTaskResultDTO =
                    JSON.parseObject(JSON.toJSONString(n9eResult.getDat()), N9eTaskResultDTO.class);
            return n9eTaskResultDTO.convert2HostnameStatusMap();
        } catch (Exception e) {
            LOGGER.error("get task status failed, agentTaskId:{}.", agentTaskId, e);
        }
        return null;
    }

    @Override
    public String getTaskLog(Long agentTaskId, String hostname) {
        String response = null;
        try {
            Map<String, String> params = new HashMap<>(1);
            params.put("hostname", hostname);

            response = HttpUtils.get(baseUrl + TASK_STD_LOG_URI.replace("{taskId}", agentTaskId.toString()), params);
            N9eResult n9eResult = JSON.parseObject(response, N9eResult.class);
            if (!ValidateUtils.isBlank(n9eResult.getErr())) {
                LOGGER.error("get task log failed, agentTaskId:{} response:{}.", agentTaskId, response);
                return null;
            }
            List<N9eTaskStdoutDTO> dtoList =
                    JSON.parseArray(JSON.toJSONString(n9eResult.getDat()), N9eTaskStdoutDTO.class);
            if (ValidateUtils.isEmptyList(dtoList)) {
                return "";
            }
            return dtoList.get(0).getStdout();
        } catch (Exception e) {
            LOGGER.error("get task log failed, agentTaskId:{}.", agentTaskId, e);
        }
        return null;
    }

    private Map<String, String> buildHeader() {
        Map<String,String> headers = new HashMap<>(1);
        headers.put("Content-Type", "application/json;charset=UTF-8");
        headers.put("X-User-Token", userToken);
        return headers;
    }
}
@@ -0,0 +1,35 @@
package com.xiaojukeji.kafka.manager.kcm.component.agent.n9e.entry;

/**
 * @author zengqiao
 * @date 20/9/3
 */
public class N9eResult {
    private String err;

    private Object dat;

    public String getErr() {
        return err;
    }

    public void setErr(String err) {
        this.err = err;
    }

    public Object getDat() {
        return dat;
    }

    public void setDat(Object dat) {
        this.dat = dat;
    }

    @Override
    public String toString() {
        return "N9eResult{" +
                "err='" + err + '\'' +
                ", dat=" + dat +
                '}';
    }
}
@@ -0,0 +1,187 @@
package com.xiaojukeji.kafka.manager.kcm.component.agent.n9e.entry;

import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.kcm.common.bizenum.ClusterTaskSubStateEnum;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * @author zengqiao
 * @date 20/9/7
 */
public class N9eTaskResultDTO {
    private List<String> waiting;

    private List<String> running;

    private List<String> failed;

    private List<String> success;

    private List<String> timeout;

    private List<String> cancelled;

    private List<String> ignored;

    private List<String> killing;

    private List<String> kill_failed;

    public List<String> getWaiting() {
        return waiting;
    }

    public void setWaiting(List<String> waiting) {
        this.waiting = waiting;
    }

    public List<String> getRunning() {
        return running;
    }

    public void setRunning(List<String> running) {
        this.running = running;
    }

    public List<String> getFailed() {
        return failed;
    }

    public void setFailed(List<String> failed) {
        this.failed = failed;
    }

    public List<String> getSuccess() {
        return success;
    }

    public void setSuccess(List<String> success) {
        this.success = success;
    }

    public List<String> getTimeout() {
        return timeout;
    }

    public void setTimeout(List<String> timeout) {
        this.timeout = timeout;
    }

    public List<String> getCancelled() {
        return cancelled;
    }

    public void setCancelled(List<String> cancelled) {
        this.cancelled = cancelled;
    }

    public List<String> getIgnored() {
        return ignored;
    }

    public void setIgnored(List<String> ignored) {
        this.ignored = ignored;
    }

    public List<String> getKilling() {
        return killing;
    }

    public void setKilling(List<String> killing) {
        this.killing = killing;
    }

    public List<String> getKill_failed() {
        return kill_failed;
    }

    public void setKill_failed(List<String> kill_failed) {
        this.kill_failed = kill_failed;
    }

    @Override
    public String toString() {
        return "N9eTaskResultDTO{" +
                "waiting=" + waiting +
                ", running=" + running +
                ", failed=" + failed +
                ", success=" + success +
                ", timeout=" + timeout +
                ", cancelled=" + cancelled +
                ", ignored=" + ignored +
                ", killing=" + killing +
                ", kill_failed=" + kill_failed +
                '}';
    }

    public Map<String, ClusterTaskSubStateEnum> convert2HostnameStatusMap() {
        Map<String, ClusterTaskSubStateEnum> hostnameStatusMap = new HashMap<>();
        if (ValidateUtils.isNull(waiting)) {
            waiting = new ArrayList<>();
        }
        for (String hostname: waiting) {
            hostnameStatusMap.put(hostname, ClusterTaskSubStateEnum.WAITING);
        }

        if (ValidateUtils.isNull(running)) {
            running = new ArrayList<>();
        }
        for (String hostname: running) {
            hostnameStatusMap.put(hostname, ClusterTaskSubStateEnum.RUNNING);
        }

        if (ValidateUtils.isNull(failed)) {
            failed = new ArrayList<>();
        }
        for (String hostname: failed) {
            hostnameStatusMap.put(hostname, ClusterTaskSubStateEnum.FAILED);
        }

        if (ValidateUtils.isNull(success)) {
            success = new ArrayList<>();
        }
        for (String hostname: success) {
            hostnameStatusMap.put(hostname, ClusterTaskSubStateEnum.SUCCEED);
        }

        if (ValidateUtils.isNull(timeout)) {
            timeout = new ArrayList<>();
        }
        for (String hostname: timeout) {
            hostnameStatusMap.put(hostname, ClusterTaskSubStateEnum.TIMEOUT);
        }

        if (ValidateUtils.isNull(cancelled)) {
            cancelled = new ArrayList<>();
        }
        for (String hostname: cancelled) {
            hostnameStatusMap.put(hostname, ClusterTaskSubStateEnum.CANCELED);
        }

        if (ValidateUtils.isNull(ignored)) {
            ignored = new ArrayList<>();
        }
        for (String hostname: ignored) {
            hostnameStatusMap.put(hostname, ClusterTaskSubStateEnum.IGNORED);
        }

        if (ValidateUtils.isNull(killing)) {
            killing = new ArrayList<>();
        }
        for (String hostname: killing) {
            hostnameStatusMap.put(hostname, ClusterTaskSubStateEnum.KILLING);
        }

        if (ValidateUtils.isNull(kill_failed)) {
            kill_failed = new ArrayList<>();
        }
        for (String hostname: kill_failed) {
            hostnameStatusMap.put(hostname, ClusterTaskSubStateEnum.KILL_FAILED);
        }
        return hostnameStatusMap;
    }
}
@@ -0,0 +1,59 @@
package com.xiaojukeji.kafka.manager.kcm.component.agent.n9e.entry;

import com.xiaojukeji.kafka.manager.kcm.common.bizenum.ClusterTaskStateEnum;

/**
 * @author zengqiao
 * @date 20/9/3
 */
public enum N9eTaskStatusEnum {
    DONE(0, "done", ClusterTaskStateEnum.FINISHED),
    PAUSE(1, "pause", ClusterTaskStateEnum.BLOCKED),
    START(2, "start", ClusterTaskStateEnum.RUNNING),
    ;

    private Integer code;

    private String message;

    private ClusterTaskStateEnum status;

    N9eTaskStatusEnum(Integer code, String message, ClusterTaskStateEnum status) {
        this.code = code;
        this.message = message;
        this.status = status;
    }

    public Integer getCode() {
        return code;
    }

    public void setCode(Integer code) {
        this.code = code;
    }

    public String getMessage() {
        return message;
    }

    public void setMessage(String message) {
        this.message = message;
    }

    public ClusterTaskStateEnum getStatus() {
        return status;
    }

    public void setStatus(ClusterTaskStateEnum status) {
        this.status = status;
    }

    public static N9eTaskStatusEnum getByMessage(String message) {
        for (N9eTaskStatusEnum elem: N9eTaskStatusEnum.values()) {
            if (elem.message.equals(message)) {
                return elem;
            }
        }
        return null;
    }
}
@@ -0,0 +1,35 @@
package com.xiaojukeji.kafka.manager.kcm.component.agent.n9e.entry;

/**
 * @author zengqiao
 * @date 20/9/7
 */
public class N9eTaskStdoutDTO {
    private String host;

    private String stdout;

    public String getHost() {
        return host;
    }

    public void setHost(String host) {
        this.host = host;
    }

    public String getStdout() {
        return stdout;
    }

    public void setStdout(String stdout) {
        this.stdout = stdout;
    }

    @Override
    public String toString() {
        return "N9eTaskStdoutDTO{" +
                "host='" + host + '\'' +
                ", stdout='" + stdout + '\'' +
                '}';
    }
}
@@ -0,0 +1,25 @@
package com.xiaojukeji.kafka.manager.kcm.component.storage;

import com.xiaojukeji.kafka.manager.common.entity.Result;
import org.springframework.web.multipart.MultipartFile;

/**
 * @author zengqiao
 * @date 20/4/29
 */
public abstract class AbstractStorageService {
    /**
     * Upload
     */
    public abstract boolean upload(String fileName, String fileMd5, MultipartFile uploadFile);

    /**
     * Download
     */
    public abstract Result<String> download(String fileName, String fileMd5);

    /**
     * Download base URL
     */
    public abstract String getDownloadBaseUrl();
}
@@ -0,0 +1,38 @@
package com.xiaojukeji.kafka.manager.kcm.component.storage.common;

/**
 * File storage type
 * @author zengqiao
 * @date 20/4/29
 */
public enum StorageEnum {
    GIFT(0, "gift"),
    GIT(1, "git"),
    S3(2, "S3"),
    ;

    private Integer code;

    private String message;

    StorageEnum(Integer code, String message) {
        this.code = code;
        this.message = message;
    }

    public Integer getCode() {
        return code;
    }

    public String getMessage() {
        return message;
    }

    @Override
    public String toString() {
        return "StorageEnum{" +
                "code=" + code +
                ", message='" + message + '\'' +
                '}';
    }
}
@@ -0,0 +1,29 @@
package com.xiaojukeji.kafka.manager.kcm.component.storage.local;

import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.ResultStatus;
import org.springframework.stereotype.Service;
import com.xiaojukeji.kafka.manager.kcm.component.storage.AbstractStorageService;
import org.springframework.web.multipart.MultipartFile;

/**
 * @author zengqiao
 * @date 20/9/17
 */
@Service("storageService")
public class Local extends AbstractStorageService {
    @Override
    public boolean upload(String fileName, String fileMd5, MultipartFile uploadFile) {
        return false;
    }

    @Override
    public Result<String> download(String fileName, String fileMd5) {
        return Result.buildFrom(ResultStatus.DOWNLOAD_FILE_FAIL);
    }

    @Override
    public String getDownloadBaseUrl() {
        return "";
    }
}
@@ -0,0 +1,310 @@
package com.xiaojukeji.kafka.manager.kcm.impl;

import com.xiaojukeji.kafka.manager.common.utils.ListUtils;
import com.xiaojukeji.kafka.manager.common.utils.SpringTool;
import com.xiaojukeji.kafka.manager.kcm.ClusterTaskService;
import com.xiaojukeji.kafka.manager.kcm.common.Converters;
import com.xiaojukeji.kafka.manager.kcm.common.bizenum.ClusterTaskActionEnum;
import com.xiaojukeji.kafka.manager.kcm.common.entry.ClusterTaskConstant;
import com.xiaojukeji.kafka.manager.kcm.common.entry.ao.ClusterTaskSubStatus;
import com.xiaojukeji.kafka.manager.kcm.common.bizenum.ClusterTaskStateEnum;
import com.xiaojukeji.kafka.manager.kcm.common.bizenum.ClusterTaskSubStateEnum;
import com.xiaojukeji.kafka.manager.kcm.component.agent.AbstractAgent;
import com.xiaojukeji.kafka.manager.kcm.common.entry.ao.CreationTaskData;
import com.xiaojukeji.kafka.manager.kcm.common.entry.ao.ClusterTaskStatus;
import com.xiaojukeji.kafka.manager.kcm.common.bizenum.ClusterTaskTypeEnum;
import com.xiaojukeji.kafka.manager.common.constant.Constant;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.ResultStatus;
import com.xiaojukeji.kafka.manager.kcm.common.entry.dto.AbstractClusterTaskDTO;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.dao.ClusterTaskDao;
import com.xiaojukeji.kafka.manager.common.entity.pojo.ClusterTaskDO;
import com.xiaojukeji.kafka.manager.kcm.tasks.AbstractClusterTaskService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.util.*;

/**
 * @author zengqiao
 * @date 20/5/19
 */
@Service("clusterTaskService")
public class ClusterTaskServiceImpl implements ClusterTaskService {
    private final static Logger LOGGER = LoggerFactory.getLogger(ClusterTaskServiceImpl.class);

    @Autowired
    private AbstractAgent abstractAgent;

    @Autowired
    private ClusterTaskDao clusterTaskDao;

    @Override
    public Result createTask(AbstractClusterTaskDTO dto, String operator) {
        if (!dto.paramLegal()) {
            return Result.buildFrom(ResultStatus.PARAM_ILLEGAL);
        }

        ClusterTaskTypeEnum taskTypeEnum = ClusterTaskTypeEnum.getByName(dto.getTaskType());
        if (ValidateUtils.isNull(taskTypeEnum)) {
            return Result.buildFrom(ResultStatus.PARAM_ILLEGAL);
        }

        AbstractClusterTaskService abstractClusterTaskService =
                SpringTool.getBean(taskTypeEnum.getBeanName());

        // Build the task creation parameters
        Result<CreationTaskData> dtoResult = abstractClusterTaskService.getCreateTaskParamDTO(dto);
        if (!Constant.SUCCESS.equals(dtoResult.getCode())) {
            return dtoResult;
        }

        // Create the task on the agent
        Long agentTaskId = abstractAgent.createTask(dtoResult.getData());
        if (ValidateUtils.isNull(agentTaskId)) {
            return Result.buildFrom(ResultStatus.CALL_CLUSTER_TASK_AGENT_FAILED);
        }

        try {
            if (clusterTaskDao.insert(Converters.convert2ClusterTaskDO(agentTaskId, dtoResult.getData(), operator)) > 0) {
                return Result.buildFrom(ResultStatus.SUCCESS);
            }
        } catch (Exception e) {
            LOGGER.error("create cluster task failed, clusterTask:{}.", dto, e);
        }
        return Result.buildFrom(ResultStatus.MYSQL_ERROR);
    }

    @Override
    public ResultStatus executeTask(Long taskId, String action, String hostname) {
        ClusterTaskDO clusterTaskDO = this.getById(taskId);
        if (ValidateUtils.isNull(clusterTaskDO)) {
            return ResultStatus.RESOURCE_NOT_EXIST;
        }
        Long agentTaskId = getActiveAgentTaskId(clusterTaskDO);
        Boolean rollback = inRollback(clusterTaskDO);

        ClusterTaskStateEnum stateEnum = abstractAgent.getTaskState(agentTaskId);
        if (ClusterTaskActionEnum.START.getMessage().equals(action)
                && ClusterTaskStateEnum.BLOCKED.equals(stateEnum)) {
            // A paused task can be started
            return actionTaskExceptRollbackAction(agentTaskId, action, "");
        }
        if (ClusterTaskActionEnum.PAUSE.getMessage().equals(action)
                && ClusterTaskStateEnum.RUNNING.equals(stateEnum)) {
            // A running task can be paused
            return actionTaskExceptRollbackAction(agentTaskId, action, "");
        }
        if (ClusterTaskActionEnum.IGNORE.getMessage().equals(action)
                || ClusterTaskActionEnum.CANCEL.getMessage().equals(action)) {
            // Ignore & cancel can be executed at any time
            return actionTaskExceptRollbackAction(agentTaskId, action, hostname);
        }
        if ((!ClusterTaskStateEnum.FINISHED.equals(stateEnum) || !rollback)
                && ClusterTaskActionEnum.ROLLBACK.getMessage().equals(action)) {
            // Rollback is allowed while the task is not yet finished; all hosts that were already processed are rolled back to the previous version
            return actionTaskRollback(clusterTaskDO);
        }
        return ResultStatus.OPERATION_FAILED;
    }

    private ResultStatus actionTaskExceptRollbackAction(Long agentId, String action, String hostname) {
        if (!ValidateUtils.isBlank(hostname)) {
            return actionHostTaskExceptRollbackAction(agentId, action, hostname);
        }
        if (abstractAgent.actionTask(agentId, action)) {
            return ResultStatus.SUCCESS;
        }
        return ResultStatus.OPERATION_FAILED;
    }

    private ResultStatus actionHostTaskExceptRollbackAction(Long agentId, String action, String hostname) {
        if (abstractAgent.actionHostTask(agentId, action, hostname)) {
            return ResultStatus.SUCCESS;
        }
        return ResultStatus.OPERATION_FAILED;
    }

    private ResultStatus actionTaskRollback(ClusterTaskDO clusterTaskDO) {
        if (!ClusterTaskConstant.INVALID_AGENT_TASK_ID.equals(clusterTaskDO.getAgentRollbackTaskId())) {
            return ResultStatus.OPERATION_FORBIDDEN;
        }

        Map<String, ClusterTaskSubStateEnum> subStatusEnumMap =
                abstractAgent.getTaskResult(clusterTaskDO.getAgentTaskId());
        if (ValidateUtils.isNull(subStatusEnumMap)) {
            return ResultStatus.CALL_CLUSTER_TASK_AGENT_FAILED;
        }

        // Rollback follows the same order as the upgrade; only hosts that were already processed are rolled back
        List<String> rollbackHostList = new ArrayList<>();
        List<String> rollbackPauseHostList = new ArrayList<>();
        for (String host: ListUtils.string2StrList(clusterTaskDO.getHostList())) {
            ClusterTaskSubStateEnum subStateEnum = subStatusEnumMap.get(host);
            if (ValidateUtils.isNull(subStateEnum)) {
                // Failed to look up the sub-task for this host
                return ResultStatus.OPERATION_FAILED;
            }
            if (ClusterTaskSubStateEnum.WAITING.equals(subStateEnum)) {
                break;
            }

            if (rollbackPauseHostList.isEmpty()) {
                rollbackPauseHostList.add(host);
            }
            rollbackHostList.add(host);
        }
        if (ValidateUtils.isEmptyList(rollbackHostList)) {
            // No hosts need to be rolled back; return operation failed
            return ResultStatus.OPERATION_FAILED;
        }

        clusterTaskDO.setRollbackHostList(ListUtils.strList2String(rollbackHostList));
        clusterTaskDO.setRollbackPauseHostList(ListUtils.strList2String(rollbackPauseHostList));

        // Create the rollback task on the agent
        Long agentTaskId = abstractAgent.createTask(Converters.convert2CreationTaskData(clusterTaskDO));
        if (ValidateUtils.isNull(agentTaskId)) {
            return ResultStatus.CALL_CLUSTER_TASK_AGENT_FAILED;
        }

        try {
            clusterTaskDO.setAgentRollbackTaskId(agentTaskId);
            if (clusterTaskDao.updateRollback(clusterTaskDO) <= 0) {
                return ResultStatus.MYSQL_ERROR;
            }
            abstractAgent.actionTask(clusterTaskDO.getAgentTaskId(), ClusterTaskActionEnum.CANCEL.getMessage());
            return ResultStatus.SUCCESS;
        } catch (Exception e) {
            LOGGER.error("create cluster task failed, clusterTaskDO:{}.", clusterTaskDO, e);
        }
        return ResultStatus.MYSQL_ERROR;
    }

    @Override
    public Result<String> getTaskLog(Long taskId, String hostname) {
        ClusterTaskDO clusterTaskDO = this.getById(taskId);
        if (ValidateUtils.isNull(clusterTaskDO)) {
            return Result.buildFrom(ResultStatus.TASK_NOT_EXIST);
        }

        String stdoutLog = abstractAgent.getTaskLog(getActiveAgentTaskId(clusterTaskDO, hostname), hostname);
        if (ValidateUtils.isNull(stdoutLog)) {
            return Result.buildFrom(ResultStatus.CALL_CLUSTER_TASK_AGENT_FAILED);
        }
        return new Result<>(stdoutLog);
    }

    @Override
    public Result<ClusterTaskStatus> getTaskStatus(Long taskId) {
        ClusterTaskDO clusterTaskDO = this.getById(taskId);
        if (ValidateUtils.isNull(clusterTaskDO)) {
            return Result.buildFrom(ResultStatus.TASK_NOT_EXIST);
        }

        return new Result<>(new ClusterTaskStatus(
                clusterTaskDO.getId(),
                clusterTaskDO.getClusterId(),
                inRollback(clusterTaskDO),
                abstractAgent.getTaskState(getActiveAgentTaskId(clusterTaskDO)),
                getTaskSubStatus(clusterTaskDO)
        ));
    }

    @Override
    public ClusterTaskStateEnum getTaskState(Long agentTaskId) {
        return abstractAgent.getTaskState(agentTaskId);
    }

    private List<ClusterTaskSubStatus> getTaskSubStatus(ClusterTaskDO clusterTaskDO) {
        Map<String, ClusterTaskSubStateEnum> statusMap = this.getClusterTaskSubState(clusterTaskDO);
        if (ValidateUtils.isNull(statusMap)) {
            return null;
        }
        List<String> pauseList = ListUtils.string2StrList(clusterTaskDO.getPauseHostList());

        int groupNum = 0;
        List<ClusterTaskSubStatus> subStatusList = new ArrayList<>();
        for (String host: ListUtils.string2StrList(clusterTaskDO.getHostList())) {
|
||||
ClusterTaskSubStatus subStatus = new ClusterTaskSubStatus();
|
||||
subStatus.setHostname(host);
|
||||
subStatus.setStatus(statusMap.get(host));
|
||||
subStatus.setGroupNum(groupNum);
|
||||
if (pauseList.size()> groupNum && pauseList.get(groupNum).equals(host)) {
|
||||
groupNum += 1;
|
||||
}
|
||||
subStatusList.add(subStatus);
|
||||
}
|
||||
return subStatusList;
|
||||
}
|
||||
|
||||
private Map<String, ClusterTaskSubStateEnum> getClusterTaskSubState(ClusterTaskDO clusterTaskDO) {
|
||||
Map<String, ClusterTaskSubStateEnum> statusMap = abstractAgent.getTaskResult(clusterTaskDO.getAgentTaskId());
|
||||
if (ValidateUtils.isNull(statusMap)) {
|
||||
return null;
|
||||
}
|
||||
if (!inRollback(clusterTaskDO)) {
|
||||
return statusMap;
|
||||
}
|
||||
|
||||
Map<String, ClusterTaskSubStateEnum> rollbackStatusMap =
|
||||
abstractAgent.getTaskResult(clusterTaskDO.getAgentRollbackTaskId());
|
||||
if (ValidateUtils.isNull(rollbackStatusMap)) {
|
||||
return null;
|
||||
}
|
||||
statusMap.putAll(rollbackStatusMap);
|
||||
return statusMap;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ClusterTaskDO getById(Long taskId) {
|
||||
try {
|
||||
return clusterTaskDao.getById(taskId);
|
||||
} catch (Exception e) {
|
||||
LOGGER.error("get cluster task failed, taskId:{}.", taskId);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<ClusterTaskDO> listAll() {
|
||||
try {
|
||||
return clusterTaskDao.listAll();
|
||||
} catch (Exception e) {
|
||||
LOGGER.error("get all cluster task failed.");
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int updateTaskState(Long taskId, Integer taskStatus) {
|
||||
return clusterTaskDao.updateTaskState(taskId, taskStatus);
|
||||
}
|
||||
|
||||
private Long getActiveAgentTaskId(ClusterTaskDO clusterTaskDO) {
|
||||
if (ClusterTaskConstant.INVALID_AGENT_TASK_ID.equals(clusterTaskDO.getAgentRollbackTaskId())) {
|
||||
return clusterTaskDO.getAgentTaskId();
|
||||
}
|
||||
return clusterTaskDO.getAgentRollbackTaskId();
|
||||
}
|
||||
|
||||
private Long getActiveAgentTaskId(ClusterTaskDO clusterTaskDO, String hostname) {
|
||||
if (ClusterTaskConstant.INVALID_AGENT_TASK_ID.equals(clusterTaskDO.getAgentRollbackTaskId())) {
|
||||
return clusterTaskDO.getAgentTaskId();
|
||||
}
|
||||
if (ListUtils.string2StrList(clusterTaskDO.getRollbackHostList()).contains(hostname)) {
|
||||
return clusterTaskDO.getAgentRollbackTaskId();
|
||||
}
|
||||
return clusterTaskDO.getAgentTaskId();
|
||||
}
|
||||
|
||||
private boolean inRollback(ClusterTaskDO clusterTaskDO) {
|
||||
if (ClusterTaskConstant.INVALID_AGENT_TASK_ID.equals(clusterTaskDO.getAgentRollbackTaskId())) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
}
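Note on executeTask above: each action is gated on the agent task state — START only when the task is BLOCKED, PAUSE only when it is RUNNING, IGNORE and CANCEL at any time, and ROLLBACK only while the task is not both finished and already rolling back. As a rough illustration (this helper is not part of the commit; it only restates the conditions above), the same rules could be written as:

    // Hypothetical helper mirroring the gating in executeTask; not part of this change.
    static boolean isActionAllowed(ClusterTaskActionEnum action,
                                   ClusterTaskStateEnum state,
                                   boolean inRollback) {
        switch (action) {
            case START:
                return ClusterTaskStateEnum.BLOCKED.equals(state);   // paused -> may start
            case PAUSE:
                return ClusterTaskStateEnum.RUNNING.equals(state);   // running -> may pause
            case IGNORE:
            case CANCEL:
                return true;                                         // always allowed
            case ROLLBACK:
                return !ClusterTaskStateEnum.FINISHED.equals(state) || !inRollback;
            default:
                return false;
        }
    }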
@@ -0,0 +1,180 @@
package com.xiaojukeji.kafka.manager.kcm.impl;

import com.xiaojukeji.kafka.manager.common.bizenum.KafkaFileEnum;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.ResultStatus;
import com.xiaojukeji.kafka.manager.common.entity.dto.normal.KafkaFileDTO;
import com.xiaojukeji.kafka.manager.common.utils.CopyUtils;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.dao.KafkaFileDao;
import com.xiaojukeji.kafka.manager.common.entity.pojo.KafkaFileDO;
import com.xiaojukeji.kafka.manager.kcm.component.storage.AbstractStorageService;
import com.xiaojukeji.kafka.manager.kcm.KafkaFileService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.dao.DuplicateKeyException;
import org.springframework.stereotype.Service;

import java.util.ArrayList;
import java.util.List;

/**
 * @author zhongyuankai
 * @date 2020/5/7
 */
@Service("kafkaFileService")
public class KafkaFileServiceImpl implements KafkaFileService {
    private final static Logger LOGGER = LoggerFactory.getLogger(KafkaFileServiceImpl.class);

    @Autowired
    private KafkaFileDao kafkaFileDao;

    @Autowired
    private AbstractStorageService storageService;

    @Override
    public ResultStatus uploadKafkaFile(KafkaFileDTO kafkaFileDTO, String username) {
        if (!kafkaFileDTO.createParamLegal()) {
            return ResultStatus.PARAM_ILLEGAL;
        }

        KafkaFileDO kafkaFileDO = new KafkaFileDO();
        CopyUtils.copyProperties(kafkaFileDO, kafkaFileDTO);
        kafkaFileDO.setOperator(username);
        try {
            if (kafkaFileDao.insert(kafkaFileDO) <= 0) {
                return ResultStatus.MYSQL_ERROR;
            }
            if (!storageService.upload(
                    kafkaFileDTO.getFileName(),
                    kafkaFileDTO.getFileMd5(),
                    kafkaFileDTO.getUploadFile())
            ) {
                kafkaFileDao.deleteById(kafkaFileDO.getId());
                return ResultStatus.UPLOAD_FILE_FAIL;
            }
            return ResultStatus.SUCCESS;
        } catch (DuplicateKeyException e) {
            return ResultStatus.RESOURCE_ALREADY_EXISTED;
        } catch (Exception e) {
            LOGGER.error("upload kafka file failed, kafkaFileDTO:{}.", kafkaFileDTO, e);
        }
        return ResultStatus.MYSQL_ERROR;
    }

    @Override
    public ResultStatus modifyKafkaFile(KafkaFileDTO kafkaFileDTO, String userName) {
        if (ValidateUtils.isNull(kafkaFileDTO) || !kafkaFileDTO.modifyParamLegal()) {
            return ResultStatus.PARAM_ILLEGAL;
        }

        KafkaFileDO kafkaFileDO = null;
        try {
            kafkaFileDO = kafkaFileDao.getById(kafkaFileDTO.getId());
            if (ValidateUtils.isNull(kafkaFileDO)) {
                return ResultStatus.RESOURCE_NOT_EXIST;
            }
            KafkaFileEnum kafkaFileEnum = KafkaFileEnum.getByCode(kafkaFileDO.getFileType());
            if (ValidateUtils.isNull(kafkaFileEnum)) {
                return ResultStatus.OPERATION_FAILED;
            }
            if (!kafkaFileDTO.getFileName().endsWith(kafkaFileEnum.getSuffix())) {
                return ResultStatus.OPERATION_FAILED;
            }

            KafkaFileDO newKafkaFileDO = new KafkaFileDO();
            newKafkaFileDO.setId(kafkaFileDO.getId());
            newKafkaFileDO.setFileName(kafkaFileDTO.getFileName());
            newKafkaFileDO.setFileMd5(kafkaFileDTO.getFileMd5());
            newKafkaFileDO.setDescription(kafkaFileDTO.getDescription());
            newKafkaFileDO.setOperator(userName);
            if (kafkaFileDao.updateById(newKafkaFileDO) <= 0) {
                return ResultStatus.MYSQL_ERROR;
            }
        } catch (DuplicateKeyException e) {
            return ResultStatus.RESOURCE_NAME_DUPLICATED;
        } catch (Exception e) {
            LOGGER.error("modify kafka file failed, kafkaFileDTO:{}.", kafkaFileDTO, e);
            return ResultStatus.MYSQL_ERROR;
        }

        if (storageService.upload(
                kafkaFileDTO.getFileName(),
                kafkaFileDTO.getFileMd5(),
                kafkaFileDTO.getUploadFile())
        ) {
            return ResultStatus.SUCCESS;
        }

        try {
            if (kafkaFileDao.updateById(kafkaFileDO) <= 0) {
                return ResultStatus.MYSQL_ERROR;
            }
            return ResultStatus.UPLOAD_FILE_FAIL;
        } catch (Exception e) {
            LOGGER.error("rollback modify kafka file failed, kafkaFileDTO:{}.", kafkaFileDTO, e);
        }
        return ResultStatus.MYSQL_ERROR;
    }

    @Override
    public ResultStatus deleteKafkaFile(Long id) {
        try {
            if (kafkaFileDao.deleteById(id) > 0) {
                return ResultStatus.SUCCESS;
            }
        } catch (Exception e) {
            LOGGER.error("delete kafka file failed, id:{}.", id, e);
        }
        return ResultStatus.MYSQL_ERROR;
    }

    @Override
    public List<KafkaFileDO> getKafkaFiles() {
        try {
            return kafkaFileDao.list();
        } catch (Exception e) {
            LOGGER.error("get kafka file list failed.", e);
        }
        return new ArrayList<>();
    }

    @Override
    public KafkaFileDO getFileById(Long id) {
        try {
            return kafkaFileDao.getById(id);
        } catch (Exception e) {
            LOGGER.error("get kafka file failed, id:{}.", id, e);
        }
        return null;
    }

    @Override
    public KafkaFileDO getFileByFileName(String fileName) {
        try {
            return kafkaFileDao.getFileByFileName(fileName);
        } catch (Exception e) {
            LOGGER.error("get kafka file failed, fileName:{}.", fileName, e);
        }
        return null;
    }

    @Override
    public Result<String> downloadKafkaConfigFile(Long fileId) {
        KafkaFileDO kafkaFileDO = kafkaFileDao.getById(fileId);
        if (ValidateUtils.isNull(kafkaFileDO)) {
            return Result.buildFrom(ResultStatus.RESOURCE_NOT_EXIST);
        }
        if (KafkaFileEnum.PACKAGE.getCode().equals(kafkaFileDO.getFileType())) {
            return Result.buildFrom(ResultStatus.FILE_TYPE_NOT_SUPPORT);
        }

        return storageService.download(kafkaFileDO.getFileName(), kafkaFileDO.getFileMd5());
    }

    @Override
    public String getDownloadBaseUrl() {
        return storageService.getDownloadBaseUrl();
    }
}
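Usage sketch for the class above (the caller, variables, and logging here are illustrative only, not part of the commit): downloading a config file reduces to checking the Result code before using the payload.

    // Illustrative caller; kafkaFileService and fileId are assumed to be in scope.
    Result<String> result = kafkaFileService.downloadKafkaConfigFile(fileId);
    if (Constant.SUCCESS.equals(result.getCode())) {
        LOGGER.info("server.properties content:\n{}", result.getData());
    } else {
        LOGGER.warn("download config file failed, code:{}, message:{}", result.getCode(), result.getMessage());
    }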
@@ -0,0 +1,39 @@
package com.xiaojukeji.kafka.manager.kcm.tasks;

import com.xiaojukeji.kafka.manager.common.constant.Constant;
import com.xiaojukeji.kafka.manager.kcm.common.entry.ao.CreationTaskData;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.kcm.common.entry.dto.AbstractClusterTaskDTO;
import com.xiaojukeji.kafka.manager.common.utils.UUIDUtils;

/**
 * @author zengqiao
 * @date 20/5/20
 */
public abstract class AbstractClusterTaskService {

    public Result<CreationTaskData> getCreateTaskParamDTO(AbstractClusterTaskDTO abstractClusterTaskDTO) {
        Result<CreationTaskData> operationHostResult = getOperationHosts(abstractClusterTaskDTO);
        if (!Constant.SUCCESS.equals(operationHostResult.getCode())) {
            return new Result<>(operationHostResult.getCode(), operationHostResult.getMessage());
        }

        CreationTaskData dto = operationHostResult.getData();
        dto.setUuid(UUIDUtils.uuid());
        dto.setClusterId(abstractClusterTaskDTO.getClusterId());
        dto.setTaskType(abstractClusterTaskDTO.getTaskType());
        dto.setKafkaPackageName(abstractClusterTaskDTO.getKafkaPackageName());
        dto.setKafkaPackageMd5(abstractClusterTaskDTO.getKafkaPackageMd5());
        dto.setKafkaPackageUrl(
                abstractClusterTaskDTO.getKafkaFileBaseUrl() + "/" + abstractClusterTaskDTO.getKafkaPackageName()
        );
        dto.setServerPropertiesName(abstractClusterTaskDTO.getServerPropertiesName());
        dto.setServerPropertiesMd5(abstractClusterTaskDTO.getServerPropertiesMd5());
        dto.setServerPropertiesUrl(
                abstractClusterTaskDTO.getKafkaFileBaseUrl() + "/" + abstractClusterTaskDTO.getServerPropertiesName()
        );
        return new Result<>(dto);
    }

    protected abstract Result<CreationTaskData> getOperationHosts(AbstractClusterTaskDTO abstractClusterTaskDTO);
}
@@ -0,0 +1,34 @@
package com.xiaojukeji.kafka.manager.kcm.tasks;

import com.xiaojukeji.kafka.manager.common.entity.ResultStatus;
import com.xiaojukeji.kafka.manager.common.utils.NetUtils;
import com.xiaojukeji.kafka.manager.kcm.common.entry.ao.CreationTaskData;
import com.xiaojukeji.kafka.manager.kcm.common.entry.ClusterTaskConstant;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.kcm.common.entry.dto.AbstractClusterTaskDTO;
import com.xiaojukeji.kafka.manager.kcm.common.entry.dto.ClusterHostTaskDTO;
import org.springframework.stereotype.Service;

import java.util.Arrays;

/**
 * @author zengqiao
 * @date 20/5/20
 */
@Service(ClusterTaskConstant.CLUSTER_HOST_BEAN_NAME)
public class ClusterHostTaskService extends AbstractClusterTaskService {
    @Override
    public Result<CreationTaskData> getOperationHosts(AbstractClusterTaskDTO abstractClusterTaskDTO) {
        ClusterHostTaskDTO clusterHostTaskDTO = (ClusterHostTaskDTO) abstractClusterTaskDTO;

        CreationTaskData dto = new CreationTaskData();
        for (String hostname: clusterHostTaskDTO.getHostList()) {
            if (!NetUtils.hostnameLegal(hostname)) {
                return Result.buildFrom(ResultStatus.PARAM_ILLEGAL);
            }
        }
        dto.setHostList(clusterHostTaskDTO.getHostList());
        dto.setPauseList(Arrays.asList(clusterHostTaskDTO.getHostList().get(0)));
        return new Result<>(dto);
    }
}
@@ -0,0 +1,96 @@
package com.xiaojukeji.kafka.manager.kcm.tasks;

import com.xiaojukeji.kafka.manager.kcm.common.entry.ao.CreationTaskData;
import com.xiaojukeji.kafka.manager.common.bizenum.KafkaBrokerRoleEnum;
import com.xiaojukeji.kafka.manager.kcm.common.entry.ClusterTaskConstant;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.ResultStatus;
import com.xiaojukeji.kafka.manager.kcm.common.entry.dto.AbstractClusterTaskDTO;
import com.xiaojukeji.kafka.manager.kcm.common.entry.dto.ClusterRoleTaskDTO;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import org.springframework.stereotype.Service;

import java.util.*;
import java.util.stream.Collectors;

/**
 * @author zengqiao
 * @date 20/5/20
 */
@Service(ClusterTaskConstant.CLUSTER_ROLE_BEAN_NAME)
public class ClusterRoleTaskService extends AbstractClusterTaskService {

    @Override
    public Result<CreationTaskData> getOperationHosts(AbstractClusterTaskDTO abstractClusterTaskDTO) {
        /*
         * Handles brokers that carry several Kafka roles at once, and the fact that the controller can drift to another broker.
         * 1. Rule: a broker may hold multiple roles, but each broker is upgraded only once.
         *    With that rule, if the required upgrade order is A, B, C, the actual order is A, then B minus A, then C minus A and B.
         *    Pause points: the first host of each role is a pause point; the task pauses after operating that host, not before touching the cluster.
         * 2. The controller is always upgraded last.
         */

        ClusterRoleTaskDTO dto = (ClusterRoleTaskDTO) abstractClusterTaskDTO;
        Boolean existController = Boolean.FALSE;
        if (dto.getUpgradeSequenceList().remove(KafkaBrokerRoleEnum.CONTROLLER.getRole())) {
            existController = Boolean.TRUE;
        }

        Map<String, List<String>> kafkaRoleMap = dto.getKafkaRoleBrokerHostMap();
        if (existController
                && ValidateUtils.isEmptyList(kafkaRoleMap.get(KafkaBrokerRoleEnum.CONTROLLER.getRole()))) {
            return Result.buildFrom(ResultStatus.CONTROLLER_NOT_ALIVE);
        }

        // Get the controller host
        String controller = "";
        if (!ValidateUtils.isEmptyList(kafkaRoleMap.get(KafkaBrokerRoleEnum.CONTROLLER.getRole()))) {
            controller = kafkaRoleMap.get(KafkaBrokerRoleEnum.CONTROLLER.getRole()).get(0);
        }

        if (ValidateUtils.isNull(dto.getIgnoreList())) {
            dto.setIgnoreList(new ArrayList<>());
        }

        // Collect the hosts for each role
        List<String> hostList = new ArrayList<>();
        List<String> pauseList = new ArrayList<>();

        Set<String> hostSet = new HashSet<>();
        for (String kafkaRole: dto.getUpgradeSequenceList()) {
            List<String> subHostList = kafkaRoleMap.get(kafkaRole);
            if (ValidateUtils.isEmptyList(subHostList)) {
                continue;
            }
            if (subHostList.contains(controller)) {
                existController = Boolean.TRUE;
                subHostList.remove(controller);
            }

            List<String> notUsedSubHostList = subHostList
                    .stream()
                    .filter(elem -> !(hostSet.contains(elem) || dto.getIgnoreList().contains(elem)))
                    .collect(Collectors.toList());
            if (ValidateUtils.isEmptyList(notUsedSubHostList)) {
                continue;
            }
            hostSet.addAll(notUsedSubHostList);

            // Sort by hostname so the upgrade proceeds region by region as far as possible
            Collections.sort(notUsedSubHostList);
            pauseList.add(notUsedSubHostList.get(0));
            hostList.addAll(notUsedSubHostList);
        }

        if (existController && !ValidateUtils.isBlank(controller)) {
            // Upgrade the controller last
            pauseList.add(controller);
            hostList.add(controller);
        }

        CreationTaskData creationTaskDTO = new CreationTaskData();
        creationTaskDTO.setHostList(hostList);
        creationTaskDTO.setPauseList(pauseList);
        return new Result<>(creationTaskDTO);
    }
}
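To make the ordering rules above concrete, a hypothetical trace of getOperationHosts (role and host names are placeholders, not taken from the code):

    // upgradeSequenceList = [CONTROLLER, ROLE_A, ROLE_B]; the controller lives on host-2.
    // kafkaRoleBrokerHostMap = { CONTROLLER: [host-2], ROLE_A: [host-1, host-4, host-2], ROLE_B: [host-2, host-3] }
    // CONTROLLER is removed from the sequence up front (existController = true).
    // ROLE_A round: host-2 is the controller and is set aside; [host-1, host-4] are added -> pause point host-1.
    // ROLE_B round: host-2 is set aside again; host-3 is the only new host -> pause point host-3.
    // Controller last: host-2 is appended and is also a pause point.
    // Result: hostList = [host-1, host-4, host-3, host-2], pauseList = [host-1, host-3, host-2].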
@@ -0,0 +1,7 @@
agent:
  n9e:
    base-url: http://127.0.0.1/api
    username: admin
    user-token: admin
    tpl-id: 123456
    timeout: 30
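The agent.n9e.* keys above appear to configure how the KCM module reaches the n9e agent. As a sketch of how such keys are typically consumed — the class and field names below are illustrative only, not from this commit — Spring can bind them via @Value:

    // Illustrative property binding; not part of this change.
    import org.springframework.beans.factory.annotation.Value;
    import org.springframework.stereotype.Component;

    @Component
    public class N9eAgentConfig {
        @Value("${agent.n9e.base-url}")
        private String baseUrl;

        @Value("${agent.n9e.username}")
        private String username;

        @Value("${agent.n9e.user-token}")
        private String userToken;

        @Value("${agent.n9e.tpl-id}")
        private Integer tplId;

        @Value("${agent.n9e.timeout}")
        private Integer timeout;
    }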