zengqiao
2020-03-19 17:59:34 +08:00
commit 229140f067
407 changed files with 46207 additions and 0 deletions

@@ -0,0 +1,24 @@
package com.xiaojukeji.kafka.manager.web;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.scheduling.annotation.EnableScheduling;
/**
* Bootstraps the Spring Boot application
* @author huangyiminghappy@163.com
* @date 2019-04-24
*/
@SpringBootApplication
@ComponentScan({"com.xiaojukeji.kafka.manager"})
@EnableScheduling
@EnableAutoConfiguration
public class MainApplication {
public static void main(String[] args) {
SpringApplication sa = new SpringApplication(MainApplication.class);
sa.run(args);
}
}
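
Since @EnableScheduling is switched on above, timer-driven jobs elsewhere in the project only need to annotate methods with @Scheduled. A minimal sketch of such a job, purely for illustration (the class name and interval are assumptions, not part of this commit):

import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

// Illustrative only: a periodic task that the @EnableScheduling switch above would pick up.
@Component
public class MetricsCollectTaskSketch {
    @Scheduled(fixedRate = 60000)   // assumed interval: run once a minute
    public void collect() {
        // e.g. pull broker/topic metrics and persist them
    }
}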

@@ -0,0 +1,127 @@
package com.xiaojukeji.kafka.manager.web.api.versionone;
import com.xiaojukeji.kafka.manager.web.converters.AccountConverter;
import com.xiaojukeji.kafka.manager.web.model.AccountModel;
import com.xiaojukeji.kafka.manager.common.constant.Constant;
import com.xiaojukeji.kafka.manager.common.constant.StatusCode;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.po.AccountDO;
import com.xiaojukeji.kafka.manager.service.service.LoginService;
import com.xiaojukeji.kafka.manager.web.vo.AccountVO;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.dao.DuplicateKeyException;
import org.springframework.http.MediaType;
import org.springframework.util.StringUtils;
import org.springframework.web.bind.annotation.*;
import java.util.List;
/**
* @author zengqiao
* @date 19/5/3
*/
@Api(value = "AdminAccountController", description = "Account相关接口")
@RestController
@RequestMapping("api/v1/")
public class AdminAccountController {
private final static Logger logger = LoggerFactory.getLogger(AdminAccountController.class);
@Autowired
private LoginService loginService;
@ApiOperation(value = "添加账号", httpMethod = "POST", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = AccountVO.class)
@RequestMapping(value = "admin/accounts/account", method = RequestMethod.POST, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result<AccountVO> addAccount(@RequestBody AccountModel reqObj) {
if (reqObj == null || !reqObj.insertLegal()) {
return new Result<>(StatusCode.PARAM_ERROR, "param error");
}
AccountDO accountDO = AccountConverter.convert2AccountDO(reqObj);
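// Capture the raw password before the DO is handed to the service (which may encode it in place), so it can be echoed back in the AccountVO below.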
String password = accountDO.getPassword();
try {
Result result = loginService.addNewAccount(accountDO);
if (!StatusCode.SUCCESS.equals(result.getCode())) {
return result;
}
} catch (DuplicateKeyException e) {
return new Result<>(StatusCode.PARAM_ERROR, "account already exist");
} catch (Exception e) {
logger.error("addAccount@AdminAccountController, create failed, req:{}.", reqObj, e);
return new Result<>(StatusCode.MY_SQL_INSERT_ERROR, Constant.KAFKA_MANAGER_INNER_ERROR);
}
return new Result<>(new AccountVO(accountDO.getUsername(), password, accountDO.getRole()));
}
@ApiOperation(value = "删除账号", httpMethod = "DELETE", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = Result.class)
@RequestMapping(value = "admin/accounts/account", method = RequestMethod.DELETE, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result deleteAccount(@RequestParam("username") String username) {
if (StringUtils.isEmpty(username)) {
return new Result(StatusCode.PARAM_ERROR, "param error");
}
try {
if (!loginService.deleteByName(username)){
return new Result(StatusCode.MY_SQL_UPDATE_ERROR, Constant.KAFKA_MANAGER_INNER_ERROR);
}
} catch (Exception e) {
logger.error("deleteAccount@AdminAccountController, delete failed, username:{}.", username, e);
return new Result(StatusCode.MY_SQL_UPDATE_ERROR, Constant.KAFKA_MANAGER_INNER_ERROR);
}
return new Result();
}
@ApiOperation(value = "修改账号", httpMethod = "PUT", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = Result.class)
@RequestMapping(value = "admin/accounts/account", method = RequestMethod.PUT, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result adminUpdateAccount(@RequestBody AccountModel reqObj) {
if (reqObj == null || !reqObj.modifyLegal()) {
return new Result(StatusCode.PARAM_ERROR, "参数错误");
}
AccountDO accountDO = AccountConverter.convert2AccountDO(reqObj);
try {
return loginService.updateAccount(accountDO, null);
} catch (Exception e) {
logger.error("updateAccount@AdminAccountController, update failed, req:{}.", reqObj, e);
return new Result(StatusCode.MY_SQL_UPDATE_ERROR, Constant.KAFKA_MANAGER_INNER_ERROR);
}
}
@ApiOperation(value = "修改账号", httpMethod = "PUT", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = Result.class)
@RequestMapping(value = "accounts/account", method = RequestMethod.PUT, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result selfUpdateAccount(@RequestBody AccountModel reqObj) {
if (reqObj == null || !reqObj.modifyLegal() || StringUtils.isEmpty(reqObj.getPassword()) || StringUtils.isEmpty(reqObj.getOldPassword())) {
return new Result(StatusCode.PARAM_ERROR, "param illegal");
}
AccountDO accountDO = AccountConverter.convert2AccountDO(reqObj);
try {
return loginService.updateAccount(accountDO, reqObj.getOldPassword());
} catch (Exception e) {
logger.error("updateAccount@AdminAccountController, update failed, req:{}.", reqObj, e);
return new Result(StatusCode.MY_SQL_UPDATE_ERROR, Constant.KAFKA_MANAGER_INNER_ERROR);
}
}
@ApiOperation(value = "账号列表", httpMethod = "GET", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = AccountVO.class)
@RequestMapping(value = "admin/accounts", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result<List<AccountVO>> listAccounts() {
try {
List<AccountDO> accountDOList = loginService.list();
return new Result<>(AccountConverter.convert2AccountVOList(accountDOList));
} catch (Exception e) {
logger.error("listAccounts@AdminAccountController, list failed.", e);
return new Result<>(StatusCode.MY_SQL_SELECT_ERROR, Constant.KAFKA_MANAGER_INNER_ERROR);
}
}
}
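
Every endpoint above wraps its payload in the project's Result envelope (com.xiaojukeji.kafka.manager.common.entity.Result) and reports errors through StatusCode constants. As orientation only, a generic envelope consistent with how Result is used in these controllers might look like the sketch below; the project's actual class may differ in field names and constructors.

// Sketch of a response envelope in the style used above; illustrative, not the project's actual class.
public class Result<T> {
    private Integer code;    // status code; the success constant (assumed 0 here) on the happy path
    private String message;  // "success" or an error description
    private T data;          // payload, e.g. AccountVO or List<AccountVO>

    public Result() { this(0, "success", null); }                       // success, no payload
    public Result(T data) { this(0, "success", data); }                 // success with payload
    public Result(Integer code, String message) { this(code, message, null); }
    public Result(Integer code, String message, T data) {
        this.code = code;
        this.message = message;
        this.data = data;
    }

    public Integer getCode() { return code; }
    public String getMessage() { return message; }
    public T getData() { return data; }
}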

@@ -0,0 +1,111 @@
package com.xiaojukeji.kafka.manager.web.api.versionone;
import com.xiaojukeji.kafka.manager.web.model.MigrationModel;
import com.xiaojukeji.kafka.manager.web.vo.MigrationDetailVO;
import com.xiaojukeji.kafka.manager.common.constant.StatusCode;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.bizenum.ReassignmentStatusEnum;
import com.xiaojukeji.kafka.manager.common.entity.po.ClusterDO;
import com.xiaojukeji.kafka.manager.common.entity.po.MigrationTaskDO;
import com.xiaojukeji.kafka.manager.service.service.ClusterService;
import com.xiaojukeji.kafka.manager.service.service.MigrationService;
import com.xiaojukeji.kafka.manager.web.converters.AdminMigrationConverter;
import com.xiaojukeji.kafka.manager.web.model.MigrationCreateModel;
import com.xiaojukeji.kafka.manager.web.vo.MigrationTaskVO;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.*;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Migration (partition reassignment) related APIs
* @author zengqiao_cn@163.com,huangyiminghappy@163.com
* @date 19/4/3
*/
@Api(value = "AdminMigrationController", description = "AdminMigration相关接口")
@Controller
@RequestMapping("api/v1/admin/")
public class AdminMigrationController {
private static final Logger logger = LoggerFactory.getLogger(AdminMigrationController.class);
@Autowired
private MigrationService migrationService;
@Autowired
private ClusterService clusterService;
@ApiOperation(value = "创建迁移任务", httpMethod = "POST", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = MigrationDetailVO.class)
@RequestMapping(value = {"migration/tasks"}, method = RequestMethod.POST, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result<MigrationDetailVO> createMigrateTask(@RequestBody MigrationCreateModel reqObj) {
if (reqObj == null || !reqObj.legal()) {
return new Result<>(StatusCode.PARAM_ERROR, "param illegal");
}
Result<MigrationTaskDO> result = migrationService.createMigrationTask(
reqObj.getClusterId(),
reqObj.getTopicName(),
reqObj.getPartitionIdList(),
reqObj.getThrottle(),
reqObj.getBrokerIdList(),
reqObj.getDescription()
);
if (!StatusCode.SUCCESS.equals(result.getCode())) {
return new Result<>(result.getCode(), result.getMessage());
}
return new Result<>(AdminMigrationConverter.convert2MigrationDetailVO(result.getData(), null));
}
@ApiOperation(value = "操作迁移任务[触发执行|修改限流|取消]", httpMethod = "PUT", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = Result.class)
@RequestMapping(value = {"migration/tasks"}, method = RequestMethod.PUT, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result executeMigrateTask(@RequestBody MigrationModel reqObj) {
if (reqObj == null || reqObj.getTaskId() == null) {
return new Result(StatusCode.PARAM_ERROR, "param illegal");
}
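// Dispatch on the requested action: "start" triggers execution, "modify" updates the throttle, "cancel" deletes the task.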
if ("start".equals(reqObj.getAction())) {
return migrationService.executeMigrationTask(reqObj.getTaskId());
} else if ("modify".equals(reqObj.getAction())) {
return migrationService.modifyMigrationTask(reqObj.getTaskId(), reqObj.getThrottle());
} else if ("cancel".equals(reqObj.getAction())) {
return migrationService.deleteMigrationTask(reqObj.getTaskId());
}
return new Result(StatusCode.PARAM_ERROR, "param illegal");
}
@ApiOperation(value = "查看迁移进度详情", httpMethod = "GET", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = MigrationDetailVO.class)
@RequestMapping(value = {"migration/tasks/{taskId}"}, method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result<MigrationDetailVO> getMigrateTaskDetail(@PathVariable Long taskId) {
MigrationTaskDO migrationTaskDO = migrationService.getMigrationTask(taskId);
if (migrationTaskDO == null) {
return new Result<>(StatusCode.PARAM_ERROR, "taskId illegal");
}
ClusterDO cluster = clusterService.getById(migrationTaskDO.getClusterId());
if (cluster == null) {
return new Result<>(StatusCode.OPERATION_ERROR, "task illegal, cluster not exist");
}
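// A task still in WAITING state has no reassignment running yet, so skip the live-status lookup.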
Map<Integer, Integer> migrationStatusMap = ReassignmentStatusEnum.WAITING.getCode().equals(migrationTaskDO.getStatus())
        ? new HashMap<>()
        : migrationService.getMigrationStatus(cluster, migrationTaskDO.getReassignmentJson());
return new Result<>(AdminMigrationConverter.convert2MigrationDetailVO(migrationTaskDO, migrationStatusMap));
}
@ApiOperation(value = "迁移任务列表", httpMethod = "GET", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = MigrationTaskVO.class)
@RequestMapping(value = {"migration/tasks"}, method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result<List<MigrationTaskVO>> getMigrationTask() {
List<ClusterDO> clusterDOList = clusterService.listAll();
return new Result<>(AdminMigrationConverter.convert2MigrationTaskVOList(migrationService.getMigrationTaskList(), clusterDOList));
}
}

@@ -0,0 +1,81 @@
package com.xiaojukeji.kafka.manager.web.api.versionone;
import com.xiaojukeji.kafka.manager.common.entity.bizenum.PreferredReplicaElectEnum;
import com.xiaojukeji.kafka.manager.common.constant.StatusCode;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.po.ClusterDO;
import com.xiaojukeji.kafka.manager.service.service.ClusterService;
import com.xiaojukeji.kafka.manager.service.service.impl.AdminPreferredReplicaElectServiceImpl;
import com.xiaojukeji.kafka.manager.service.utils.SpringContextHolder;
import com.xiaojukeji.kafka.manager.web.model.RebalanceModel;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.*;
/**
* Preferred replica election
* @author zengqiao
* @date 2019-04-22
*/
@Api(value = "AdminRebalanceController", description = "优先副本选举相关接口")
@Controller
@RequestMapping("api/v1/admin/utils/")
public class AdminRebalanceController {
@Autowired
private ClusterService clusterService;
@Autowired
private AdminPreferredReplicaElectServiceImpl adminPreferredReplicaElectService;
private static final Logger logger = LoggerFactory.getLogger(AdminRebalanceController.class);
@ApiOperation(value = "查看优先副本选举状态", httpMethod = "GET", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = PreferredReplicaElectEnum.class)
@RequestMapping(value = "rebalance/clusters/{clusterId}/status", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result<PreferredReplicaElectEnum> preferredReplicaElectStatus(@PathVariable Long clusterId) {
if (clusterId == null || clusterId < 0) {
return new Result<>(StatusCode.PARAM_ERROR, "param illegal");
}
ClusterDO clusterDO = clusterService.getById(clusterId);
if (clusterDO == null) {
return new Result<>(StatusCode.PARAM_ERROR, "param illegal, cluster not exist");
}
PreferredReplicaElectEnum preferredReplicaElectEnum = adminPreferredReplicaElectService.preferredReplicaElectionStatus(clusterDO);
return new Result<>(preferredReplicaElectEnum);
}
@ApiOperation(value = "进行优先副本选举", httpMethod = "POST", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = Result.class)
@RequestMapping(value = "rebalance", method = RequestMethod.POST, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result preferredReplicaElect(@RequestBody RebalanceModel reqObj) {
if (reqObj == null || !reqObj.legal()) {
return new Result(StatusCode.PARAM_ERROR, "param illegal");
}
ClusterDO clusterDO = clusterService.getById(reqObj.getClusterId());
if (clusterDO == null) {
return new Result(StatusCode.PARAM_ERROR, "param illegal, cluster not exist");
}
String operator = SpringContextHolder.getUserName();
PreferredReplicaElectEnum preferredReplicaElectEnum = null;
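// dimension 0: cluster-wide election; dimension 1: single-broker election; topic-level election is still a TODO below.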
if (reqObj.getDimension().equals(0)) {
preferredReplicaElectEnum = adminPreferredReplicaElectService.preferredReplicaElection(clusterDO, operator);
} else if (reqObj.getDimension().equals(1)) {
preferredReplicaElectEnum = adminPreferredReplicaElectService.preferredReplicaElection(clusterDO, reqObj.getBrokerId(), operator);
} else {
// TODO: 19/7/8 topic-level preferred replica election
}
if (PreferredReplicaElectEnum.SUCCESS.equals(preferredReplicaElectEnum)) {
return new Result();
}
return new Result(StatusCode.OPERATION_ERROR, preferredReplicaElectEnum.getMessage());
}
}

@@ -0,0 +1,95 @@
package com.xiaojukeji.kafka.manager.web.api.versionone;
import com.xiaojukeji.kafka.manager.web.converters.RegionModelConverter;
import com.xiaojukeji.kafka.manager.web.vo.RegionVO;
import com.xiaojukeji.kafka.manager.common.constant.StatusCode;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.service.service.RegionService;
import com.xiaojukeji.kafka.manager.service.utils.SpringContextHolder;
import com.xiaojukeji.kafka.manager.web.model.RegionModel;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import io.swagger.annotations.ApiResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.*;
import java.util.List;
/**
* @author zengqiao
* @date 19/4/21
*/
@Api(value = "RegionController", description = "Region相关接口")
@Controller
@RequestMapping("api/v1/admin/")
public class AdminRegionController {
private static final Logger logger = LoggerFactory.getLogger(AdminRegionController.class);
@Autowired
private RegionService regionService;
@ApiOperation(value = "查询Region列表", httpMethod = "GET", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = RegionVO.class)
@RequestMapping(value = "{clusterId}/regions", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result<List<RegionVO>> getRegionList(@ApiParam(name = "clusterId", required = true, value = "集群ID") @PathVariable Long clusterId) {
if (clusterId == null || clusterId <= 0) {
return new Result<>(StatusCode.PARAM_ERROR, "clusterId illegal");
}
return new Result<>(RegionModelConverter.convert2RegionVOList(regionService.getByClusterId(clusterId)));
}
@ApiOperation(value = "删除Region", httpMethod = "DELETE", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = Result.class)
@RequestMapping(value = "regions/{regionId}", method = RequestMethod.DELETE, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result deleteTag(@ApiParam(name = "regionId", required = true) @PathVariable Long regionId) {
if (regionId == null || regionId < 0) {
return new Result(StatusCode.PARAM_ERROR, "param illegal, regionId illegal");
}
if (!regionService.deleteById(regionId)) {
return new Result(StatusCode.MY_SQL_DELETE_ERROR, "delete region failed, maybe region not exist");
}
return new Result();
}
@ApiOperation(value = "添加Region", httpMethod = "POST", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success")
@RequestMapping(value = "regions/region", method = RequestMethod.POST, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result addNewRegion(@RequestBody RegionModel reqObj) {
if (reqObj == null || !reqObj.legal()) {
return new Result(StatusCode.PARAM_ERROR, "param illegal, data is empty or illegal");
}
try {
String operator = SpringContextHolder.getUserName();
return regionService.createRegion(RegionModelConverter.convert2RegionDO(reqObj, operator));
} catch (Exception e) {
logger.error("addNewRegion@RegionController, create region failed, req:{}.", reqObj, e);
}
return new Result(StatusCode.OPERATION_ERROR, "create region failed");
}
@ApiOperation(value = "更新Region", httpMethod = "PUT", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success")
@RequestMapping(value = "regions/region", method = RequestMethod.PUT, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result updateRegion(@RequestBody RegionModel reqObj) {
if (reqObj == null || !reqObj.legal()) {
return new Result(StatusCode.PARAM_ERROR, "param illegal, data is empty or illegal");
}
try {
String operator = SpringContextHolder.getUserName();
return regionService.updateRegion(RegionModelConverter.convert2RegionDO(reqObj, operator));
} catch (Exception e) {
logger.error("updateRegion@RegionController, update region failed, req:{}.", reqObj, e);
}
return new Result(StatusCode.OPERATION_ERROR, "update region failed");
}
}

@@ -0,0 +1,216 @@
package com.xiaojukeji.kafka.manager.web.api.versionone;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.xiaojukeji.kafka.manager.web.model.topic.AdminTopicModel;
import com.xiaojukeji.kafka.manager.web.model.topic.TopicDeleteModel;
import com.xiaojukeji.kafka.manager.common.constant.StatusCode;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.bizenum.AdminTopicStatusEnum;
import com.xiaojukeji.kafka.manager.common.entity.po.ClusterDO;
import com.xiaojukeji.kafka.manager.common.entity.po.TopicDO;
import com.xiaojukeji.kafka.manager.common.entity.zookeeper.TopicMetadata;
import com.xiaojukeji.kafka.manager.service.cache.ClusterMetadataManager;
import com.xiaojukeji.kafka.manager.service.service.ClusterService;
import com.xiaojukeji.kafka.manager.service.service.RegionService;
import com.xiaojukeji.kafka.manager.service.service.TopicManagerService;
import com.xiaojukeji.kafka.manager.service.service.ZookeeperService;
import com.xiaojukeji.kafka.manager.service.utils.SpringContextHolder;
import com.xiaojukeji.kafka.manager.service.service.impl.AdminTopicServiceImpl;
import com.xiaojukeji.kafka.manager.web.converters.AdminUtilConverter;
import com.xiaojukeji.kafka.manager.web.model.topic.AdminExpandTopicModel;
import com.xiaojukeji.kafka.manager.web.vo.topic.TopicDeleteVO;
import com.xiaojukeji.kafka.manager.web.vo.topic.TopicDetailVO;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiResponse;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.*;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
/**
* Utility class
* @author zengqiao
* @date 2019-04-22
*/
@Api(value = "AdminUtilsController", description = "AdminUtil相关接口")
@Controller
@RequestMapping("api/v1/admin/")
public class AdminUtilsController {
@Autowired
private AdminTopicServiceImpl adminTopicService;
@Autowired
private TopicManagerService topicManagerService;
@Autowired
private ClusterService clusterService;
@Autowired
private RegionService regionService;
@Autowired
private ZookeeperService zookeeperService;
private static final Logger logger = LoggerFactory.getLogger(AdminUtilsController.class);
@ApiOperation(value = "创建Topic", httpMethod = "POST", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = Result.class)
@RequestMapping(value = {"utils/topic"}, method = RequestMethod.POST, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result createCommonTopic(@RequestBody AdminTopicModel reqObj) {
if (reqObj == null || !reqObj.createParamLegal()) {
return new Result(StatusCode.PARAM_ERROR, "param illegal");
}
ClusterDO clusterDO = clusterService.getById(reqObj.getClusterId());
if (clusterDO == null) {
return new Result(StatusCode.PARAM_ERROR, "param illegal, cluster not exist");
}
List<Integer> brokerIdList = regionService.getFullBrokerId(clusterDO.getId(), reqObj.getRegionIdList(), reqObj.getBrokerIdList());
if (brokerIdList == null || brokerIdList.isEmpty()) {
return new Result(StatusCode.PARAM_ERROR, "param illegal, brokerIdList or regionIdList illegal");
}
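// Parse optional topic configs from the request (a JSON string); retention.ms always comes from the dedicated retentionTime field.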
Properties properties = null;
if (StringUtils.isEmpty(reqObj.getProperties())) {
properties = new Properties();
} else {
properties = JSON.parseObject(reqObj.getProperties(), Properties.class);
}
properties.put("retention.ms", String.valueOf(reqObj.getRetentionTime()));
TopicDO topicDO = AdminUtilConverter.convert2TopicDO(reqObj);
TopicMetadata topicMetadata = AdminUtilConverter.convert2TopicMetadata(reqObj.getTopicName(), reqObj.getPartitionNum(), reqObj.getReplicaNum(), brokerIdList);
String operator = SpringContextHolder.getUserName();
AdminTopicStatusEnum adminTopicStatusEnum = adminTopicService.createTopic(clusterDO, topicMetadata, topicDO, properties, operator);
if (AdminTopicStatusEnum.SUCCESS.equals(adminTopicStatusEnum)) {
return new Result();
}
return new Result(StatusCode.OPERATION_ERROR, adminTopicStatusEnum.getMessage());
}
@ApiOperation(value = "修改Topic", httpMethod = "PUT", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = Result.class)
@RequestMapping(value = {"utils/topic/config"}, method = RequestMethod.PUT, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result modifyTopic(@RequestBody AdminTopicModel reqObj) {
if (reqObj == null || !reqObj.modifyConfigParamLegal()) {
return new Result(StatusCode.PARAM_ERROR, "param illegal, data is empty");
}
ClusterDO clusterDO = clusterService.getById(reqObj.getClusterId());
if (clusterDO == null) {
return new Result(StatusCode.PARAM_ERROR, "param illegal, cluster not exist");
}
// Parse the config properties
Properties properties = new Properties();
try {
if (!StringUtils.isEmpty(reqObj.getProperties())) {
properties = JSONObject.parseObject(reqObj.getProperties(), Properties.class);
}
properties.setProperty("retention.ms", String.valueOf(reqObj.getRetentionTime()));
} catch (Exception e) {
logger.error("modifyTopic@AdminUtilsController, modify failed, req:{}.", reqObj, e);
return new Result(StatusCode.PARAM_ERROR, "param illegal, properties illegal");
}
TopicDO topicDO = AdminUtilConverter.convert2TopicDO(reqObj);
// Apply the modification
String operator = SpringContextHolder.getUserName();
AdminTopicStatusEnum adminTopicStatusEnum = adminTopicService.modifyTopic(clusterDO, topicDO, properties, operator);
if (AdminTopicStatusEnum.SUCCESS.equals(adminTopicStatusEnum)) {
return new Result();
}
return new Result(StatusCode.OPERATION_ERROR, adminTopicStatusEnum.getMessage());
}
@ApiOperation(value = "删除Topic", httpMethod = "DELETE", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = TopicDeleteVO.class)
@RequestMapping(value = {"utils/topic"}, method = RequestMethod.DELETE, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result<List<TopicDeleteVO>> deleteTopic(@RequestBody TopicDeleteModel reqObj) {
if (reqObj == null || !reqObj.legal()) {
return new Result<>(StatusCode.PARAM_ERROR, "param illegal");
}
ClusterDO clusterDO = clusterService.getById(reqObj.getClusterId());
if (clusterDO == null) {
return new Result<>(StatusCode.PARAM_ERROR, "param illegal, cluster not exist");
}
String operator = SpringContextHolder.getUserName();
List<TopicDeleteVO> topicDeleteVOList = new ArrayList<>();
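// Delete each requested topic individually and report a per-topic result instead of failing the whole batch.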
for (String topicName: reqObj.getTopicNameList()) {
if (StringUtils.isEmpty(topicName)) {
topicDeleteVOList.add(new TopicDeleteVO(clusterDO.getId(), topicName, "topic name illegal", StatusCode.PARAM_ERROR));
continue;
}
AdminTopicStatusEnum adminTopicStatusEnum = adminTopicService.deleteTopic(clusterDO, topicName, operator);
if (AdminTopicStatusEnum.SUCCESS.equals(adminTopicStatusEnum)) {
topicDeleteVOList.add(new TopicDeleteVO(clusterDO.getId(), topicName, adminTopicStatusEnum.getMessage(), StatusCode.SUCCESS));
} else {
topicDeleteVOList.add(new TopicDeleteVO(clusterDO.getId(), topicName, adminTopicStatusEnum.getMessage(), StatusCode.OPERATION_ERROR));
}
}
return new Result<>(topicDeleteVOList);
}
@ApiOperation(value = "Topic扩容", httpMethod = "PUT", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = Result.class)
@RequestMapping(value = {"utils/topic/dilatation"}, method = RequestMethod.PUT, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result expandTopic(@RequestBody AdminExpandTopicModel reqObj) {
if (reqObj == null || !reqObj.legal()) {
return new Result<>(StatusCode.PARAM_ERROR, "param illegal");
}
ClusterDO clusterDO = clusterService.getById(reqObj.getClusterId());
if (clusterDO == null) {
return new Result<>(StatusCode.PARAM_ERROR, "param illegal, cluster not exist");
}
if (!ClusterMetadataManager.isTopicExist(clusterDO.getId(), reqObj.getTopicName())) {
return new Result(StatusCode.PARAM_ERROR, "param illegal, topic not exist");
}
List<Integer> brokerIdList = regionService.getFullBrokerId(reqObj.getClusterId(), reqObj.getRegionIdList(), reqObj.getBrokerIdList());
if (brokerIdList == null || brokerIdList.isEmpty()) {
return new Result<>(StatusCode.PARAM_ERROR, "param illegal, brokerId or regionId illegal");
}
TopicMetadata topicMetadata = AdminUtilConverter.convert2TopicMetadata(reqObj.getTopicName(), reqObj.getPartitionNum(), -1, brokerIdList);
AdminTopicStatusEnum adminTopicStatusEnum = adminTopicService.expandTopic(clusterDO, topicMetadata, SpringContextHolder.getUserName());
if (AdminTopicStatusEnum.SUCCESS.equals(adminTopicStatusEnum)) {
return new Result();
}
return new Result(StatusCode.OPERATION_ERROR, adminTopicStatusEnum.getMessage());
}
@ApiOperation(value = "Topic详情", httpMethod = "GET", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = TopicDetailVO.class)
@RequestMapping(value = {"utils/{clusterId}/topics/{topicName}/detail"}, method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result<TopicDetailVO> getTopicDetail(@PathVariable Long clusterId, @PathVariable String topicName) {
if (clusterId == null || clusterId <= 0 || StringUtils.isEmpty(topicName)) {
return new Result<>(StatusCode.PARAM_ERROR, "params illegal");
}
ClusterDO clusterDO = clusterService.getById(clusterId);
if (clusterDO == null) {
return new Result<>(StatusCode.PARAM_ERROR, "param illegal, cluster not exist");
}
TopicMetadata topicMetadata = ClusterMetadataManager.getTopicMetaData(clusterId, topicName);
if (topicMetadata == null) {
return new Result<>(StatusCode.PARAM_ERROR, "param illegal, topic not exist");
}
TopicDO topicDO = topicManagerService.getByTopicName(clusterId, topicName);
Properties properties = null;
try {
properties = zookeeperService.getTopicProperties(clusterId, topicName);
} catch (Exception e) {
logger.error("");
}
return new Result<>(AdminUtilConverter.convert2TopicDetailVO(clusterDO, topicMetadata, properties, topicDO));
}
}

@@ -0,0 +1,112 @@
package com.xiaojukeji.kafka.manager.web.api.versionone;
import com.xiaojukeji.kafka.manager.common.constant.Constant;
import com.xiaojukeji.kafka.manager.common.constant.StatusCode;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.po.AlarmRuleDO;
import com.xiaojukeji.kafka.manager.common.constant.monitor.MonitorConditionType;
import com.xiaojukeji.kafka.manager.common.constant.monitor.MonitorMetricsType;
import com.xiaojukeji.kafka.manager.common.constant.monitor.MonitorNotifyType;
import com.xiaojukeji.kafka.manager.service.service.AlarmRuleService;
import com.xiaojukeji.kafka.manager.service.utils.SpringContextHolder;
import com.xiaojukeji.kafka.manager.web.converters.AlarmConverter;
import com.xiaojukeji.kafka.manager.web.model.alarm.AlarmRuleModel;
import com.xiaojukeji.kafka.manager.web.vo.alarm.AlarmConstantVO;
import com.xiaojukeji.kafka.manager.web.vo.alarm.AlarmRuleVO;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.*;
import java.util.List;
/**
* @author zengqiao
* @date 19/5/12
*/
@Api(value = "AlarmController", description = "Alarm相关接口")
@Controller
@RequestMapping("api/v1/")
public class AlarmController {
private final static Logger logger = LoggerFactory.getLogger(AlarmController.class);
@Autowired
private AlarmRuleService alarmRuleManagerService;
@ApiOperation(value = "添加告警规则", httpMethod = "POST", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = Result.class)
@RequestMapping(value = "alarms/alarm-rule", method = RequestMethod.POST, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result addAlarmRule(@RequestBody AlarmRuleModel alarmModel) {
if (alarmModel == null || !alarmModel.legal()) {
return new Result(StatusCode.PARAM_ERROR, "param illegal");
}
return alarmRuleManagerService.addAlarmRule(AlarmConverter.convert2AlarmRuleDO(SpringContextHolder.getUserName(), alarmModel));
}
@ApiOperation(value = "删除告警规则", httpMethod = "DELETE", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = Result.class)
@RequestMapping(value = "alarms/alarm-rule", method = RequestMethod.DELETE, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result deleteAlarmRule(@RequestParam("alarmRuleId") Long alarmRuleId) {
if (alarmRuleId == null) {
return new Result(StatusCode.PARAM_ERROR, "param error");
}
return alarmRuleManagerService.deleteById(SpringContextHolder.getUserName(), alarmRuleId);
}
@ApiOperation(value = "修改告警规则", httpMethod = "PUT", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = Result.class)
@RequestMapping(value = "alarms/alarm-rule", method = RequestMethod.PUT, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result modifyAlarmRule(@RequestBody AlarmRuleModel reqObj) {
if (reqObj == null || !reqObj.legal() || reqObj.getId() == null || reqObj.getStatus() == null) {
return new Result(StatusCode.PARAM_ERROR, "param error");
}
return alarmRuleManagerService.updateById(SpringContextHolder.getUserName(), AlarmConverter.convert2AlarmRuleDO(SpringContextHolder.getUserName(), reqObj));
}
@ApiOperation(value = "查询告警规则", httpMethod = "GET", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = AlarmRuleVO.class)
@RequestMapping(value = "alarms/alarm-rules/{alarmRuleId}", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result<AlarmRuleVO> getAlarmRule(@PathVariable Long alarmRuleId) {
if (alarmRuleId == null) {
return new Result<>(StatusCode.PARAM_ERROR, "param error");
}
AlarmRuleDO alarmRuleDO = alarmRuleManagerService.getById(alarmRuleId);
if (alarmRuleDO == null) {
return new Result<>(StatusCode.PARAM_ERROR, "alarm not exist");
}
return new Result<>(AlarmConverter.convert2AlarmRuleVO(alarmRuleDO));
}
@ApiOperation(value = "查询告警规则列表", httpMethod = "GET", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = AlarmRuleVO.class)
@RequestMapping(value = "alarms/alarm-rules", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result<List<AlarmRuleVO>> listAlarmRules() {
List<AlarmRuleDO> alarmRuleDOList = alarmRuleManagerService.listAll();
if (alarmRuleDOList == null) {
return new Result<>(StatusCode.MY_SQL_SELECT_ERROR, Constant.KAFKA_MANAGER_INNER_ERROR);
}
return new Result<>(AlarmConverter.convert2AlarmRuleVOList(alarmRuleDOList));
}
@ApiOperation(value = "告警相关常量", httpMethod = "GET", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = AlarmConstantVO.class)
@RequestMapping(value = "alarms/alarm/constant", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result<AlarmConstantVO> getAlarmConstant() {
AlarmConstantVO alarmConstantVO = new AlarmConstantVO();
alarmConstantVO.setConditionTypeList(MonitorConditionType.toList());
alarmConstantVO.setNotifyTypeList(MonitorNotifyType.toList());
alarmConstantVO.setMetricTypeList(MonitorMetricsType.toList());
return new Result<>(alarmConstantVO);
}
}

@@ -0,0 +1,258 @@
package com.xiaojukeji.kafka.manager.web.api.versionone;
import com.xiaojukeji.kafka.manager.common.entity.dto.analysis.AnalysisBrokerDTO;
import com.xiaojukeji.kafka.manager.common.entity.zookeeper.PartitionState;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.service.service.*;
import com.xiaojukeji.kafka.manager.web.converters.BrokerModelConverter;
import com.xiaojukeji.kafka.manager.common.constant.MetricsType;
import com.xiaojukeji.kafka.manager.common.constant.StatusCode;
import com.xiaojukeji.kafka.manager.common.entity.dto.BrokerBasicDTO;
import com.xiaojukeji.kafka.manager.common.entity.dto.BrokerOverallDTO;
import com.xiaojukeji.kafka.manager.common.entity.dto.BrokerOverviewDTO;
import com.xiaojukeji.kafka.manager.common.entity.dto.TopicOverviewDTO;
import com.xiaojukeji.kafka.manager.common.entity.metrics.BrokerMetrics;
import com.xiaojukeji.kafka.manager.common.entity.po.ClusterDO;
import com.xiaojukeji.kafka.manager.common.entity.po.RegionDO;
import com.xiaojukeji.kafka.manager.common.entity.zookeeper.BrokerMetadata;
import com.xiaojukeji.kafka.manager.common.utils.CopyUtils;
import com.xiaojukeji.kafka.manager.service.cache.ClusterMetadataManager;
import com.xiaojukeji.kafka.manager.web.converters.TopicModelConverter;
import com.xiaojukeji.kafka.manager.web.vo.broker.*;
import com.xiaojukeji.kafka.manager.web.vo.topic.TopicOverviewVO;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import io.swagger.annotations.ApiResponse;
import org.apache.commons.lang.time.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.*;
import java.util.*;
/**
* BrokerController
* @author zengqiao
* @date 19/4/3
*/
@Api(value = "BrokerController", description = "Broker相关接口")
@Controller
@RequestMapping("api/v1/")
public class BrokerController {
private final static Logger logger = LoggerFactory.getLogger(BrokerController.class);
@Autowired
private ClusterService clusterService;
@Autowired
private BrokerService brokerService;
@Autowired
private TopicService topicService;
@Autowired
private RegionService regionService;
@Autowired
private AnalysisService analysisService;
@ApiOperation(value = "Broker概览", httpMethod = "GET", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = BrokerOverviewVO.class)
@RequestMapping(value = "{clusterId}/brokers/overview", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result<List<BrokerOverviewVO>> getBrokerOverview(@ApiParam(name = "clusterId", required = true, value = "集群ID") @PathVariable Long clusterId) {
if (clusterId == null) {
return new Result<>(StatusCode.PARAM_ERROR, "clusterId illegal");
}
List<RegionDO> regionDOList = new ArrayList<>();
List<BrokerOverviewDTO> brokerOverviewDTOList = null;
try {
brokerOverviewDTOList = brokerService.getBrokerOverviewList(clusterId, BrokerMetrics.getFieldNameList(MetricsType.BROKER_OVER_VIEW_METRICS), true);
regionDOList = regionService.getByClusterId(clusterId);
} catch (Exception e) {
logger.error("getBrokerOverview@BrokerController, get failed, clusterId:{}.", clusterId);
}
return new Result<>(BrokerModelConverter.convert2BrokerOverviewList(brokerOverviewDTOList, regionDOList));
}
@ApiOperation(value = "Broker总揽", httpMethod = "GET", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = BrokerOverallVO.class)
@RequestMapping(value = "{clusterId}/brokers/overall", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result<List<BrokerOverallVO>> getBrokersOverall(@PathVariable Long clusterId) {
if (clusterId == null || clusterId < 0) {
return new Result<>(StatusCode.PARAM_ERROR, "param illegal");
}
List<BrokerOverallDTO> brokerOverallDTOList = brokerService.getBrokerOverallList(clusterId, BrokerMetrics.getFieldNameList(MetricsType.BROKER_OVER_ALL_METRICS));
if (brokerOverallDTOList == null) {
return new Result<>();
}
List<RegionDO> regionDOList = regionService.getByClusterId(clusterId);
return new Result<>(BrokerModelConverter.convert2BrokerOverallVOList(clusterId, brokerOverallDTOList, regionDOList));
}
@ApiOperation(value = "集群Broker元信息列表", httpMethod = "GET", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = BrokerMetadataVO.class)
@RequestMapping(value = "{clusterId}/brokers/broker-metadata", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result<List<BrokerMetadataVO>> getBrokerMetadataList(@ApiParam(name = "clusterId", required = true, value = "集群ID") @PathVariable Long clusterId) {
if (clusterId == null) {
return new Result<>(StatusCode.PARAM_ERROR, "clusterId illegal");
}
List<Integer> brokerIdList = ClusterMetadataManager.getBrokerIdList(clusterId);
List<BrokerMetadataVO> brokerMetadataVOList = new ArrayList<>();
for (Integer brokerId: brokerIdList) {
BrokerMetadata brokerMetadata = ClusterMetadataManager.getBrokerMetadata(clusterId, brokerId);
if (brokerMetadata == null) {
continue;
}
brokerMetadataVOList.add(new BrokerMetadataVO(brokerMetadata.getBrokerId(), brokerMetadata.getHost()));
}
return new Result<>(brokerMetadataVOList);
}
@ApiOperation(value = "获取Broker基本信息", httpMethod = "GET", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = BrokerBasicVO.class)
@RequestMapping(value = "{clusterId}/brokers/{brokerId}/basic-info", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result<BrokerBasicVO> getBrokerBasicInfo(@ApiParam(name = "clusterId", required = true, value = "集群Id") @PathVariable Long clusterId, @ApiParam(name = "brokerId", required = true, value = "BrokerId") @PathVariable Integer brokerId) {
if (clusterId < 0 || brokerId < 0) {
return new Result<>(StatusCode.PARAM_ERROR, "param illegal");
}
BrokerBasicDTO brokerBasicDTO = brokerService.getBrokerBasicDTO(clusterId, brokerId, BrokerMetrics.getFieldNameList(5));
if (brokerBasicDTO == null) {
return new Result<>(StatusCode.PARAM_ERROR, "Broker不存在");
}
BrokerBasicVO brokerBasicVO = new BrokerBasicVO();
CopyUtils.copyProperties(brokerBasicVO, brokerBasicDTO);
return new Result<>(brokerBasicVO);
}
@ApiOperation(value = "获取Broker上的分区信息", httpMethod = "GET", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = BrokerPartitionsVO.class)
@RequestMapping(value = "{clusterId}/brokers/{brokerId}/partitions", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result<List<BrokerPartitionsVO>> getBrokerPartitions(@PathVariable Long clusterId, @PathVariable Integer brokerId) {
if (clusterId == null || clusterId < 0 || brokerId == null || brokerId < 0) {
logger.error("getBrokerPartitions@BrokerController, parameter is invalid.");
return new Result<>(StatusCode.PARAM_ERROR, "参数不合法");
}
Map<String, List<PartitionState>> partitionStateMap = null;
try {
partitionStateMap = topicService.getTopicPartitionState(clusterId, brokerId);
} catch (Exception e) {
logger.error("getBrokerPartitions@BrokerController, get BROKER topic partition state failed, clusterId:{} brokerId:{}.", clusterId, brokerId, e);
return new Result<>(StatusCode.PARAM_ERROR, "get BROKER topic partition state error");
}
if (partitionStateMap == null) {
logger.info("getBrokerPartitions@BrokerController, BROKER is empty none topic in this BROKER, clusterId:{} brokerId:{}.", clusterId, brokerId);
return new Result<>();
}
return new Result<>(BrokerModelConverter.convert2BrokerPartitionsVOList(clusterId, brokerId, partitionStateMap));
}
@ApiOperation(value = "获取Broker实时流量信息", httpMethod = "GET", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = BrokerStatusVO.class)
@RequestMapping(value = "{clusterId}/brokers/{brokerId}/metrics", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result<BrokerStatusVO> getBrokerMetrics(@PathVariable Long clusterId, @PathVariable Integer brokerId) {
if (clusterId == null || brokerId == null) {
return new Result<>(StatusCode.PARAM_ERROR, "param illegal");
}
BrokerMetrics brokerMetrics = brokerService.getSpecifiedBrokerMetrics(clusterId, brokerId, BrokerMetrics.getFieldNameList(MetricsType.BROKER_REAL_TIME_METRICS), false);
if (brokerMetrics == null) {
return new Result<>(StatusCode.PARAM_ERROR, "param illegal, broker not exist");
}
List<BrokerMetrics> brokerMetricsList = new ArrayList<>();
brokerMetricsList.add(brokerMetrics);
return new Result<>(BrokerModelConverter.convertBroker2BrokerMetricsVO(brokerMetricsList));
}
@ApiOperation(value = "获取Broker历史流量信息", httpMethod = "GET", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = BrokerMetricsVO.class)
@RequestMapping(value = "{clusterId}/brokers/{brokerId}/metrics-history", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result<List<BrokerMetricsVO>> getBrokerMetricsHistory(@PathVariable Long clusterId, @PathVariable Integer brokerId, @RequestParam("startTime") Long startTime, @RequestParam("endTime") Long endTime) {
if (clusterId == null || brokerId == null || startTime == null || endTime == null) {
return new Result<>(StatusCode.PARAM_ERROR, "param illegal");
}
List<BrokerMetrics> brokerMetricsList = brokerService.getBrokerMetricsByInterval(clusterId, brokerId, new Date(startTime), new Date(endTime));
return new Result<>(BrokerModelConverter.convert2BrokerMetricsVOList(brokerMetricsList));
}
@ApiOperation(value = "获取Broker上的Topic列表", httpMethod = "GET", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = TopicOverviewVO.class)
@RequestMapping(value = "{clusterId}/brokers/{brokerId}/topics", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result<List<TopicOverviewVO>> getBrokerTopics(@PathVariable Long clusterId, @PathVariable Integer brokerId) {
if (clusterId == null || clusterId < 0 || brokerId == null || brokerId < 0) {
logger.error("getTopicListByBroker@BrokerController, parameter is invalid.");
return new Result<>(StatusCode.PARAM_ERROR, "param illegal");
}
ClusterDO cluster = clusterService.getById(clusterId);
if (cluster == null) {
return new Result<>(StatusCode.PARAM_ERROR, "param illegal, cluster not exist");
}
List<TopicOverviewDTO> topicOverviewDTOList;
try {
topicOverviewDTOList = topicService.getTopicOverviewDTOList(cluster.getId(), brokerId, null);
} catch (Exception e) {
logger.error("getTopicListByBroker@BrokerController, get topics error, clusterId:{} brokerId:{}.", clusterId, brokerId, e);
return new Result<>(StatusCode.PARAM_ERROR, "getTopicListByBroker error");
}
if (topicOverviewDTOList == null || topicOverviewDTOList.isEmpty()) {
return new Result<>(StatusCode.PARAM_ERROR, "topics is null. clusterId is " + clusterId + ", brokerId is " + brokerId);
}
return new Result<>(TopicModelConverter.convert2TopicOverviewVOList(cluster, topicOverviewDTOList, null, null));
}
@ApiOperation(value = "删除指定Broker", httpMethod = "DELETE", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = Result.class)
@RequestMapping(value = "{clusterId}/brokers/{brokerId}", method = RequestMethod.DELETE, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result deleteBroker(@ApiParam(name = "clusterId", required = true, value = "集群Id") @PathVariable Long clusterId,
@ApiParam(name = "brokerId", required = true, value = "Broker的Id") @PathVariable Integer brokerId) {
if (clusterId < 0 || brokerId < 0) {
logger.error("deleteBrokerInfo@BrokerController, param illegal");
return new Result(StatusCode.PARAM_ERROR, "clusterId or brokerId illegal");
}
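// Broker deletion is not implemented yet, so the request is always rejected here.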
return new Result(StatusCode.MY_SQL_DELETE_ERROR, "delete broker failed");
}
@ApiOperation(value = "Broker关键指标", httpMethod = "GET", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = BrokerKeyMetricsVO.class)
@RequestMapping(value = "{clusterId}/brokers/{brokerId}/key-metrics", method = RequestMethod.GET, produces = {"application/json"})
@ResponseBody
public Result<Map<String, List<BrokerKeyMetricsVO>>> getBrokerKeyMetrics(@PathVariable Long clusterId, @PathVariable Integer brokerId, @RequestParam("startTime") Long startTime, @RequestParam("endTime") Long endTime) {
if (clusterId == null || brokerId == null || startTime == null || endTime == null) {
return new Result<>(StatusCode.PARAM_ERROR, "param illegal");
}
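// Query the same window shifted by 0, -1 and -7 days so the caller can compare today with yesterday and last week.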
List<BrokerMetrics> todayBrokerMetricsList = brokerService.getBrokerMetricsByInterval(clusterId, brokerId, new Date(startTime), new Date(endTime));
List<BrokerMetrics> yesterdayBrokerMetricsList = brokerService.getBrokerMetricsByInterval(clusterId, brokerId, DateUtils.addDays(new Date(startTime), -1), DateUtils.addDays(new Date(endTime), -1));
List<BrokerMetrics> lastWeekBrokerMetricsList = brokerService.getBrokerMetricsByInterval(clusterId, brokerId, DateUtils.addDays(new Date(startTime), -7), DateUtils.addDays(new Date(endTime), -7));
Map<String, List<BrokerKeyMetricsVO>> healthMap = new HashMap<>();
healthMap.put("today", BrokerModelConverter.convert2BrokerKeyMetricsVOList(todayBrokerMetricsList));
healthMap.put("yesterday", BrokerModelConverter.convert2BrokerKeyMetricsVOList(yesterdayBrokerMetricsList));
healthMap.put("lastWeek", BrokerModelConverter.convert2BrokerKeyMetricsVOList(lastWeekBrokerMetricsList));
return new Result<>(healthMap);
}
@ApiOperation(value = "BrokerTopic分析", httpMethod = "GET", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = AnalysisBrokerVO.class)
@RequestMapping(value = "{clusterId}/brokers/{brokerId}/analysis", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result<AnalysisBrokerVO> getTopicAnalyzer(@PathVariable Long clusterId, @PathVariable Integer brokerId) {
if (clusterId == null || clusterId < 0 || brokerId == null || brokerId < 0) {
return new Result<>(StatusCode.PARAM_ERROR,"param illegal, please check clusterId and brokerId");
}
AnalysisBrokerDTO analysisBrokerDTO = analysisService.doAnalysisBroker(clusterId, brokerId);
return new Result<>(BrokerModelConverter.convert2AnalysisBrokerVO(analysisBrokerDTO));
}
}

@@ -0,0 +1,214 @@
package com.xiaojukeji.kafka.manager.web.api.versionone;
import com.xiaojukeji.kafka.manager.web.converters.BrokerModelConverter;
import com.xiaojukeji.kafka.manager.web.model.ClusterModel;
import com.xiaojukeji.kafka.manager.web.vo.cluster.ClusterBasicVO;
import com.xiaojukeji.kafka.manager.common.constant.Constant;
import com.xiaojukeji.kafka.manager.common.constant.MetricsType;
import com.xiaojukeji.kafka.manager.common.constant.StatusCode;
import com.xiaojukeji.kafka.manager.common.entity.metrics.BrokerMetrics;
import com.xiaojukeji.kafka.manager.common.entity.po.ClusterDO;
import com.xiaojukeji.kafka.manager.common.entity.po.ClusterMetricsDO;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.po.ControllerDO;
import com.xiaojukeji.kafka.manager.common.utils.CopyUtils;
import com.xiaojukeji.kafka.manager.common.utils.DateUtils;
import com.xiaojukeji.kafka.manager.service.service.BrokerService;
import com.xiaojukeji.kafka.manager.service.service.ClusterService;
import com.xiaojukeji.kafka.manager.service.service.ConsumerService;
import com.xiaojukeji.kafka.manager.service.service.RegionService;
import com.xiaojukeji.kafka.manager.service.utils.SpringContextHolder;
import com.xiaojukeji.kafka.manager.web.converters.ClusterModelConverter;
import com.xiaojukeji.kafka.manager.web.vo.KafkaControllerVO;
import com.xiaojukeji.kafka.manager.web.vo.broker.BrokerStatusVO;
import com.xiaojukeji.kafka.manager.web.vo.cluster.ClusterMetricsVO;
import com.xiaojukeji.kafka.manager.web.vo.cluster.ClusterDetailVO;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import io.swagger.annotations.ApiResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
/**
* ClusterController
* @author zengqiao
* @date 19/4/3
*/
@Api(value = "ClusterController", description = "Cluster相关接口")
@Controller
@RequestMapping("api/v1/")
public class ClusterController {
private final static Logger logger = LoggerFactory.getLogger(ClusterController.class);
@Autowired
private ClusterService clusterService;
@Autowired
private RegionService regionService;
@Autowired
private ConsumerService consumerService;
@Autowired
private BrokerService brokerService;
@ApiOperation(value = "Kafka版本列表", httpMethod = "GET", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = String.class)
@RequestMapping(value = "clusters/kafka-version", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result<List<String>> getKafkaVersionList() {
List<String> kafkaVersionList = Arrays.asList("0.10.0", "0.10.1", "0.10.2", "0.11.0", "1.0", "1.1", "2.0", "2.1", "2.2", "2.3");
return new Result<>(kafkaVersionList);
}
@ApiOperation(value = "集群列表(基本信息)", httpMethod = "GET", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = ClusterBasicVO.class)
@RequestMapping(value = "clusters/basic-info", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result<List<ClusterBasicVO>> getBasicList() {
List<ClusterDO> clusterDOList = clusterService.listAll();
if (clusterDOList == null) {
return new Result<>(StatusCode.MY_SQL_SELECT_ERROR, Constant.KAFKA_MANAGER_INNER_ERROR);
}
return new Result<>(ClusterModelConverter.convert2ClusterBasicVOList(clusterDOList));
}
@ApiOperation(value = "集群列表(详细信息)", httpMethod = "GET", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = ClusterDetailVO.class)
@RequestMapping(value = "clusters", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result<List<ClusterDetailVO>> getClusterDetailList() {
List<ClusterDO> clusterDOList = clusterService.listAll();
if (clusterDOList == null) {
return new Result<>(StatusCode.MY_SQL_SELECT_ERROR, Constant.KAFKA_MANAGER_INNER_ERROR);
}
Map<Long, Long> clusterIdRegionNumMap = regionService.getRegionNum();
Map<Long, Integer> consumerGroupNumMap = consumerService.getConsumerGroupNumMap(clusterDOList);
return new Result<>(ClusterModelConverter.convert2ClusterDetailVOList(clusterDOList, clusterIdRegionNumMap, consumerGroupNumMap));
}
@ApiOperation(value = "指定集群的信息", httpMethod = "GET", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = ClusterDetailVO.class)
@RequestMapping(value = "clusters/{clusterId}", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result<ClusterBasicVO> getBasicInfo(@ApiParam(name = "clusterId", required = true, value = "集群Id") @PathVariable Long clusterId) {
if (clusterId == null || clusterId < 0) {
return new Result<>(StatusCode.PARAM_ERROR, "param illegal");
}
ClusterDO cluster = clusterService.getById(clusterId);
if (null == cluster) {
return new Result<>(StatusCode.PARAM_ERROR, "cluster not exist");
}
return new Result<>(ClusterModelConverter.convert2ClusterBasicVO(cluster));
}
@ApiOperation(value = "添加集群", httpMethod = "POST", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = Result.class)
@RequestMapping(value = "clusters", method = RequestMethod.POST, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result addNewCluster(@RequestBody ClusterModel reqObj) {
if (reqObj == null || !reqObj.legal()) {
return new Result(StatusCode.PARAM_ERROR, "param illegal");
}
return clusterService.addNewCluster(ClusterModelConverter.convert2ClusterDO(reqObj), SpringContextHolder.getUserName());
}
@ApiOperation(value = "修改集群", httpMethod = "PUT", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = Result.class)
@RequestMapping(value = "clusters", method = RequestMethod.PUT, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result modifyCluster(@RequestBody ClusterModel reqModel) {
if (reqModel == null || !reqModel.legal() || reqModel.getClusterId() == null) {
return new Result(StatusCode.PARAM_ERROR, "param illegal");
}
ClusterDO oldClusterDO = clusterService.getById(reqModel.getClusterId());
if (oldClusterDO == null) {
return new Result(StatusCode.PARAM_ERROR, "param illegal, cluster not exist");
}
ClusterDO newClusterDO = ClusterModelConverter.convert2ClusterDO(reqModel);
return clusterService.updateCluster(newClusterDO, !oldClusterDO.getZookeeper().equals(newClusterDO.getZookeeper()), SpringContextHolder.getUserName());
}
@ApiOperation(value = "查询集群实时流量信息", httpMethod = "GET", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = BrokerStatusVO.class)
@RequestMapping(value = "clusters/{clusterId}/metrics", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result<BrokerStatusVO> getClusterSummaryMetrics(@ApiParam(name = "clusterId", required = true, value = "集群Id") @PathVariable Long clusterId) {
if (clusterId == null || clusterId < 0) {
return new Result<>(StatusCode.PARAM_ERROR, "clusterId illegal");
}
Map<Integer, BrokerMetrics> brokerMap = null;
try {
brokerMap = brokerService.getSpecifiedBrokerMetrics(clusterId, BrokerMetrics.getFieldNameList(MetricsType.BROKER_REAL_TIME_METRICS), false);
} catch (Exception e) {
logger.error("getBrokerSummaryMetrics@BrokerController, get failed, clusterId:{}.", clusterId);
}
if (brokerMap == null || brokerMap.isEmpty()) {
return new Result<>();
}
return new Result<>(BrokerModelConverter.convertBroker2BrokerMetricsVO(new ArrayList<>(brokerMap.values())));
}
@ApiOperation(value = "获取集群的历史流量信息", httpMethod = "GET", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = ClusterMetricsVO.class)
@RequestMapping(value = "clusters/{clusterId}/metrics-history", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result getClusterMetricsHistory(@ApiParam(name = "clusterId", required = true, value = "集群Id") @PathVariable Long clusterId,
@ApiParam(name = "startTime", required = true, value = "起始时间") @RequestParam("startTime") Long startTime,
@ApiParam(name = "endTime", required = true, value = "截止时间") @RequestParam("endTime") Long endTime) {
if (clusterId == null || startTime == null || endTime == null) {
return new Result(StatusCode.PARAM_ERROR, "param illegal");
}
ClusterDO cluster = clusterService.getById(clusterId);
if (cluster == null) {
return new Result(StatusCode.PARAM_ERROR, "param illegal, cluster not exist");
}
List<ClusterMetricsDO> clusterMetricsList = null;
try {
clusterMetricsList = clusterService.getClusterMetricsByInterval(clusterId, DateUtils.long2Date(startTime), DateUtils.long2Date(endTime));
} catch (Exception e) {
logger.error("getClusterMetricsHistory@ClusterController, select mysql:cluster_metrics failed, clusterId:{}.", clusterId, e);
}
if (clusterMetricsList == null) {
return new Result(StatusCode.MY_SQL_SELECT_ERROR, Constant.KAFKA_MANAGER_INNER_ERROR);
}
List<ClusterMetricsVO> result = new ArrayList<>();
for (ClusterMetricsDO metrics : clusterMetricsList) {
ClusterMetricsVO vo = new ClusterMetricsVO();
CopyUtils.copyProperties(vo, metrics);
vo.setGmtCreate(metrics.getGmtCreate().getTime());
result.add(vo);
}
return new Result<>(result);
}
@ApiOperation(value = "集群Controller变更历史", httpMethod = "GET", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = KafkaControllerVO.class)
@RequestMapping(value = "clusters/{clusterId}/controller-history", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result<List<KafkaControllerVO>> getControllerHistory(@ApiParam(name = "clusterId", required = true, value = "集群Id") @PathVariable Long clusterId) {
if (clusterId == null || clusterId <= 0) {
return new Result<>(StatusCode.PARAM_ERROR,"param illegal");
}
List<ControllerDO> controllerDOList = null;
try {
controllerDOList = clusterService.getKafkaControllerHistory(clusterId);
} catch (Exception e) {
logger.error("getControllerHistory@ClusterController, get failed, clusterId:{}.", clusterId, e);
return new Result<>(StatusCode.MY_SQL_SELECT_ERROR, Constant.KAFKA_MANAGER_INNER_ERROR);
}
return new Result<>(ClusterModelConverter.convert2KafkaControllerVOList(controllerDOList));
}
}

View File

@@ -0,0 +1,168 @@
package com.xiaojukeji.kafka.manager.web.api.versionone;
import com.xiaojukeji.kafka.manager.web.converters.ConsumerModelConverter;
import com.xiaojukeji.kafka.manager.web.vo.consumer.ConsumerGroupDetailVO;
import com.xiaojukeji.kafka.manager.web.vo.consumer.ConsumerGroupVO;
import com.xiaojukeji.kafka.manager.common.constant.Constant;
import com.xiaojukeji.kafka.manager.common.constant.OffsetStoreLocation;
import com.xiaojukeji.kafka.manager.common.constant.StatusCode;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.dto.consumer.ConsumeDetailDTO;
import com.xiaojukeji.kafka.manager.common.entity.dto.consumer.ConsumerGroupDTO;
import com.xiaojukeji.kafka.manager.common.entity.dto.PartitionOffsetDTO;
import com.xiaojukeji.kafka.manager.common.entity.po.ClusterDO;
import com.xiaojukeji.kafka.manager.service.service.ClusterService;
import com.xiaojukeji.kafka.manager.service.service.ConsumerService;
import com.xiaojukeji.kafka.manager.service.service.TopicService;
import com.xiaojukeji.kafka.manager.web.model.OffsetResetModel;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import io.swagger.annotations.ApiResponse;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.*;
import java.util.List;
/**
* @author zengqiao
* @date 19/5/14
*/
@Api(value = "ConsumerController", description = "Consumer相关接口")
@Controller
@RequestMapping("/api/v1/")
public class ConsumerController {
private static final Logger logger = LoggerFactory.getLogger(ConsumerController.class);
@Autowired
private ConsumerService consumerService;
@Autowired
private ClusterService clusterService;
@Autowired
private TopicService topicService;
@ApiOperation(value = "获取消费组列表", httpMethod = "GET", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = ConsumerGroupVO.class)
@RequestMapping(value = "{clusterId}/consumers/consumer-groups", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result<List<ConsumerGroupVO>> getConsumerGroupList(@ApiParam(name = "clusterId", required = true, value = "集群Id") @PathVariable Long clusterId) {
if (clusterId == null || clusterId <= 0) {
return new Result<>(StatusCode.PARAM_ERROR, "params illegal");
}
ClusterDO cluster = clusterService.getById(clusterId);
if (cluster == null) {
return new Result<>(StatusCode.PARAM_ERROR, "params illegal, cluster not exist");
}
return new Result<>(ConsumerModelConverter.convert2ConsumerGroupVOList(consumerService.getConsumerGroupList(cluster.getId())));
}
@ApiOperation(value = "查询消费Topic的消费组", httpMethod = "GET", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = ConsumerGroupVO.class)
@RequestMapping(value = "{clusterId}/topics/{topicName}/consumer-groups", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result<List<ConsumerGroupVO>> getTopicConsumerGroup(@ApiParam(name = "clusterId", required = true, value = "集群Id") @PathVariable Long clusterId,
@ApiParam(name = "topicName", required = true, value = "Topic名称") @PathVariable String topicName) {
if (clusterId == null || clusterId < 0 || StringUtils.isEmpty(topicName)) {
return new Result<>(StatusCode.PARAM_ERROR, "params illegal");
}
ClusterDO cluster = clusterService.getById(clusterId);
if (cluster == null) {
return new Result<>(StatusCode.PARAM_ERROR, "params illegal, cluster not exist");
}
return new Result<>(ConsumerModelConverter.convert2ConsumerGroupVOList(consumerService.getConsumerGroupList(cluster.getId(), topicName)));
}
@ApiOperation(value = "查询消费组的消费详情", httpMethod = "GET", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = ConsumerGroupDetailVO.class)
@RequestMapping(value = "{clusterId}/consumers/{consumerGroup}/topics/{topicName}/consume-detail", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result<List<ConsumerGroupDetailVO>> getConsumerGroupConsumeDetail(@ApiParam(name = "clusterId", required = true, value = "集群Id") @PathVariable Long clusterId,
@ApiParam(name = "consumerGroup", required = true, value = "消费组") @PathVariable String consumerGroup,
@ApiParam(name = "topicName", required = true, value = "Topic名称") @PathVariable String topicName,
@ApiParam(name = "location", required = true, value = "存储位置") @RequestParam("location") String location) {
if (clusterId < 0 || StringUtils.isEmpty(topicName) || StringUtils.isEmpty(consumerGroup) || StringUtils.isEmpty(location)) {
return new Result<>(StatusCode.PARAM_ERROR, "param illegal");
}
ClusterDO cluster = clusterService.getById(clusterId);
if (cluster == null) {
return new Result<>(StatusCode.PARAM_ERROR, "param illegal, cluster not exist");
}
location = location.toLowerCase();
OffsetStoreLocation offsetStoreLocation = OffsetStoreLocation.getOffsetStoreLocation(location);
if (offsetStoreLocation == null) {
return new Result<>(StatusCode.PARAM_ERROR, "param illegal, offset location illegal");
}
ConsumerGroupDTO consumeGroupDTO = new ConsumerGroupDTO(cluster.getId(), consumerGroup, offsetStoreLocation);
try {
List<ConsumeDetailDTO> consumeDetailDTOList = consumerService.getConsumeDetail(cluster, topicName, consumeGroupDTO);
return new Result<>(ConsumerModelConverter.convert2ConsumerGroupDetailVO(clusterId, topicName, consumerGroup, location, consumeDetailDTOList));
} catch (Exception e) {
logger.error("getConsumerGroupConsumeDetail@ConsumerControlller, get consume detail failed, consumerGroup:{}.", consumeGroupDTO, e);
}
return new Result<>(StatusCode.RES_UNREADY, Constant.KAFKA_MANAGER_INNER_ERROR + ", get consume detail failed");
}
@ApiOperation(value = "查询消费组消费的Topic", httpMethod = "GET", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = String.class)
@RequestMapping(value = "{clusterId}/consumer/{consumerGroup}/topics", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result<List<String>> getConsumerGroupConsumedTopicList(@ApiParam(name = "clusterId", required = true, value = "集群Id") @PathVariable Long clusterId,
@ApiParam(name = "consumerGroup", required = true, value = "消费组") @PathVariable String consumerGroup,
@ApiParam(name = "location", required = true, value = "消费组存储位置") @RequestParam("location") String location) {
if (clusterId < 0 || StringUtils.isEmpty(consumerGroup) || StringUtils.isEmpty(location)) {
return new Result<>(StatusCode.PARAM_ERROR, "params illegal");
}
ClusterDO cluster = clusterService.getById(clusterId);
if (cluster == null) {
return new Result<>(StatusCode.PARAM_ERROR, "params illegal, cluster not exist");
}
location = location.toLowerCase();
OffsetStoreLocation offsetStoreLocation = OffsetStoreLocation.getOffsetStoreLocation(location);
if (offsetStoreLocation == null) {
return new Result<>(StatusCode.PARAM_ERROR, "param illegal, offset location illegal");
}
ConsumerGroupDTO consumeGroupDTO = new ConsumerGroupDTO(cluster.getId(), consumerGroup, offsetStoreLocation);
return new Result<>(consumerService.getConsumerGroupConsumedTopicList(cluster, consumeGroupDTO));
}
@ApiOperation(value = "重置消费偏移", httpMethod = "PUT", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = Result.class)
@RequestMapping(value = "consumers/offsets", method = RequestMethod.PUT, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result<List<Result>> resetConsumeOffsets(@RequestBody OffsetResetModel offsetResetModel) {
if (offsetResetModel == null || !offsetResetModel.legal()) {
return new Result<>(StatusCode.PARAM_ERROR,"param illegal");
}
ClusterDO cluster = clusterService.getById(offsetResetModel.getClusterId());
if (cluster == null) {
return new Result<>(StatusCode.PARAM_ERROR,"param illegal, cluster not exist");
}
List<PartitionOffsetDTO> partitionOffsetDTOList = offsetResetModel.getOffsetList();
if (offsetResetModel.getTimestamp() != null) {
partitionOffsetDTOList = topicService.getPartitionOffsetList(cluster, offsetResetModel.getTopicName(), offsetResetModel.getTimestamp());
}
if (partitionOffsetDTOList == null || partitionOffsetDTOList.isEmpty()) {
return new Result<>(StatusCode.PARAM_ERROR, "param illegal, partition offset list is empty");
}
ConsumerGroupDTO consumerGroupDTO = new ConsumerGroupDTO(cluster.getId(), offsetResetModel.getConsumerGroup(), OffsetStoreLocation.getOffsetStoreLocation(offsetResetModel.getLocation()));
List<Result> resultList = consumerService.resetConsumerOffset(cluster, offsetResetModel.getTopicName(), consumerGroupDTO, partitionOffsetDTOList);
for (Result result: resultList) {
if (!StatusCode.SUCCESS.equals(result.getCode())) {
return new Result<>(StatusCode.OPERATION_ERROR, resultList, "operation failed");
}
}
return new Result<>(resultList);
}
}
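For reference, a hypothetical client-side sketch of the offset-reset call above. The request body mirrors the fields the controller reads from OffsetResetModel (clusterId, topicName, consumerGroup, location, timestamp); the concrete values, the base URL and the "broker" location string are illustrative assumptions, and a valid login session would still be required by the permission interceptor.

import java.util.HashMap;
import java.util.Map;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
import org.springframework.http.MediaType;
import org.springframework.web.client.RestTemplate;

public class OffsetResetClientSketch {
    public static void main(String[] args) {
        // field names follow the getters used by resetConsumeOffsets; values are placeholders
        Map<String, Object> body = new HashMap<>();
        body.put("clusterId", 1L);
        body.put("topicName", "test_topic");
        body.put("consumerGroup", "test_group");
        body.put("location", "broker");                                   // assumed OffsetStoreLocation value
        body.put("timestamp", System.currentTimeMillis() - 3600_000L);    // reset to one hour ago

        HttpHeaders headers = new HttpHeaders();
        headers.setContentType(MediaType.APPLICATION_JSON);

        RestTemplate restTemplate = new RestTemplate();
        String response = restTemplate.exchange(
                "http://localhost:8080/api/v1/consumers/offsets",
                HttpMethod.PUT, new HttpEntity<>(body, headers), String.class).getBody();
        System.out.println(response);
    }
}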

View File

@@ -0,0 +1,69 @@
package com.xiaojukeji.kafka.manager.web.api.versionone;
import com.xiaojukeji.kafka.manager.common.constant.StatusCode;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.bizenum.AccountRoleEnum;
import com.xiaojukeji.kafka.manager.service.service.LoginService;
import com.xiaojukeji.kafka.manager.web.model.LoginModel;
import com.xiaojukeji.kafka.manager.web.vo.AccountVO;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiResponse;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.*;
import javax.servlet.http.HttpServletRequest;
/**
* Login
* @author huangyiminghappy@163.com
* @date 19/4/29
*/
@Api(value = "LoginController", description = "Login相关接口")
@Controller
@RequestMapping("")
public class LoginController {
private final static Logger logger = LoggerFactory.getLogger(LoginController.class);
@Autowired
private LoginService loginService;
@ApiOperation(value = "登陆", httpMethod = "POST", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = AccountVO.class)
@RequestMapping(value = "api/v1/login/login", method = RequestMethod.POST, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result login(HttpServletRequest request, @RequestBody LoginModel loginModel){
if (loginModel == null || !loginModel.legal()) {
return new Result(StatusCode.PARAM_ERROR, "param error");
}
AccountRoleEnum accountRoleEnum = null;
try {
Result result = loginService.login(request, loginModel.getUsername(), loginModel.getPassword());
if (!StatusCode.SUCCESS.equals(result.getCode())) {
return result;
}
accountRoleEnum = (AccountRoleEnum) result.getData();
} catch (Exception e) {
logger.error("login@LoginController, login failed, req:{}.", loginModel, e);
return new Result(StatusCode.PARAM_ERROR, "param error");
}
return new Result<>(new AccountVO(loginModel.getUsername(), null, accountRoleEnum.getRole()));
}
@ApiOperation(value = "登出", httpMethod = "POST", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = Result.class)
@RequestMapping(value = "api/v1/login/logoff", method = RequestMethod.POST, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result logoff(HttpServletRequest request, @RequestParam(value="username") String username) {
if (StringUtils.isEmpty(username)) {
return new Result(StatusCode.PARAM_ERROR, "param error");
}
return loginService.logoff(request, username);
}
}
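A minimal sketch of how a client might call the login endpoint and keep the returned session cookie; the credentials and base URL are placeholders, and the assumption that this cookie is what authorizes later /api/v1/** calls follows from the interceptor registration further below.

import java.util.HashMap;
import java.util.Map;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.web.client.RestTemplate;

public class LoginClientSketch {
    public static void main(String[] args) {
        Map<String, String> body = new HashMap<>();
        body.put("username", "admin");   // placeholder credentials, not from this commit
        body.put("password", "admin");

        HttpHeaders headers = new HttpHeaders();
        headers.setContentType(MediaType.APPLICATION_JSON);

        ResponseEntity<String> response = new RestTemplate().exchange(
                "http://localhost:8080/api/v1/login/login",
                HttpMethod.POST, new HttpEntity<>(body, headers), String.class);

        // keep the session cookie for subsequent requests to protected endpoints
        String cookie = response.getHeaders().getFirst(HttpHeaders.SET_COOKIE);
        System.out.println(response.getBody() + " / cookie: " + cookie);
    }
}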

View File

@@ -0,0 +1,353 @@
package com.xiaojukeji.kafka.manager.web.api.versionone;
import com.xiaojukeji.kafka.manager.common.entity.po.*;
import com.xiaojukeji.kafka.manager.service.service.*;
import com.xiaojukeji.kafka.manager.web.model.order.OrderPartitionExecModel;
import com.xiaojukeji.kafka.manager.web.model.order.OrderPartitionModel;
import com.xiaojukeji.kafka.manager.common.constant.Constant;
import com.xiaojukeji.kafka.manager.common.entity.bizenum.AdminTopicStatusEnum;
import com.xiaojukeji.kafka.manager.common.constant.StatusCode;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.metrics.TopicMetrics;
import com.xiaojukeji.kafka.manager.common.entity.bizenum.OrderStatusEnum;
import com.xiaojukeji.kafka.manager.common.entity.bizenum.OrderTypeEnum;
import com.xiaojukeji.kafka.manager.common.entity.zookeeper.TopicMetadata;
import com.xiaojukeji.kafka.manager.service.cache.ClusterMetadataManager;
import com.xiaojukeji.kafka.manager.service.utils.ListUtils;
import com.xiaojukeji.kafka.manager.service.utils.SpringContextHolder;
import com.xiaojukeji.kafka.manager.web.converters.OrderConverter;
import com.xiaojukeji.kafka.manager.web.model.order.OrderTopicExecModel;
import com.xiaojukeji.kafka.manager.web.model.order.OrderTopicModel;
import com.xiaojukeji.kafka.manager.web.vo.topic.TopicOverviewVO;
import com.xiaojukeji.kafka.manager.web.vo.order.OrderPartitionVO;
import com.xiaojukeji.kafka.manager.web.vo.order.OrderTopicVO;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.*;
import java.util.*;
/**
* @author zengqiao
* @date 19/6/2
*/
@Api(value = "OrderController", description = "工单相关接口")
@Controller
@RequestMapping("api/v1/")
public class OrderController {
private final static Logger logger = LoggerFactory.getLogger(OrderController.class);
@Autowired
private ClusterService clusterService;
@Autowired
private OrderService orderService;
@Autowired
private AdminTopicService adminTopicService;
@Autowired
private RegionService regionService;
@Autowired
private TopicService topicService;
@ApiOperation(value = "Topic申请", httpMethod = "POST", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = Result.class)
@RequestMapping(value = "orders/topic", method = RequestMethod.POST, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result createOrderTopic(@RequestBody OrderTopicModel reqObj) {
if (reqObj == null || !reqObj.createLegal()) {
return new Result(StatusCode.PARAM_ERROR, "param illegal");
}
ClusterDO clusterDO = clusterService.getById(reqObj.getClusterId());
if (clusterDO == null) {
return new Result(StatusCode.PARAM_ERROR, "param illegal, cluster not exist");
}
if (ClusterMetadataManager.isTopicExist(clusterDO.getId(), reqObj.getTopicName())) {
return new Result(StatusCode.PARAM_ERROR, "param illegal, topic already exist");
}
try {
String username = SpringContextHolder.getUserName();
if (!orderService.createOrderTopic(OrderConverter.convert2OrderTopicDO(clusterDO, username, reqObj))) {
return new Result<>(StatusCode.MY_SQL_INSERT_ERROR, Constant.KAFKA_MANAGER_INNER_ERROR);
}
} catch (Exception e) {
logger.error("createOrderTopic@OrderController, create failed, req:{}.", reqObj, e);
return new Result<>(StatusCode.MY_SQL_INSERT_ERROR, Constant.KAFKA_MANAGER_INNER_ERROR);
}
return new Result<>();
}
@ApiOperation(value = "Topic工单撤销", httpMethod = "DELETE", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = Result.class)
@RequestMapping(value = "orders/topic", method = RequestMethod.DELETE, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result cancelApplyTopic(@RequestParam("orderId") Long orderId) {
String username = SpringContextHolder.getUserName();
if (orderId == null) {
return new Result(StatusCode.PARAM_ERROR, "param illegal");
}
try {
return orderService.cancelOrder(orderId, username, OrderTypeEnum.APPLY_TOPIC);
} catch (Exception e) {
logger.error("cancelApplyTopic@OrderController, update failed, username:{} orderId:{}.", username, orderId, e);
return new Result<>(StatusCode.MY_SQL_UPDATE_ERROR, Constant.KAFKA_MANAGER_INNER_ERROR);
}
}
@ApiOperation(value = "Topic工单查看", httpMethod = "GET", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = OrderTopicVO.class)
@RequestMapping(value = "orders/topic", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result getOrderTopic() {
String username = SpringContextHolder.getUserName();
List<OrderTopicDO> orderTopicDOList = null;
try {
orderTopicDOList = orderService.getOrderTopics(username);
} catch (Exception e) {
logger.error("getOrderTopic@OrderController, get failed, username:{}.", username, e);
return new Result<>(StatusCode.MY_SQL_SELECT_ERROR, Constant.KAFKA_MANAGER_INNER_ERROR);
}
return new Result<>(OrderConverter.convert2OrderTopicVOList(orderTopicDOList));
}
@ApiOperation(value = "Topic所有工单查看[admin]", httpMethod = "GET", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = OrderTopicVO.class)
@RequestMapping(value = "admin/orders/topic", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result<List<OrderTopicVO>> getAllApplyTopic() {
List<OrderTopicDO> orderTopicDOList = null;
try {
orderTopicDOList = orderService.getOrderTopics(null);
} catch (Exception e) {
logger.error("getOrderTopic@OrderController, get failed.", e);
return new Result<>(StatusCode.MY_SQL_SELECT_ERROR, Constant.KAFKA_MANAGER_INNER_ERROR);
}
return new Result<>(OrderConverter.convert2OrderTopicVOList(orderTopicDOList));
}
@ApiOperation(value = "Topic工单执行[通过/拒绝]", httpMethod = "POST", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = Result.class)
@RequestMapping(value = "admin/orders/topic", method = RequestMethod.POST, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result executeApplyTopic(@RequestBody OrderTopicExecModel reqObj) {
Result result = OrderTopicExecModel.illegal(reqObj);
if (!StatusCode.SUCCESS.equals(result.getCode())) {
return result;
}
OrderTopicDO orderTopicDO = orderService.getOrderTopicById(reqObj.getOrderId());
if (orderTopicDO == null) {
return new Result(StatusCode.PARAM_ERROR, "param illegal, order not exist");
} else if (!OrderStatusEnum.WAIT_DEAL.getCode().equals(orderTopicDO.getOrderStatus())) {
return new Result(StatusCode.OPERATION_ERROR, "order already handled");
}
ClusterDO clusterDO = clusterService.getById(orderTopicDO.getClusterId());
if (clusterDO == null) {
return new Result(StatusCode.PARAM_ERROR, "param illegal, cluster not exist");
}
String username = SpringContextHolder.getUserName();
orderTopicDO.setBrokers((reqObj.getBrokerIdList() == null || reqObj.getBrokerIdList().isEmpty())? "": ListUtils.intList2String(reqObj.getBrokerIdList()));
orderTopicDO.setRegions((reqObj.getRegionIdList() == null || reqObj.getRegionIdList().isEmpty())? "": ListUtils.longList2String(reqObj.getRegionIdList()));
orderTopicDO.setApprover(username);
orderTopicDO.setOpinion(reqObj.getApprovalOpinions());
orderTopicDO.setOrderStatus(reqObj.getOrderStatus());
if (OrderStatusEnum.PASSED.getCode().equals(reqObj.getOrderStatus())) {
result = createTopic(clusterDO, reqObj, orderTopicDO);
}
if (!StatusCode.SUCCESS.equals(result.getCode())) {
return result;
}
result = orderService.modifyOrderTopic(orderTopicDO, username, true);
if (!StatusCode.SUCCESS.equals(result.getCode())) {
return new Result(StatusCode.OPERATION_ERROR, "create topic success, but update order status failed, err:" + result.getMessage());
}
return new Result();
}
private Result createTopic(ClusterDO clusterDO, OrderTopicExecModel reqObj, OrderTopicDO orderTopicDO) {
TopicDO topicInfoDO = OrderConverter.convert2TopicInfoDO(orderTopicDO);
List<Integer> brokerIdList = regionService.getFullBrokerId(clusterDO.getId(), reqObj.getRegionIdList(), reqObj.getBrokerIdList());
Properties topicConfig = new Properties();
topicConfig.setProperty("retention.ms", String.valueOf(reqObj.getRetentionTime()));
try {
TopicMetadata topicMetadata = new TopicMetadata();
topicMetadata.setTopic(orderTopicDO.getTopicName());
topicMetadata.setReplicaNum(reqObj.getReplicaNum());
topicMetadata.setPartitionNum(reqObj.getPartitionNum());
topicMetadata.setBrokerIdSet(new HashSet<>(brokerIdList));
AdminTopicStatusEnum adminTopicStatusEnum = adminTopicService.createTopic(clusterDO, topicMetadata, topicInfoDO, topicConfig, SpringContextHolder.getUserName());
if (!AdminTopicStatusEnum.SUCCESS.equals(adminTopicStatusEnum)) {
return new Result(StatusCode.OPERATION_ERROR, adminTopicStatusEnum.getMessage());
}
} catch (Exception e) {
logger.error("executeApplyTopic@OrderController, create failed, req:{}.", reqObj);
return new Result(StatusCode.OPERATION_ERROR, Constant.KAFKA_MANAGER_INNER_ERROR);
}
return new Result();
}
@ApiOperation(value = "partition申请", httpMethod = "POST", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = Result.class)
@RequestMapping(value = "orders/partition", method = RequestMethod.POST, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result applyPartition(@RequestBody OrderPartitionModel reqObj) {
if (reqObj == null || !reqObj.createLegal()) {
return new Result(StatusCode.PARAM_ERROR, "param illegal");
}
ClusterDO clusterDO = clusterService.getById(reqObj.getClusterId());
if (clusterDO == null) {
return new Result(StatusCode.PARAM_ERROR, "param illegal, cluster not exist");
}
if (!ClusterMetadataManager.isTopicExist(clusterDO.getId(), reqObj.getTopicName())) {
return new Result(StatusCode.PARAM_ERROR, "param illegal, topic not exist");
}
try {
if (!orderService.createOrderPartition(OrderConverter.convert2OrderPartitionDO(clusterDO, SpringContextHolder.getUserName(), reqObj))) {
return new Result<>(StatusCode.MY_SQL_INSERT_ERROR, Constant.KAFKA_MANAGER_INNER_ERROR);
}
} catch (Exception e) {
logger.error("applyPartition@OrderController, create failed, req:{}.", reqObj, e);
return new Result<>(StatusCode.MY_SQL_INSERT_ERROR, Constant.KAFKA_MANAGER_INNER_ERROR);
}
return new Result<>();
}
@ApiOperation(value = "partition工单撤销", httpMethod = "DELETE", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = Result.class)
@RequestMapping(value = "orders/partition", method = RequestMethod.DELETE, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result cancelApplyPartition(@RequestParam("orderId") Long orderId) {
if (orderId == null) {
return new Result(StatusCode.PARAM_ERROR, "param illegal");
}
try {
return orderService.cancelOrder(orderId, SpringContextHolder.getUserName(), OrderTypeEnum.APPLY_PARTITION);
} catch (Exception e) {
logger.error("cancelApplyPartition@OrderController, update failed, username:{} orderId:{}.", SpringContextHolder.getUserName(), orderId, e);
return new Result<>(StatusCode.MY_SQL_UPDATE_ERROR, Constant.KAFKA_MANAGER_INNER_ERROR);
}
}
@ApiOperation(value = "partition工单查看", httpMethod = "GET", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = OrderPartitionVO.class)
@RequestMapping(value = "orders/partition", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result<List<OrderPartitionVO>> getApplyPartitionList(@RequestParam(value = "orderId", required = false) Long orderId) {
List<OrderPartitionDO> orderPartitionDOList = null;
try {
orderPartitionDOList = orderService.getOrderPartitions(SpringContextHolder.getUserName(), orderId);
} catch (Exception e) {
logger.error("getApplyPartition@OrderController, get failed, username:{}.", SpringContextHolder.getUserName(), e);
return new Result<>(StatusCode.MY_SQL_SELECT_ERROR, Constant.KAFKA_MANAGER_INNER_ERROR);
}
return new Result<>(OrderConverter.convert2OrderPartitionVOList(orderPartitionDOList));
}
@ApiOperation(value = "partition工单查看[Admin]", httpMethod = "GET", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = OrderPartitionVO.class)
@RequestMapping(value = "admin/orders/partition", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result<List<OrderPartitionVO>> adminGetApplyPartition(@RequestParam(value = "orderId", required = false) Long orderId) {
List<OrderPartitionDO> orderPartitionDOList = null;
try {
orderPartitionDOList = orderService.getOrderPartitions(null, orderId);
} catch (Exception e) {
logger.error("adminGetApplyPartition@OrderController, get failed.", e);
return new Result<>(StatusCode.MY_SQL_SELECT_ERROR, Constant.KAFKA_MANAGER_INNER_ERROR);
}
if (orderId == null || orderPartitionDOList.isEmpty()) {
return new Result<>(OrderConverter.convert2OrderPartitionVOList(orderPartitionDOList));
}
return new Result<>(supplyExternalInfo(orderPartitionDOList));
}
private List<OrderPartitionVO> supplyExternalInfo(List<OrderPartitionDO> orderPartitionDOList) {
if (orderPartitionDOList == null || orderPartitionDOList.isEmpty()) {
return new ArrayList<>();
}
List<OrderPartitionVO> orderPartitionVOList = new ArrayList<>();
for (OrderPartitionDO orderPartitionDO: orderPartitionDOList) {
TopicMetadata topicMetadata = ClusterMetadataManager.getTopicMetaData(orderPartitionDO.getClusterId(), orderPartitionDO.getTopicName());
if (topicMetadata == null) {
// topic does not exist, skip it
continue;
}
// get the topic's peak average bytes-in over the last day
Date startTime = new Date(System.currentTimeMillis() - (24 * 60 * 60 * 1000));
Date endTime = new Date();
List<TopicMetrics> topicMetricsList = topicService.getTopicMetricsByInterval(orderPartitionDO.getClusterId(), orderPartitionDO.getTopicName(), startTime, endTime);
Long maxAvgBytes = topicService.calTopicMaxAvgBytesIn(topicMetricsList, 10);
// get the regions the topic belongs to
List<RegionDO> regionDOList = regionService.getRegionByTopicName(orderPartitionDO.getClusterId(), orderPartitionDO.getTopicName());
orderPartitionVOList.add(OrderConverter.convert2OrderPartitionVO(orderPartitionDO, topicMetadata, maxAvgBytes, regionDOList));
}
return orderPartitionVOList;
}
@ApiOperation(value = "partition工单执行[通过/拒绝]", httpMethod = "POST", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = Result.class)
@RequestMapping(value = "admin/orders/partition", method = RequestMethod.POST, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result executeApplyPartition(@RequestBody OrderPartitionExecModel reqObj) {
String username = SpringContextHolder.getUserName();
Result result = OrderPartitionExecModel.illegal(reqObj);
if (!StatusCode.SUCCESS.equals(result.getCode())) {
return result;
}
OrderPartitionDO orderPartitionDO = orderService.getOrderPartitionById(reqObj.getOrderId());
if (orderPartitionDO == null) {
return new Result(StatusCode.PARAM_ERROR, "param illegal, order not exist");
} else if (!OrderStatusEnum.WAIT_DEAL.getCode().equals(orderPartitionDO.getOrderStatus())) {
return new Result(StatusCode.OPERATION_ERROR, "order already handled");
}
ClusterDO clusterDO = clusterService.getById(orderPartitionDO.getClusterId());
if (clusterDO == null) {
return new Result(StatusCode.PARAM_ERROR, "param illegal, cluster not exist");
}
if (OrderStatusEnum.PASSED.getCode().equals(reqObj.getOrderStatus())) {
result = expandTopic(clusterDO, reqObj, orderPartitionDO);
}
if (!StatusCode.SUCCESS.equals(result.getCode())) {
return result;
}
orderPartitionDO.setApprover(username);
orderPartitionDO.setOpinion(reqObj.getApprovalOpinions());
orderPartitionDO.setOrderStatus(reqObj.getOrderStatus());
result = orderService.modifyOrderPartition(orderPartitionDO, username);
if (!StatusCode.SUCCESS.equals(result.getCode())) {
return new Result(StatusCode.OPERATION_ERROR, "expand partition success, but update order status failed, err:" + result.getMessage());
}
return new Result();
}
private Result expandTopic(ClusterDO clusterDO, OrderPartitionExecModel reqObj, OrderPartitionDO orderPartitionDO) {
List<Integer> brokerIdList = regionService.getFullBrokerId(clusterDO.getId(), reqObj.getRegionIdList(), reqObj.getBrokerIdList());
try {
TopicMetadata topicMetadata = new TopicMetadata();
topicMetadata.setTopic(orderPartitionDO.getTopicName());
topicMetadata.setBrokerIdSet(new HashSet<>(brokerIdList));
topicMetadata.setPartitionNum(reqObj.getPartitionNum());
AdminTopicStatusEnum adminTopicStatusEnum = adminTopicService.expandTopic(clusterDO, topicMetadata, SpringContextHolder.getUserName());
if (!AdminTopicStatusEnum.SUCCESS.equals(adminTopicStatusEnum)) {
return new Result(StatusCode.OPERATION_ERROR, adminTopicStatusEnum.getMessage());
}
} catch (Exception e) {
logger.error("expandTopic@OrderController, create failed, req:{}.", reqObj);
return new Result(StatusCode.OPERATION_ERROR, Constant.KAFKA_MANAGER_INNER_ERROR);
}
return new Result();
}
}
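The approved-topic path above delegates the actual creation to adminTopicService with a broker set and a retention.ms override. As a rough illustration of that operation (not the project's implementation), a minimal sketch using Kafka's public AdminClient might look like this; the bootstrap address, topic name, partition/replica counts and retention value are assumptions, and per-broker replica placement is omitted.

import java.util.Collections;
import java.util.Properties;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.NewTopic;

public class CreateTopicSketch {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // assumed broker address

        try (AdminClient admin = AdminClient.create(props)) {
            // 3 partitions, replication factor 2, 7-day retention override
            NewTopic topic = new NewTopic("test_topic", 3, (short) 2)
                    .configs(Collections.singletonMap("retention.ms", String.valueOf(7 * 24 * 3600 * 1000L)));
            admin.createTopics(Collections.singleton(topic)).all().get();
        }
    }
}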

View File

@@ -0,0 +1,364 @@
package com.xiaojukeji.kafka.manager.web.api.versionone;
import com.xiaojukeji.kafka.manager.common.constant.Constant;
import com.xiaojukeji.kafka.manager.common.constant.MetricsType;
import com.xiaojukeji.kafka.manager.common.constant.StatusCode;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.dto.PartitionOffsetDTO;
import com.xiaojukeji.kafka.manager.common.entity.dto.TopicBasicDTO;
import com.xiaojukeji.kafka.manager.common.entity.dto.TopicOverviewDTO;
import com.xiaojukeji.kafka.manager.common.entity.dto.TopicPartitionDTO;
import com.xiaojukeji.kafka.manager.common.entity.metrics.TopicMetrics;
import com.xiaojukeji.kafka.manager.common.entity.po.ClusterDO;
import com.xiaojukeji.kafka.manager.common.entity.po.RegionDO;
import com.xiaojukeji.kafka.manager.common.entity.po.TopicDO;
import com.xiaojukeji.kafka.manager.common.entity.po.TopicFavoriteDO;
import com.xiaojukeji.kafka.manager.common.entity.zookeeper.TopicMetadata;
import com.xiaojukeji.kafka.manager.service.cache.ClusterMetadataManager;
import com.xiaojukeji.kafka.manager.service.service.ClusterService;
import com.xiaojukeji.kafka.manager.service.service.RegionService;
import com.xiaojukeji.kafka.manager.service.service.TopicManagerService;
import com.xiaojukeji.kafka.manager.service.service.TopicService;
import com.xiaojukeji.kafka.manager.service.utils.SpringContextHolder;
import com.xiaojukeji.kafka.manager.web.model.topic.TopicDataSampleModel;
import com.xiaojukeji.kafka.manager.web.model.topic.TopicFavorite;
import com.xiaojukeji.kafka.manager.web.model.topic.TopicFavoriteModel;
import com.xiaojukeji.kafka.manager.web.vo.broker.BrokerMetadataVO;
import com.xiaojukeji.kafka.manager.common.utils.CopyUtils;
import com.xiaojukeji.kafka.manager.common.utils.DateUtils;
import com.xiaojukeji.kafka.manager.web.converters.TopicModelConverter;
import com.xiaojukeji.kafka.manager.web.vo.topic.*;
import io.swagger.annotations.*;
import org.apache.kafka.common.TopicPartition;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.stereotype.Controller;
import org.springframework.util.StringUtils;
import org.springframework.web.bind.annotation.*;
import java.util.*;
import java.util.stream.Collectors;
/**
* @author huangyiminghappy@163.com, zengqiao_cn@163.com
* @date 19/6/2
*/
@Api(value = "TopicController", description = "Topic相关接口")
@Controller
@RequestMapping("api/v1/")
public class TopicController {
private final static Logger logger = LoggerFactory.getLogger(TopicController.class);
@Autowired
private ClusterService clusterService;
@Autowired
private TopicService topicService;
@Autowired
private TopicManagerService topicManagerService;
@Autowired
private RegionService regionService;
@ApiOperation(value = "Topic元信息", httpMethod = "GET", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = TopicMetadataVO.class)
@RequestMapping(value = "{clusterId}/topics/{topicName}/metadata", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result<TopicMetadataVO> getTopicMetadata(@PathVariable Long clusterId, @PathVariable String topicName) {
if (clusterId == null || StringUtils.isEmpty(topicName)) {
return new Result<>(StatusCode.PARAM_ERROR, "param illegal");
}
TopicMetadata topicMetadata = ClusterMetadataManager.getTopicMetaData(clusterId, topicName);
if (topicMetadata == null) {
return new Result<>(StatusCode.PARAM_ERROR, "param illegal, topic not exist");
}
return new Result<>(TopicModelConverter.convert2TopicMetadataVO(clusterId, topicMetadata));
}
@ApiOperation(value = "收藏Topic", httpMethod = "POST", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = Result.class)
@RequestMapping(value = "topics/favorite", method = RequestMethod.POST, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result addFavorite(@RequestBody TopicFavoriteModel reqObj) {
if (reqObj == null || !reqObj.legal()) {
return new Result(StatusCode.PARAM_ERROR, "param illegal");
}
String username = SpringContextHolder.getUserName();
List<TopicFavorite> topicFavoriteList = reqObj.getTopicFavoriteList();
try {
Boolean result = topicManagerService.addFavorite(TopicModelConverter.convert2TopicFavoriteDOList(username, topicFavoriteList));
if (!Boolean.TRUE.equals(result)) {
return new Result(StatusCode.OPERATION_ERROR, Constant.KAFKA_MANAGER_INNER_ERROR);
}
} catch (Exception e) {
logger.error("addFavorite@TopicController, add failed, username:{} req:{}.", username, reqObj, e);
return new Result(StatusCode.OPERATION_ERROR, Constant.KAFKA_MANAGER_INNER_ERROR);
}
return new Result<>();
}
@ApiOperation(value = "取消收藏Topic", httpMethod = "DELETE", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = Result.class)
@RequestMapping(value = "topics/favorite", method = RequestMethod.DELETE, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result delFavorite(@RequestBody TopicFavoriteModel reqObj) {
if (reqObj == null || !reqObj.legal()) {
return new Result(StatusCode.PARAM_ERROR, "param illegal");
}
String username = SpringContextHolder.getUserName();
List<TopicFavorite> topicFavoriteList = reqObj.getTopicFavoriteList();
try {
Boolean result = topicManagerService.delFavorite(TopicModelConverter.convert2TopicFavoriteDOList(username, topicFavoriteList));
if (!Boolean.TRUE.equals(result)) {
return new Result(StatusCode.OPERATION_ERROR, Constant.KAFKA_MANAGER_INNER_ERROR);
}
} catch (Exception e) {
logger.error("delFavorite@TopicController, del failed, username:{} req{}.", username, reqObj, e);
return new Result(StatusCode.OPERATION_ERROR, Constant.KAFKA_MANAGER_INNER_ERROR);
}
return new Result<>();
}
@ApiOperation(value = "Topic列表[包括收藏列表]", httpMethod = "GET", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = TopicOverviewVO.class)
@RequestMapping(value = "topics", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result<List<TopicOverviewVO>> getTopicInfoList(@ApiParam(name = "clusterId", value = "集群Id") @RequestParam(value = "clusterId", required = false) Long clusterId,
@ApiParam(name = "favorite", value = "收藏[True:是, False:否]") @RequestParam(value = "favorite", required = false) Boolean favorite) {
String username = SpringContextHolder.getUserName();
// get the user's favorite topics
List<TopicFavoriteDO> topicFavoriteDOList = topicManagerService.getFavorite(username, clusterId);
List<ClusterDO> clusterDOList = clusterService.listAll();
List<TopicOverviewVO> topicOverviewVOList = new ArrayList<>();
for (ClusterDO clusterDO: clusterDOList) {
if (clusterId != null && !clusterDO.getId().equals(clusterId)) {
continue;
}
// get the topic owners stored in the DB
List<TopicDO> topicDOList = new ArrayList<>();
try {
topicDOList = topicManagerService.getByClusterId(clusterDO.getId());
} catch (Exception e) {
logger.error("getTopicInfoList@TopicController, get topics from db error, clusterId:{}.", clusterDO.getId(), e);
}
// filter down to the topics whose JMX info needs to be queried
List<String> filterTopicNameList = null;
if (Boolean.TRUE.equals(favorite)) {
filterTopicNameList = topicFavoriteDOList.stream().filter(elem -> elem.getClusterId().equals(clusterDO.getId())).map(elem -> elem.getTopicName()).collect(Collectors.toList());
}
// get the topic overview metadata
List<TopicOverviewDTO> topicOverviewDTOList = new ArrayList<>();
try {
topicOverviewDTOList = topicService.getTopicOverviewDTOList(clusterDO.getId(), -1, filterTopicNameList);
} catch (Exception e) {
logger.error("getTopicInfoList@TopicController, get topics error, clusterId:{}.", clusterDO.getId(), e);
}
// merge the data
topicOverviewVOList.addAll(TopicModelConverter.convert2TopicOverviewVOList(clusterDO, topicOverviewDTOList, topicDOList, topicFavoriteDOList));
}
return new Result<>(topicOverviewVOList);
}
@ApiOperation(value = "获取Topic名称列表", httpMethod = "GET", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = String.class)
@RequestMapping(value = "{clusterId}/topics/topic-names", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result<List<String>> getTopicNameList(@ApiParam(name = "clusterId", required = true, value = "集群ID") @PathVariable Long clusterId) {
if (clusterId == null) {
return new Result<>(StatusCode.PARAM_ERROR, "param illegal");
}
ClusterDO cluster = clusterService.getById(clusterId);
if (cluster == null) {
return new Result<>(StatusCode.PARAM_ERROR, "param illegal, cluster not exist");
}
return new Result<>(ClusterMetadataManager.getTopicNameList(clusterId));
}
@ApiOperation(value = "获取Topic的基本信息", httpMethod = "GET", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = TopicBasicVO.class)
@RequestMapping(value = "{clusterId}/topics/{topicName}/basic-info", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result<TopicBasicVO> getTopicBasicInfo(@ApiParam(name = "clusterId", required = true, value = "集群ID") @PathVariable Long clusterId,
@ApiParam(name = "topicName", required = true, value = "Topic名字") @PathVariable String topicName) {
if (clusterId == null || StringUtils.isEmpty(topicName)) {
return new Result<>(StatusCode.PARAM_ERROR, "param illegal");
}
TopicBasicDTO topicBasicDTO = topicService.getTopicBasicDTO(clusterId, topicName);
TopicDO topicDO = topicManagerService.getByTopicName(clusterId, topicName);
List<RegionDO> regionList = regionService.getRegionByTopicName(clusterId, topicName);
return new Result<>(TopicModelConverter.convert2TopicBasicVO(topicBasicDTO, topicDO, regionList));
}
@ApiOperation(value = "Topic采样", httpMethod = "POST", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = TopicDataSampleVO.class)
@RequestMapping(value = "{clusterId}/topics/{topicName}/sample", method = RequestMethod.POST, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result<List<TopicDataSampleVO>> previewTopic(@ApiParam(name = "clusterId", required = true, value = "集群ID") @PathVariable Long clusterId,
@ApiParam(name = "topicName", required = true, value = "Topic名称") @PathVariable String topicName,
@ApiParam(name = "topicDataSampleModel", required = true, value = "请求参数") @RequestBody TopicDataSampleModel topicDataSampleModel) {
if (clusterId == null || StringUtils.isEmpty(topicName)) {
return new Result<>(StatusCode.PARAM_ERROR, "param illegal");
}
ClusterDO clusterDO = clusterService.getById(clusterId);
if (clusterDO == null || !ClusterMetadataManager.isTopicExist(clusterId, topicName)) {
return new Result<>(StatusCode.PARAM_ERROR, "param illegal, cluster or topic not exist");
}
if (topicDataSampleModel == null || !topicDataSampleModel.legal()) {
return new Result<>(StatusCode.PARAM_ERROR, "param illegal, request body illegal");
}
List<String> dataList = buildSampleDataList(clusterDO, topicName, topicDataSampleModel);
if (dataList == null) {
return new Result<>(StatusCode.OPERATION_ERROR, "fetch data failed");
}
List<TopicDataSampleVO> topicDataSampleVOList = new ArrayList<>();
for (String data: dataList) {
TopicDataSampleVO topicDataSampleVO = new TopicDataSampleVO();
topicDataSampleVO.setValue(data);
topicDataSampleVOList.add(topicDataSampleVO);
}
return new Result<>(topicDataSampleVOList);
}
private List<String> buildSampleDataList(ClusterDO clusterDO, String topicName, TopicDataSampleModel topicDataSampleModel) {
int partitionId = topicDataSampleModel.getPartitionId();
int maxMsgNum = topicDataSampleModel.getMaxMsgNum();
int timeout = topicDataSampleModel.getTimeout();
long offset = topicDataSampleModel.getOffset();
boolean truncate = topicDataSampleModel.isTruncate();
List<TopicPartition> topicPartitionList = new ArrayList<>();
topicPartitionList.add(new TopicPartition(topicName, partitionId));
return topicService.fetchTopicData(clusterDO, topicPartitionList, timeout, maxMsgNum, offset, truncate);
}
@ApiOperation(value = "Topic实时流量信息", httpMethod = "GET", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = TopicRealTimeMetricsVO.class)
@RequestMapping(value = "{clusterId}/topics/{topicName}/metrics", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result<TopicRealTimeMetricsVO> getTopicMetric(@ApiParam(name = "clusterId", required = true, value = "集群Id") @PathVariable Long clusterId,
@ApiParam(name = "topicName", required = true, value = "topic名字") @PathVariable String topicName) {
if (!ClusterMetadataManager.isTopicExist(clusterId, topicName)) {
return new Result<>(StatusCode.PARAM_ERROR, "param illegal, topic not exist");
}
TopicMetrics topicMetrics = topicService.getTopicMetrics(clusterId, topicName, TopicMetrics.getFieldNameList(MetricsType.TOPIC_FLOW_DETAIL));
if (null == topicMetrics) {
return new Result<>(StatusCode.PARAM_ERROR, "param illegal, topic not exist");
}
return new Result<>(TopicModelConverter.convert2TopicRealTimeMetricsVO(topicMetrics));
}
@ApiOperation(value = "获取Topic历史流量信息", httpMethod = "GET", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = TopicMetricsVO.class)
@RequestMapping(value = "{clusterId}/topics/{topicName}/metrics-history", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result<List<TopicMetricsVO>> getMetricsOfTopic(@ApiParam(name = "clusterId", required = true, value = "集群Id") @PathVariable Long clusterId,
@ApiParam(name = "topicName", required = true, value = "topic名字") @PathVariable String topicName,
@ApiParam(name = "startTime", required = true, value = "开始时间") @RequestParam("startTime") Long startTime,
@ApiParam(name = "endTime", required = true, value = "结束时间") @RequestParam("endTime") Long endTime) {
if (clusterId == null || clusterId < 0 || StringUtils.isEmpty(topicName)) {
logger.error("getMetricsOfTopic@TopicController, parameters are invalid.");
return new Result<>(StatusCode.PARAM_ERROR, "param illegal, clusterId and topicName can not be empty");
}
List<TopicMetrics> topicMetricsList = topicService.getTopicMetricsByInterval(clusterId, topicName, DateUtils.long2Date(startTime), DateUtils.long2Date(endTime));
List<TopicMetricsVO> result = new ArrayList<>();
for (TopicMetrics tm : topicMetricsList) {
TopicMetricsVO topicMetricsVO = new TopicMetricsVO();
CopyUtils.copyProperties(topicMetricsVO, tm);
topicMetricsVO.setGmtCreate(tm.getGmtCreate().getTime());
result.add(topicMetricsVO);
}
return new Result<>(result);
}
@ApiOperation(value = "获取Topic分区信息", httpMethod = "GET", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = TopicPartitionVO.class)
@RequestMapping(value = "{clusterId}/topics/{topicName}/partitions", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result<List<TopicPartitionVO>> getPartitionsOfTopic(@PathVariable @ApiParam(name = "clusterId", required = true, value = "集群Id") Long clusterId, @PathVariable @ApiParam(name = "TopicName", required = true, value = "Topic名称") String topicName) {
Result result = paramCheck(clusterId, topicName);
if (!StatusCode.SUCCESS.equals(result.getCode())) {
return result;
}
ClusterDO cluster = (ClusterDO) result.getData();
List<TopicPartitionDTO> topicPartitionDTOList = topicService.getTopicPartitionDTO(cluster, topicName, true);
if (topicPartitionDTOList == null) {
return new Result<>(StatusCode.PARAM_ERROR, "param illegal, topic not exist");
}
return new Result<>(TopicModelConverter.convert2TopicPartitionVOList(topicPartitionDTOList));
}
@ApiOperation(value = "查询Topic的Broker信息", httpMethod = "GET", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = TopicBrokerVO.class)
@RequestMapping(value = "{clusterId}/topics/{topicName}/brokers", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result<List<TopicBrokerVO>> getBrokersOfTopic(@PathVariable @ApiParam(name = "clusterId", required = true, value = "集群Id") Long clusterId,
@PathVariable @ApiParam(name = "topicName", required = true, value = "Topic名称") String topicName) {
Result result = paramCheck(clusterId, topicName);
if (!StatusCode.SUCCESS.equals(result.getCode())) {
return result;
}
ClusterDO cluster = (ClusterDO) result.getData();
TopicMetadata topicMetadata = ClusterMetadataManager.getTopicMetaData(clusterId, topicName);
if (topicMetadata == null) {
return new Result<>(StatusCode.PARAM_ERROR, "param illegal, topic not exist");
}
List<TopicPartitionDTO> topicPartitionDTOList = topicService.getTopicPartitionDTO(cluster, topicName, false);
return new Result<>(TopicModelConverter.convert2TopicBrokerVOList(cluster, topicMetadata, topicPartitionDTOList));
}
@ApiOperation(value = "查询指定时间的offset信息", httpMethod = "GET", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponse(code = 200, message = "success", response = TopicOffsetVO.class)
@RequestMapping(value = "{clusterId}/topics/{topicName}/offsets", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Result<List<TopicOffsetVO>> getOffsetsOfTopic(@PathVariable @ApiParam(name = "clusterId", required = true, value = "集群Id") Long clusterId,
@PathVariable @ApiParam(name = "topicName", required = true, value = "topic名字") String topicName,
@RequestParam("timestamp") @ApiParam(name = "timestamp", required = true, value = "时间戳(ms)") Long timestamp) {
if (clusterId < 0 || StringUtils.isEmpty(topicName) || timestamp == null) {
return new Result<>(StatusCode.PARAM_ERROR,"param illegal, please check clusterId, topicName and timestamp");
}
ClusterDO cluster = clusterService.getById(clusterId);
if (cluster == null) {
return new Result<>(StatusCode.PARAM_ERROR, "param illegal, cluster not exist");
}
List<PartitionOffsetDTO> partitionOffsetDTOList = topicService.getPartitionOffsetList(cluster, topicName, timestamp);
if (partitionOffsetDTOList == null) {
return new Result<>(StatusCode.PARAM_ERROR, "param illegal, topic not exist");
}
return new Result<>(TopicModelConverter.convert2TopicOffsetVOList(clusterId, topicName, partitionOffsetDTOList));
}
/**
* Coarse check that the parameters are legal
*/
private Result paramCheck(Long clusterId, String topicName) {
if (clusterId == null || clusterId < 0 || StringUtils.isEmpty(topicName)) {
return new Result(StatusCode.PARAM_ERROR, "params illegal");
}
ClusterDO cluster = null;
try {
cluster = clusterService.getById(clusterId);
} catch (Exception e) {
logger.error("paramCheck@TopicController, clusterId:{}.", clusterId, e);
return new Result(StatusCode.PARAM_ERROR, Constant.KAFKA_MANAGER_INNER_ERROR);
}
if (cluster == null) {
return new Result(StatusCode.PARAM_ERROR, "params illegal, cluster not exist");
}
return new Result<>(cluster);
}
}
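previewTopic/buildSampleDataList above hand a single TopicPartition, an offset, a message cap and a timeout to topicService.fetchTopicData. As an assumption about what such sampling amounts to (not a copy of fetchTopicData), a standalone sketch with the plain Kafka consumer could look like this:

import java.time.Duration;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.StringDeserializer;

public class TopicSampleSketch {
    public static List<String> sample(String bootstrapServers, String topic, int partitionId,
                                      long offset, int maxMsgNum, int timeoutMs) {
        Properties props = new Properties();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");        // sampling must not move group offsets
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "kafka-manager-sample");   // illustrative group id

        List<String> dataList = new ArrayList<>();
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            TopicPartition tp = new TopicPartition(topic, partitionId);
            consumer.assign(Collections.singleton(tp));
            consumer.seek(tp, offset);
            long deadline = System.currentTimeMillis() + timeoutMs;
            while (dataList.size() < maxMsgNum && System.currentTimeMillis() < deadline) {
                for (ConsumerRecord<String, String> record : consumer.poll(Duration.ofMillis(100))) {
                    dataList.add(record.value());
                    if (dataList.size() >= maxMsgNum) {
                        break;
                    }
                }
            }
        }
        return dataList;
    }
}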

View File

@@ -0,0 +1,51 @@
package com.xiaojukeji.kafka.manager.web.config;
import org.apache.ibatis.session.SqlSessionFactory;
import org.mybatis.spring.SqlSessionFactoryBean;
import org.mybatis.spring.SqlSessionTemplate;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.jdbc.DataSourceBuilder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import javax.sql.DataSource;
/**
* @author zengqiao
* @date 20/3/17
*/
@Configuration
public class DataSourceConfig {
@Bean(name = "dataSource")
@ConfigurationProperties(prefix = "spring.datasource.kafka-manager")
@Primary
public DataSource dataSource() {
return DataSourceBuilder.create().build();
}
@Bean(name = "sqlSessionFactory")
@Primary
public SqlSessionFactory sqlSessionFactory(@Qualifier("dataSource") DataSource dataSource) throws Exception {
SqlSessionFactoryBean bean = new SqlSessionFactoryBean();
bean.setDataSource(dataSource);
bean.setMapperLocations(new PathMatchingResourcePatternResolver().getResources("classpath:mapper/*.xml"));
bean.setConfigLocation(new PathMatchingResourcePatternResolver().getResource("classpath:mybatis-config.xml"));
return bean.getObject();
}
@Bean(name = "transactionManager")
@Primary
public DataSourceTransactionManager transactionManager(@Qualifier("dataSource") DataSource dataSource) {
return new DataSourceTransactionManager(dataSource);
}
@Bean(name = "sqlSession")
@Primary
public SqlSessionTemplate sqlSessionTemplate(@Qualifier("sqlSessionFactory") SqlSessionFactory sqlSessionFactory) throws Exception {
return new SqlSessionTemplate(sqlSessionFactory);
}
}
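A minimal sketch of how a DAO might use the sqlSession bean defined above; the DAO name and mapper statement id are hypothetical and only illustrate the pattern of executing a statement mapped in one of the classpath:mapper/*.xml files.

import java.util.List;
import org.mybatis.spring.SqlSessionTemplate;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Repository;

@Repository
public class ClusterDaoSketch {
    @Autowired
    private SqlSessionTemplate sqlSession;

    public List<Object> listAll() {
        // executes the mapped statement with the given namespace + id (hypothetical)
        return sqlSession.selectList("com.example.mapper.ClusterDao.listAll");
    }
}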

View File

@@ -0,0 +1,49 @@
package com.xiaojukeji.kafka.manager.web.config;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.config.annotation.*;
import springfox.documentation.builders.ApiInfoBuilder;
import springfox.documentation.builders.PathSelectors;
import springfox.documentation.builders.RequestHandlerSelectors;
import springfox.documentation.service.ApiInfo;
import springfox.documentation.spi.DocumentationType;
import springfox.documentation.spring.web.plugins.Docket;
import springfox.documentation.swagger2.annotations.EnableSwagger2;
/**
* Swagger configuration
* @author huangyiminghappy@163.com
* @date 2019-05-09
*/
@Configuration
@EnableWebMvc
@EnableSwagger2
public class SwaggerConfig implements WebMvcConfigurer {
@Override
public void addResourceHandlers(ResourceHandlerRegistry registry) {
registry.addResourceHandler("swagger-ui.html").addResourceLocations("classpath:/META-INF/resources/");
registry.addResourceHandler("/webjars/**").addResourceLocations("classpath:/META-INF/resources/webjars/");
}
@Bean
public Docket createRestApi() {
return new Docket(DocumentationType.SWAGGER_2)
.apiInfo(apiInfo())
.select()
.apis(RequestHandlerSelectors.basePackage("com.xiaojukeji.kafka.manager.web.api.versionone"))
.paths(PathSelectors.any())
.build()
.enable(true);
}
private ApiInfo apiInfo() {
return new ApiInfoBuilder()
.title("接口文档")
.description("欢迎使用滴滴出行开源的kafka-manager")
.contact("huangyiminghappy@163.com")
.version("1.0")
.build();
}
}

View File

@@ -0,0 +1,50 @@
package com.xiaojukeji.kafka.manager.web.config;
import com.xiaojukeji.kafka.manager.web.inteceptor.PermissionInterceptor;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.SpringBootConfiguration;
import org.springframework.context.annotation.DependsOn;
import org.springframework.stereotype.Component;
import org.springframework.web.servlet.config.annotation.InterceptorRegistry;
import org.springframework.web.servlet.config.annotation.ResourceHandlerRegistry;
import org.springframework.web.servlet.config.annotation.ViewControllerRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
/**
* @author zengqiao
* @date 20/1/19
*/
@SpringBootConfiguration
@Component
@DependsOn({"permissionInterceptor"})
public class WebMvcConfig implements WebMvcConfigurer {
@Autowired
private PermissionInterceptor permissionInterceptor;
@Override
public void addViewControllers(ViewControllerRegistry registry) {
registry.addViewController("/").setViewName("index");
registry.addViewController("/index.html").setViewName("index");
registry.addViewController("/login").setViewName("index");
registry.addViewController("/login/**").setViewName("index");
registry.addViewController("/admin").setViewName("index");
registry.addViewController("/admin/**").setViewName("index");
registry.addViewController("/user").setViewName("index");
registry.addViewController("/user/**").setViewName("index");
}
@Override
public void addInterceptors(InterceptorRegistry registry) {
registry.addInterceptor(permissionInterceptor).addPathPatterns("/api/v1/**");
}
@Override
public void addResourceHandlers(ResourceHandlerRegistry registry) {
// SWAGGER
registry.addResourceHandler("swagger-ui.html").addResourceLocations("classpath:/META-INF/resources/");
registry.addResourceHandler("/webjars/**").addResourceLocations("classpath:/META-INF/resources/webjars/");
// FE
registry.addResourceHandler("index.html", "/**").addResourceLocations("classpath:/templates/","classpath:/static/");
}
}

View File

@@ -0,0 +1,33 @@
package com.xiaojukeji.kafka.manager.web.converters;
import com.xiaojukeji.kafka.manager.web.model.AccountModel;
import com.xiaojukeji.kafka.manager.common.entity.po.AccountDO;
import com.xiaojukeji.kafka.manager.web.vo.AccountVO;
import java.util.ArrayList;
import java.util.List;
/**
* @author zengqiao
* @date 19/5/3
*/
public class AccountConverter {
public static AccountDO convert2AccountDO(AccountModel accountModel) {
AccountDO accountDO = new AccountDO();
accountDO.setUsername(accountModel.getUsername());
accountDO.setPassword(accountModel.getPassword());
accountDO.setRole(accountModel.getRole());
return accountDO;
}
public static List<AccountVO> convert2AccountVOList(List<AccountDO> accountDOList) {
if (accountDOList == null) {
return new ArrayList<>();
}
List<AccountVO> userVOList = new ArrayList<>();
for (AccountDO accountDO: accountDOList) {
userVOList.add(new AccountVO(accountDO.getUsername(), null, accountDO.getRole()));
}
return userVOList;
}
}

View File

@@ -0,0 +1,77 @@
package com.xiaojukeji.kafka.manager.web.converters;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.xiaojukeji.kafka.manager.web.vo.MigrationDetailVO;
import com.xiaojukeji.kafka.manager.web.vo.MigrationTaskVO;
import com.xiaojukeji.kafka.manager.common.entity.po.ClusterDO;
import com.xiaojukeji.kafka.manager.common.entity.po.MigrationTaskDO;
import com.xiaojukeji.kafka.manager.web.vo.PartitionReassignmentVO;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* @author zengqiao
* @date 19/4/16
*/
public class AdminMigrationConverter {
private static final Logger logger = LoggerFactory.getLogger(AdminMigrationConverter.class);
public static MigrationDetailVO convert2MigrationDetailVO(MigrationTaskDO migrationTaskDO,
Map<Integer, Integer> migrationStatusMap) {
MigrationDetailVO migrationDetailVO = new MigrationDetailVO();
migrationDetailVO.setTaskId(migrationTaskDO.getId());
migrationDetailVO.setClusterId(migrationTaskDO.getClusterId());
migrationDetailVO.setTopicName(migrationTaskDO.getTopicName());
migrationDetailVO.setThrottle(migrationTaskDO.getThrottle());
migrationDetailVO.setStatus(migrationTaskDO.getStatus());
migrationDetailVO.setGmtCreate(migrationTaskDO.getGmtCreate());
migrationDetailVO.setMigrationStatus(migrationStatusMap);
Map<Integer, List<Integer>> reassignmentMap = new HashMap<>(2);
try {
JSONObject js = JSON.parseObject(migrationTaskDO.getReassignmentJson());
List<PartitionReassignmentVO> partitionReassignmentVOList = JSONObject.parseArray(JSON.toJSONString(js.getJSONArray("partitions")), PartitionReassignmentVO.class);
for (PartitionReassignmentVO partitionReassignmentVO: partitionReassignmentVOList) {
reassignmentMap.put(partitionReassignmentVO.getPartition(), partitionReassignmentVO.getReplicas());
}
} catch (Exception e) {
logger.error("convert2MigrationDetailVO@AdminMigrationConverter, migrationTaskDO:{}.", migrationTaskDO, e);
}
migrationDetailVO.setReassignmentMap(reassignmentMap);
return migrationDetailVO;
}
public static List<MigrationTaskVO> convert2MigrationTaskVOList(List<MigrationTaskDO> migrationTaskDOList,
List<ClusterDO> clusterDOList) {
if (migrationTaskDOList == null) {
return new ArrayList<>();
}
Map<Long, String> clusterMap = new HashMap<>();
if (clusterDOList == null) {
clusterDOList = new ArrayList<>();
}
for (ClusterDO clusterDO: clusterDOList) {
clusterMap.put(clusterDO.getId(), clusterDO.getClusterName());
}
List<MigrationTaskVO> migrationTaskVOList = new ArrayList<>();
for (MigrationTaskDO migrationTaskDO: migrationTaskDOList) {
MigrationTaskVO migrationTaskVO = new MigrationTaskVO();
migrationTaskVO.setTaskId(migrationTaskDO.getId());
migrationTaskVO.setClusterId(migrationTaskDO.getClusterId());
migrationTaskVO.setClusterName(clusterMap.getOrDefault(migrationTaskDO.getClusterId(), ""));
migrationTaskVO.setTopicName(migrationTaskDO.getTopicName());
migrationTaskVO.setStatus(migrationTaskDO.getStatus());
migrationTaskVO.setThrottle(migrationTaskDO.getThrottle());
migrationTaskVO.setGmtCreate(migrationTaskDO.getGmtCreate().getTime());
migrationTaskVO.setOperator(migrationTaskDO.getOperator());
migrationTaskVOList.add(migrationTaskVO);
}
return migrationTaskVOList;
}
}
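
convert2MigrationDetailVO assumes the reassignmentJson column holds the standard Kafka partition-reassignment layout, i.e. a "partitions" array whose entries carry a partition id and a replicas list. A small self-contained sketch of that shape and of the fastjson parsing step (the JSON literal is an illustrative example, not data from this commit):

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;

public class ReassignmentJsonSketch {
    public static void main(String[] args) {
        // Example payload in the expected shape: {"version":1,"partitions":[{"topic":...,"partition":N,"replicas":[...]}]}
        String reassignmentJson =
                "{\"version\":1,\"partitions\":[{\"topic\":\"demo\",\"partition\":0,\"replicas\":[1,2,3]}]}";
        JSONObject js = JSON.parseObject(reassignmentJson);
        // The converter walks this array and maps each "partition" to its "replicas"
        System.out.println(js.getJSONArray("partitions"));
    }
}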

View File

@@ -0,0 +1,73 @@
package com.xiaojukeji.kafka.manager.web.converters;
import com.alibaba.fastjson.JSON;
import com.xiaojukeji.kafka.manager.web.model.topic.AdminTopicModel;
import com.xiaojukeji.kafka.manager.web.vo.topic.TopicDetailVO;
import com.xiaojukeji.kafka.manager.common.entity.bizenum.DBStatusEnum;
import com.xiaojukeji.kafka.manager.common.entity.po.ClusterDO;
import com.xiaojukeji.kafka.manager.common.entity.po.TopicDO;
import com.xiaojukeji.kafka.manager.common.entity.zookeeper.TopicMetadata;
import com.xiaojukeji.kafka.manager.service.utils.ListUtils;
import org.apache.commons.lang.StringUtils;
import java.util.HashSet;
import java.util.List;
import java.util.Properties;
/**
* @author zengqiao
* @date 19/7/11
*/
public class AdminUtilConverter {
public static TopicDO convert2TopicDO(AdminTopicModel reqObj) {
TopicDO topicDO = new TopicDO();
topicDO.setClusterId(reqObj.getClusterId());
topicDO.setTopicName(reqObj.getTopicName());
topicDO.setPrincipals(ListUtils.strList2String(reqObj.getPrincipalList()));
topicDO.setStatus(DBStatusEnum.NORMAL.getStatus());
topicDO.setDescription(StringUtils.isEmpty(reqObj.getDescription())? "": reqObj.getDescription());
return topicDO;
}
public static TopicMetadata convert2TopicMetadata(String topicName,
Integer partitionNum,
Integer replicaNum,
List<Integer> brokerIdList) {
TopicMetadata topicMetadata = new TopicMetadata();
topicMetadata.setTopic(topicName);
topicMetadata.setBrokerIdSet(new HashSet<>(brokerIdList));
topicMetadata.setPartitionNum(partitionNum);
topicMetadata.setReplicaNum(replicaNum);
return topicMetadata;
}
public static TopicDetailVO convert2TopicDetailVO(ClusterDO clusterDO,
TopicMetadata topicMetadata,
Properties properties,
TopicDO topicDO) {
TopicDetailVO topicDetailVO = new TopicDetailVO();
topicDetailVO.setClusterId(clusterDO.getId());
topicDetailVO.setTopicName(topicMetadata.getTopic());
topicDetailVO.setGmtCreate(topicMetadata.getCreateTime());
topicDetailVO.setGmtModify(topicMetadata.getModifyTime());
topicDetailVO.setPartitionNum(topicMetadata.getPartitionNum());
topicDetailVO.setReplicaNum(topicMetadata.getReplicaNum());
if (topicDO != null) {
topicDetailVO.setPrincipalList(ListUtils.string2StrList(topicDO.getPrincipals()));
topicDetailVO.setDescription(topicDO.getDescription());
}
if (properties == null) {
properties = new Properties();
}
topicDetailVO.setProperties(JSON.toJSONString(properties));
Object retentionTime = properties.get("retention.ms");
if (retentionTime instanceof String) {
topicDetailVO.setRetentionTime(Long.valueOf((String) retentionTime));
} else if (retentionTime instanceof Long) {
topicDetailVO.setRetentionTime((Long) retentionTime);
} else if (retentionTime instanceof Integer) {
topicDetailVO.setRetentionTime(Long.valueOf((Integer) retentionTime));
}
return topicDetailVO;
}
}
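
The three instanceof branches around retention.ms exist because a Properties object may carry the value in different forms depending on how it was built; an assumed illustration (values are examples only):

import java.util.Properties;

public class RetentionMsSketch {
    public static void main(String[] args) {
        Properties parsedFromText = new Properties();
        parsedFromText.put("retention.ms", "604800000");    // configs parsed from text arrive as String
        Properties builtInCode = new Properties();
        builtInCode.put("retention.ms", 604800000L);         // programmatically built configs may hold boxed numbers
        // Both resolve to the same Long (7 days in milliseconds) in TopicDetailVO
        System.out.println(Long.valueOf((String) parsedFromText.get("retention.ms")));
        System.out.println((Long) builtInCode.get("retention.ms"));
    }
}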

View File

@@ -0,0 +1,71 @@
package com.xiaojukeji.kafka.manager.web.converters;
import com.alibaba.fastjson.JSON;
import com.xiaojukeji.kafka.manager.web.model.alarm.AlarmRuleModel;
import com.xiaojukeji.kafka.manager.web.vo.alarm.AlarmRuleVO;
import com.xiaojukeji.kafka.manager.common.entity.po.AlarmRuleDO;
import com.xiaojukeji.kafka.manager.service.utils.ListUtils;
import com.xiaojukeji.kafka.manager.common.entity.dto.alarm.AlarmStrategyActionDTO;
import com.xiaojukeji.kafka.manager.common.entity.dto.alarm.AlarmStrategyExpressionDTO;
import com.xiaojukeji.kafka.manager.common.entity.dto.alarm.AlarmStrategyFilterDTO;
import java.util.ArrayList;
import java.util.List;
/**
* @author zengqiao
* @date 19/5/12
*/
public class AlarmConverter {
public static AlarmRuleDO convert2AlarmRuleDO(String applicant, AlarmRuleModel alarmRuleModel) {
AlarmRuleDO alarmRuleDO = new AlarmRuleDO();
alarmRuleDO.setId(alarmRuleModel.getId());
alarmRuleDO.setAlarmName(alarmRuleModel.getAlarmName());
alarmRuleDO.setStrategyExpressions(JSON.toJSONString(alarmRuleModel.getStrategyExpressionList()));
alarmRuleDO.setStrategyFilters(JSON.toJSONString(alarmRuleModel.getStrategyFilterList()));
alarmRuleDO.setStrategyActions(JSON.toJSONString(alarmRuleModel.getStrategyActionList()));
if (!alarmRuleModel.getPrincipalList().contains(applicant)) {
alarmRuleModel.getPrincipalList().add(applicant);
}
alarmRuleDO.setPrincipals(ListUtils.strList2String(alarmRuleModel.getPrincipalList()));
alarmRuleDO.setStatus(alarmRuleModel.getStatus());
return alarmRuleDO;
}
public static AlarmRuleVO convert2AlarmRuleVO(AlarmRuleDO alarmRuleDO) {
if (alarmRuleDO == null) {
return null;
}
AlarmRuleVO alarmRuleVO = new AlarmRuleVO();
try {
alarmRuleVO.setStrategyActionList(JSON.parseArray(alarmRuleDO.getStrategyActions(), AlarmStrategyActionDTO.class));
} catch (Exception e) {
// malformed strategyActions JSON: leave the list unset rather than fail the whole conversion
}
try {
alarmRuleVO.setStrategyExpressionList(JSON.parseArray(alarmRuleDO.getStrategyExpressions(), AlarmStrategyExpressionDTO.class));
} catch (Exception e) {
// malformed strategyExpressions JSON: leave the list unset rather than fail the whole conversion
}
try {
alarmRuleVO.setStrategyFilterList(JSON.parseArray(alarmRuleDO.getStrategyFilters(), AlarmStrategyFilterDTO.class));
} catch (Exception e) {
// malformed strategyFilters JSON: leave the list unset rather than fail the whole conversion
}
alarmRuleVO.setId(alarmRuleDO.getId());
alarmRuleVO.setAlarmName(alarmRuleDO.getAlarmName());
alarmRuleVO.setPrincipalList(ListUtils.string2StrList(alarmRuleDO.getPrincipals()));
alarmRuleVO.setStatus(alarmRuleDO.getStatus());
alarmRuleVO.setGmtCreate(alarmRuleDO.getGmtCreate().getTime());
alarmRuleVO.setGmtModify(alarmRuleDO.getGmtModify().getTime());
return alarmRuleVO;
}
public static List<AlarmRuleVO> convert2AlarmRuleVOList(List<AlarmRuleDO> alarmRuleDOList) {
if (alarmRuleDOList == null) {
return new ArrayList<>();
}
List<AlarmRuleVO> alarmRuleVOList = new ArrayList<>();
for (AlarmRuleDO alarmRuleDO: alarmRuleDOList) {
alarmRuleVOList.add(convert2AlarmRuleVO(alarmRuleDO));
}
return alarmRuleVOList;
}
}

View File

@@ -0,0 +1,293 @@
package com.xiaojukeji.kafka.manager.web.converters;
import com.xiaojukeji.kafka.manager.common.entity.dto.analysis.AnalysisBrokerDTO;
import com.xiaojukeji.kafka.manager.common.entity.dto.analysis.AnalysisTopicDTO;
import com.xiaojukeji.kafka.manager.common.entity.zookeeper.PartitionState;
import com.xiaojukeji.kafka.manager.common.entity.dto.BrokerOverallDTO;
import com.xiaojukeji.kafka.manager.common.entity.dto.BrokerOverviewDTO;
import com.xiaojukeji.kafka.manager.common.entity.metrics.BrokerMetrics;
import com.xiaojukeji.kafka.manager.common.entity.po.RegionDO;
import com.xiaojukeji.kafka.manager.common.entity.zookeeper.BrokerMetadata;
import com.xiaojukeji.kafka.manager.common.entity.zookeeper.TopicMetadata;
import com.xiaojukeji.kafka.manager.common.utils.CopyUtils;
import com.xiaojukeji.kafka.manager.service.cache.ClusterMetadataManager;
import com.xiaojukeji.kafka.manager.service.utils.ListUtils;
import com.xiaojukeji.kafka.manager.service.utils.ObjectUtil;
import com.xiaojukeji.kafka.manager.web.vo.broker.*;
import java.util.*;
/**
* @author zengqiao
* @date 19/4/21
*/
public class BrokerModelConverter {
private static Map<Integer, String> convert2BrokerIdRegionNameMap(List<RegionDO> regionDOList) {
Map<Integer, String> brokerIdRegionNameMap = new HashMap<>();
if (regionDOList == null) {
regionDOList = new ArrayList<>();
}
for (RegionDO regionDO: regionDOList) {
List<Integer> brokerIdList = ListUtils.string2IntList(regionDO.getBrokerList());
if (brokerIdList == null || brokerIdList.isEmpty()) {
continue;
}
for (Integer brokerId: brokerIdList) {
brokerIdRegionNameMap.put(brokerId, regionDO.getRegionName());
}
}
return brokerIdRegionNameMap;
}
public static List<BrokerOverviewVO> convert2BrokerOverviewList(List<BrokerOverviewDTO> brokerOverviewDTOList,
List<RegionDO> regionDOList) {
if (brokerOverviewDTOList == null) {
return new ArrayList<>();
}
Map<Integer, String> brokerIdRegionNameMap = convert2BrokerIdRegionNameMap(regionDOList);
List<BrokerOverviewVO> brokerOverviewVOList = new ArrayList<>();
for (BrokerOverviewDTO brokerOverviewDTO: brokerOverviewDTOList) {
BrokerOverviewVO brokerOverviewVO = new BrokerOverviewVO();
CopyUtils.copyProperties(brokerOverviewVO, brokerOverviewDTO);
brokerOverviewVO.setRegionName(brokerIdRegionNameMap.getOrDefault(brokerOverviewDTO.getBrokerId(), ""));
brokerOverviewVOList.add(brokerOverviewVO);
}
Collections.sort(brokerOverviewVOList);
return brokerOverviewVOList;
}
public static List<BrokerOverallVO> convert2BrokerOverallVOList(Long clusterId,
List<BrokerOverallDTO> brokerOverallDTOList,
List<RegionDO> regionDOList) {
if (brokerOverallDTOList == null) {
return new ArrayList<>();
}
Map<Integer, String> brokerIdRegionNameMap = convert2BrokerIdRegionNameMap(regionDOList);
List<BrokerOverallVO> brokerOverallVOList = new ArrayList<>();
for (BrokerOverallDTO brokerOverallDTO: brokerOverallDTOList) {
BrokerMetadata brokerMetadata = ClusterMetadataManager.getBrokerMetadata(clusterId, brokerOverallDTO.getBrokerId());
BrokerOverallVO brokerOverviewVO = new BrokerOverallVO();
brokerOverviewVO.setBrokerId(brokerOverallDTO.getBrokerId());
brokerOverviewVO.setHost(brokerMetadata.getHost());
brokerOverviewVO.setPort(brokerMetadata.getPort());
brokerOverviewVO.setJmxPort(brokerMetadata.getJmxPort());
if (brokerOverallDTO.getBytesInPerSec() != null) {
Double bytesInPerSec = brokerOverallDTO.getBytesInPerSec() / 1024.0 / 1024.0;
brokerOverviewVO.setBytesInPerSec(Math.round(bytesInPerSec * 100) / 100.0);
}
brokerOverviewVO.setLeaderCount(brokerOverallDTO.getLeaderCount());
if (brokerOverallDTO.getPartitionCount() != null && brokerOverallDTO.getUnderReplicatedPartitions() != null) {
brokerOverviewVO.setNotUnderReplicatedPartitionCount(brokerOverallDTO.getPartitionCount() - brokerOverallDTO.getUnderReplicatedPartitions());
}
brokerOverviewVO.setPartitionCount(brokerOverallDTO.getPartitionCount());
brokerOverviewVO.setStartTime(brokerMetadata.getTimestamp());
brokerOverviewVO.setRegionName(brokerIdRegionNameMap.getOrDefault(brokerOverallDTO.getBrokerId(), ""));
brokerOverallVOList.add(brokerOverviewVO);
}
return brokerOverallVOList;
}
public static List<BrokerMetricsVO> convert2BrokerMetricsVOList(List<BrokerMetrics> brokerMetricsList) {
if (brokerMetricsList == null) {
return new ArrayList<>();
}
List<BrokerMetricsVO> brokerMetricsVOList = new ArrayList<>(brokerMetricsList.size());
for (BrokerMetrics brokerMetrics: brokerMetricsList) {
BrokerMetricsVO brokerMetricsVO = new BrokerMetricsVO();
brokerMetricsVO.setBytesInPerSec(brokerMetrics.getBytesInPerSec());
brokerMetricsVO.setBytesOutPerSec(brokerMetrics.getBytesOutPerSec());
brokerMetricsVO.setMessagesInPerSec(brokerMetrics.getMessagesInPerSec());
brokerMetricsVO.setBytesRejectedPerSec(brokerMetrics.getBytesRejectedPerSec());
brokerMetricsVO.setGmtCreate(brokerMetrics.getGmtCreate().getTime());
brokerMetricsVOList.add(brokerMetricsVO);
}
return brokerMetricsVOList;
}
public static BrokerStatusVO convertBroker2BrokerMetricsVO(List<BrokerMetrics> brokerMetricsList) {
if (brokerMetricsList == null) {
return null;
}
BrokerMetrics sumBrokerMetrics = new BrokerMetrics();
for (BrokerMetrics brokerMetrics : brokerMetricsList) {
ObjectUtil.add(sumBrokerMetrics, brokerMetrics, "PerSec");
}
BrokerStatusVO brokerMetricsVO = new BrokerStatusVO();
List<Double> byteIn = new ArrayList<>(4);
List<Double> byteOut = new ArrayList<>(4);
List<Double> messageIn = new ArrayList<>(4);
List<Double> byteRejected = new ArrayList<>(4);
List<Double> failedFetchRequest = new ArrayList<>(4);
List<Double> failedProduceRequest = new ArrayList<>(4);
List<Double> fetchConsumerRequest = new ArrayList<>(4);
List<Double> produceRequest = new ArrayList<>(4);
byteIn.add(sumBrokerMetrics.getBytesInPerSecMeanRate());
byteIn.add(sumBrokerMetrics.getBytesInPerSec());
byteIn.add(sumBrokerMetrics.getBytesInPerSecFiveMinuteRate());
byteIn.add(sumBrokerMetrics.getBytesInPerSecFifteenMinuteRate());
byteOut.add(sumBrokerMetrics.getBytesOutPerSecMeanRate());
byteOut.add(sumBrokerMetrics.getBytesOutPerSec());
byteOut.add(sumBrokerMetrics.getBytesOutPerSecFiveMinuteRate());
byteOut.add(sumBrokerMetrics.getBytesOutPerSecFifteenMinuteRate());
messageIn.add(sumBrokerMetrics.getMessagesInPerSecMeanRate());
messageIn.add(sumBrokerMetrics.getMessagesInPerSec());
messageIn.add(sumBrokerMetrics.getMessagesInPerSecFiveMinuteRate());
messageIn.add(sumBrokerMetrics.getMessagesInPerSecFifteenMinuteRate());
byteRejected.add(sumBrokerMetrics.getBytesRejectedPerSecMeanRate());
byteRejected.add(sumBrokerMetrics.getBytesRejectedPerSec());
byteRejected.add(sumBrokerMetrics.getBytesRejectedPerSecFiveMinuteRate());
byteRejected.add(sumBrokerMetrics.getBytesRejectedPerSecFifteenMinuteRate());
failedProduceRequest.add(sumBrokerMetrics.getFailProduceRequestPerSecMeanRate());
failedProduceRequest.add(sumBrokerMetrics.getFailProduceRequestPerSec());
failedProduceRequest.add(sumBrokerMetrics.getFailProduceRequestPerSecFiveMinuteRate());
failedProduceRequest.add(sumBrokerMetrics.getFailProduceRequestPerSecFifteenMinuteRate());
fetchConsumerRequest.add(sumBrokerMetrics.getFetchConsumerRequestPerSecMeanRate());
fetchConsumerRequest.add(sumBrokerMetrics.getFetchConsumerRequestPerSec());
fetchConsumerRequest.add(sumBrokerMetrics.getFetchConsumerRequestPerSecFiveMinuteRate());
fetchConsumerRequest.add(sumBrokerMetrics.getFetchConsumerRequestPerSecFifteenMinuteRate());
failedFetchRequest.add(sumBrokerMetrics.getFailFetchRequestPerSecMeanRate());
failedFetchRequest.add(sumBrokerMetrics.getFailFetchRequestPerSec());
failedFetchRequest.add(sumBrokerMetrics.getFailFetchRequestPerSecFiveMinuteRate());
failedFetchRequest.add(sumBrokerMetrics.getFailFetchRequestPerSecFifteenMinuteRate());
produceRequest.add(sumBrokerMetrics.getProduceRequestPerSecMeanRate());
produceRequest.add(sumBrokerMetrics.getProduceRequestPerSec());
produceRequest.add(sumBrokerMetrics.getProduceRequestPerSecFiveMinuteRate());
produceRequest.add(sumBrokerMetrics.getProduceRequestPerSecFifteenMinuteRate());
brokerMetricsVO.setByteIn(byteIn);
brokerMetricsVO.setByteOut(byteOut);
brokerMetricsVO.setMessageIn(messageIn);
brokerMetricsVO.setByteRejected(byteRejected);
brokerMetricsVO.setFailedFetchRequest(failedFetchRequest);
brokerMetricsVO.setFailedProduceRequest(failedProduceRequest);
brokerMetricsVO.setProduceRequest(produceRequest);
brokerMetricsVO.setFetchConsumerRequest(fetchConsumerRequest);
return brokerMetricsVO;
}
public static List<BrokerPartitionsVO> convert2BrokerPartitionsVOList(Long clusterId,
Integer brokerId,
Map<String, List<PartitionState>> partitionStateMap){
List<BrokerPartitionsVO> brokerPartitionsVOList = new ArrayList<>();
for (String topicName: partitionStateMap.keySet()) {
BrokerPartitionsVO brokerPartitionsVO = convert2BrokerPartitionsVO(clusterId, brokerId, topicName, partitionStateMap.get(topicName));
if (brokerPartitionsVO == null) {
continue;
}
brokerPartitionsVOList.add(brokerPartitionsVO);
}
return brokerPartitionsVOList;
}
private static BrokerPartitionsVO convert2BrokerPartitionsVO(Long clusterId,
Integer brokerId,
String topicName,
List<PartitionState> partitionStateList){
TopicMetadata topicMetadata = ClusterMetadataManager.getTopicMetaData(clusterId, topicName);
if (null == partitionStateList || topicMetadata == null) {
return null;
}
Set<Integer> leaderPartitionIdSet = new HashSet<>();
Set<Integer> followerPartitionIdSet = new HashSet<>();
Set<Integer> notUnderReplicatedPartitionIdSet = new HashSet<>();
for (PartitionState partitionState : partitionStateList) {
List<Integer> replicaIdList = topicMetadata.getPartitionMap().getPartitions().get(partitionState.getPartitionId());
if (brokerId.equals(partitionState.getLeader())) {
leaderPartitionIdSet.add(partitionState.getPartitionId());
} else if (replicaIdList.contains(brokerId)) {
followerPartitionIdSet.add(partitionState.getPartitionId());
}
if (replicaIdList.contains(brokerId) && partitionState.getIsr().size() < replicaIdList.size()) {
notUnderReplicatedPartitionIdSet.add(partitionState.getPartitionId());
}
}
BrokerPartitionsVO brokerPartitionsVO = new BrokerPartitionsVO();
brokerPartitionsVO.setTopicName(topicName);
brokerPartitionsVO.setLeaderPartitionList(new ArrayList<>(leaderPartitionIdSet));
brokerPartitionsVO.setFollowerPartitionIdList(new ArrayList<>(followerPartitionIdSet));
brokerPartitionsVO.setNotUnderReplicatedPartitionIdList(new ArrayList<>(notUnderReplicatedPartitionIdSet));
brokerPartitionsVO.setUnderReplicated(notUnderReplicatedPartitionIdSet.isEmpty());
return brokerPartitionsVO;
}
public static List<BrokerKeyMetricsVO> convert2BrokerKeyMetricsVOList(List<BrokerMetrics> brokerMetricsList) {
if (brokerMetricsList == null) {
return new ArrayList<>();
}
List<BrokerKeyMetricsVO> brokerKeyMetricsVOList = new ArrayList<>();
for (BrokerMetrics brokerMetrics : brokerMetricsList) {
brokerKeyMetricsVOList.add(BrokerModelConverter.convert2BrokerHealthVO(brokerMetrics));
}
return brokerKeyMetricsVOList;
}
private static BrokerKeyMetricsVO convert2BrokerHealthVO(BrokerMetrics brokerMetrics) {
if (null == brokerMetrics) {
return null;
}
BrokerKeyMetricsVO brokerKeyMetricsVO = new BrokerKeyMetricsVO();
brokerKeyMetricsVO.setId(brokerMetrics.getId());
brokerKeyMetricsVO.setFailFetchRequest(brokerMetrics.getFailFetchRequestPerSec());
brokerKeyMetricsVO.setFailProduceRequest(brokerMetrics.getFailProduceRequestPerSec());
brokerKeyMetricsVO.setLogFlushTime(brokerMetrics.getLogFlushRateAndTimeMs());
brokerKeyMetricsVO.setNetworkProcessorIdlPercent(brokerMetrics.getNetworkProcessorAvgIdlePercent());
brokerKeyMetricsVO.setRequestHandlerIdlPercent(brokerMetrics.getRequestHandlerAvgIdlePercent());
brokerKeyMetricsVO.setRequestQueueSize(brokerMetrics.getRequestQueueSize());
brokerKeyMetricsVO.setResponseQueueSize(brokerMetrics.getResponseQueueSize());
brokerKeyMetricsVO.setTotalTimeProduceMean(brokerMetrics.getTotalTimeProduceMean());
brokerKeyMetricsVO.setTotalTimeProduce99Th(brokerMetrics.getTotalTimeProduce99Th());
brokerKeyMetricsVO.setTotalTimeFetchConsumerMean(brokerMetrics.getTotalTimeFetchConsumerMean());
brokerKeyMetricsVO.setTotalTimeFetchConsumer99Th(brokerMetrics.getTotalTimeFetchConsumer99Th());
brokerKeyMetricsVO.setGmtCreate(brokerMetrics.getGmtCreate().getTime());
return brokerKeyMetricsVO;
}
public static AnalysisBrokerVO convert2AnalysisBrokerVO(AnalysisBrokerDTO analysisBrokerDTO) {
if (analysisBrokerDTO == null) {
return null;
}
AnalysisBrokerVO analysisBrokerVO = new AnalysisBrokerVO();
analysisBrokerVO.setClusterId(analysisBrokerDTO.getClusterId());
analysisBrokerVO.setBrokerId(analysisBrokerDTO.getBrokerId());
analysisBrokerVO.setBaseTime(System.currentTimeMillis());
analysisBrokerVO.setTopicAnalysisVOList(new ArrayList<>());
analysisBrokerVO.setBytesIn(analysisBrokerDTO.getBytesIn());
analysisBrokerVO.setBytesOut(analysisBrokerDTO.getBytesOut());
analysisBrokerVO.setMessagesIn(analysisBrokerDTO.getMessagesIn());
analysisBrokerVO.setTotalProduceRequests(analysisBrokerDTO.getTotalProduceRequests());
analysisBrokerVO.setTotalFetchRequests(analysisBrokerDTO.getTotalFetchRequests());
for (AnalysisTopicDTO analysisTopicDTO: analysisBrokerDTO.getTopicAnalysisVOList()) {
AnalysisTopicVO analysisTopicVO = new AnalysisTopicVO();
analysisTopicVO.setTopicName(analysisTopicDTO.getTopicName());
analysisTopicVO.setBytesIn(String.format("%.2f", analysisTopicDTO.getBytesIn() / 1024.0 / 1024.0));
analysisTopicVO.setBytesInRate(String.format("%.2f", analysisTopicDTO.getBytesInRate()));
analysisTopicVO.setBytesOut(String.format("%.2f", analysisTopicDTO.getBytesOut() / 1024.0 / 1024.0));
analysisTopicVO.setBytesOutRate(String.format("%.2f", analysisTopicDTO.getBytesOutRate()));
analysisTopicVO.setMessagesIn(String.format("%.2f", analysisTopicDTO.getMessagesIn()));
analysisTopicVO.setMessagesInRate(String.format("%.2f", analysisTopicDTO.getMessagesInRate()));
analysisTopicVO.setTotalFetchRequests(String.format("%.2f", analysisTopicDTO.getTotalFetchRequests()));
analysisTopicVO.setTotalFetchRequestsRate(String.format("%.2f", analysisTopicDTO.getTotalFetchRequestsRate()));
analysisTopicVO.setTotalProduceRequests(String.format("%.2f", analysisTopicDTO.getTotalProduceRequests()));
analysisTopicVO.setTotalProduceRequestsRate(String.format("%.2f", analysisTopicDTO.getTotalProduceRequestsRate()));
analysisBrokerVO.getTopicAnalysisVOList().add(analysisTopicVO);
}
return analysisBrokerVO;
}
}
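
convertBroker2BrokerMetricsVO packs each metric into a four-element list; judging by the getters used, the order appears to be [mean rate, current per-second value, five-minute rate, fifteen-minute rate]. If that reading is right, the repetition could be collapsed with a tiny helper such as the following sketch (not part of this commit):

import java.util.Arrays;
import java.util.List;

// Hypothetical helper encoding the assumed [mean, current, 5-min, 15-min] ordering
final class MetricListSketch {
    static List<Double> rates(Double meanRate, Double current, Double fiveMinuteRate, Double fifteenMinuteRate) {
        return Arrays.asList(meanRate, current, fiveMinuteRate, fifteenMinuteRate);
    }
}

A call site would then read, for example, brokerMetricsVO.setByteIn(MetricListSketch.rates(sumBrokerMetrics.getBytesInPerSecMeanRate(), sumBrokerMetrics.getBytesInPerSec(), sumBrokerMetrics.getBytesInPerSecFiveMinuteRate(), sumBrokerMetrics.getBytesInPerSecFifteenMinuteRate())).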

View File

@@ -0,0 +1,118 @@
package com.xiaojukeji.kafka.manager.web.converters;
import com.xiaojukeji.kafka.manager.web.model.ClusterModel;
import com.xiaojukeji.kafka.manager.web.vo.cluster.ClusterBasicVO;
import com.xiaojukeji.kafka.manager.web.vo.cluster.ClusterDetailVO;
import com.xiaojukeji.kafka.manager.common.entity.bizenum.DBStatusEnum;
import com.xiaojukeji.kafka.manager.common.entity.po.ClusterDO;
import com.xiaojukeji.kafka.manager.common.entity.po.ControllerDO;
import com.xiaojukeji.kafka.manager.common.entity.zookeeper.ControllerData;
import com.xiaojukeji.kafka.manager.service.cache.ClusterMetadataManager;
import com.xiaojukeji.kafka.manager.web.vo.KafkaControllerVO;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
* Cluster-related conversions
* @author huangyiminghappy@163.com
* @date 2019/3/15
*/
public class ClusterModelConverter {
public static List<ClusterBasicVO> convert2ClusterBasicVOList(List<ClusterDO> clusterDOList) {
if (clusterDOList == null || clusterDOList.isEmpty()) {
return new ArrayList<>();
}
List<ClusterBasicVO> clusterBasicVOList = new ArrayList<>();
for (ClusterDO clusterDO: clusterDOList) {
clusterBasicVOList.add(convert2ClusterBasicVO(clusterDO));
}
return clusterBasicVOList;
}
public static ClusterBasicVO convert2ClusterBasicVO(ClusterDO clusterDO) {
ClusterBasicVO clusterBasicVO = new ClusterBasicVO();
clusterBasicVO.setClusterId(clusterDO.getId());
clusterBasicVO.setClusterName(clusterDO.getClusterName());
clusterBasicVO.setBootstrapServers(clusterDO.getBootstrapServers());
clusterBasicVO.setKafkaVersion(clusterDO.getKafkaVersion());
clusterBasicVO.setGmtCreate(clusterDO.getGmtCreate().getTime());
clusterBasicVO.setGmtModify(clusterDO.getGmtModify().getTime());
clusterBasicVO.setBrokerNum(ClusterMetadataManager.getBrokerIdList(clusterDO.getId()).size());
clusterBasicVO.setTopicNum(ClusterMetadataManager.getTopicNameList(clusterDO.getId()).size());
return clusterBasicVO;
}
public static List<ClusterDetailVO> convert2ClusterDetailVOList(List<ClusterDO> clusterDOList,
Map<Long, Long> clusterIdTagNumMap,
Map<Long, Integer> consumerGroupNumMap) {
List<ClusterDetailVO> clusterDetailVOList = new ArrayList<>();
for (ClusterDO clusterDO: clusterDOList) {
ClusterDetailVO clusterDetailVO = new ClusterDetailVO();
clusterDetailVO.setClusterId(clusterDO.getId());
clusterDetailVO.setClusterName(clusterDO.getClusterName());
clusterDetailVO.setZookeeper(clusterDO.getZookeeper());
clusterDetailVO.setBootstrapServers(clusterDO.getBootstrapServers());
clusterDetailVO.setKafkaVersion(clusterDO.getKafkaVersion());
clusterDetailVO.setSecurityProtocol(clusterDO.getSecurityProtocol());
clusterDetailVO.setSaslMechanism(clusterDO.getSaslMechanism());
clusterDetailVO.setSaslJaasConfig(clusterDO.getSaslJaasConfig());
clusterDetailVO.setAlarmFlag(clusterDO.getAlarmFlag());
clusterDetailVO.setStatus(clusterDO.getStatus());
clusterDetailVO.setGmtCreate(clusterDO.getGmtCreate().getTime());
clusterDetailVO.setGmtModify(clusterDO.getGmtModify().getTime());
if (DBStatusEnum.DELETED.getStatus().equals(clusterDO.getStatus())) {
clusterDetailVO.setBrokerNum(-1);
clusterDetailVO.setTopicNum(-1);
clusterDetailVO.setControllerId(-1);
clusterDetailVO.setRegionNum(-1);
clusterDetailVOList.add(clusterDetailVO);
continue;
}
clusterDetailVO.setRegionNum(clusterIdTagNumMap.getOrDefault(clusterDO.getId(), 0L).intValue());
clusterDetailVO.setBrokerNum(ClusterMetadataManager.getBrokerIdList(clusterDO.getId()).size());
clusterDetailVO.setTopicNum(ClusterMetadataManager.getTopicNameList(clusterDO.getId()).size());
ControllerData controllerData = ClusterMetadataManager.getControllerData(clusterDO.getId());
if (controllerData == null) {
clusterDetailVO.setControllerId(-1);
} else {
clusterDetailVO.setControllerId(controllerData.getBrokerid());
}
clusterDetailVO.setConsumerGroupNum(consumerGroupNumMap.getOrDefault(clusterDO.getId(), 0));
clusterDetailVOList.add(clusterDetailVO);
}
return clusterDetailVOList;
}
public static List<KafkaControllerVO> convert2KafkaControllerVOList(List<ControllerDO> controllerDOList) {
if (controllerDOList == null) {
return new ArrayList<>();
}
List<KafkaControllerVO> kafkaControllerVOList = new ArrayList<>();
for (ControllerDO kafkaControllerDO: controllerDOList) {
KafkaControllerVO kafkaControllerVO = new KafkaControllerVO();
kafkaControllerVO.setBrokerId(kafkaControllerDO.getBrokerId());
kafkaControllerVO.setHost(kafkaControllerDO.getHost());
kafkaControllerVO.setControllerVersion(kafkaControllerDO.getVersion());
kafkaControllerVO.setControllerTimestamp(kafkaControllerDO.getTimestamp());
kafkaControllerVOList.add(kafkaControllerVO);
}
return kafkaControllerVOList;
}
public static ClusterDO convert2ClusterDO(ClusterModel reqObj) {
ClusterDO clusterDO = new ClusterDO();
clusterDO.setClusterName(reqObj.getClusterName());
clusterDO.setZookeeper(reqObj.getZookeeper());
clusterDO.setKafkaVersion(reqObj.getKafkaVersion());
clusterDO.setBootstrapServers(reqObj.getBootstrapServers());
clusterDO.setId(reqObj.getClusterId());
clusterDO.setAlarmFlag(reqObj.getAlarmFlag() == null? 0: reqObj.getAlarmFlag());
clusterDO.setSecurityProtocol(reqObj.getSecurityProtocol() == null? "": reqObj.getSecurityProtocol());
clusterDO.setSaslMechanism(reqObj.getSaslMechanism() == null? "": reqObj.getSaslMechanism());
clusterDO.setSaslJaasConfig(reqObj.getSaslJaasConfig() == null? "": reqObj.getSaslJaasConfig());
return clusterDO;
}
}

View File

@@ -0,0 +1,55 @@
package com.xiaojukeji.kafka.manager.web.converters;
import com.xiaojukeji.kafka.manager.web.vo.consumer.ConsumerGroupDetailVO;
import com.xiaojukeji.kafka.manager.common.entity.dto.consumer.ConsumeDetailDTO;
import com.xiaojukeji.kafka.manager.common.entity.dto.consumer.ConsumerGroupDTO;
import com.xiaojukeji.kafka.manager.web.vo.consumer.ConsumerGroupVO;
import java.util.ArrayList;
import java.util.List;
/**
* @author zengqiao
* @date 19/4/3
*/
public class ConsumerModelConverter {
public static List<ConsumerGroupVO> convert2ConsumerGroupVOList(List<ConsumerGroupDTO> consumeGroupDTOList) {
if (consumeGroupDTOList == null || consumeGroupDTOList.isEmpty()) {
return new ArrayList<>();
}
List<ConsumerGroupVO> consumerGroupVOList = new ArrayList<>();
for (ConsumerGroupDTO consumeGroupDTO : consumeGroupDTOList) {
consumerGroupVOList.add(
new ConsumerGroupVO(consumeGroupDTO.getConsumerGroup(), consumeGroupDTO.getOffsetStoreLocation().name())
);
}
return consumerGroupVOList;
}
public static List<ConsumerGroupDetailVO> convert2ConsumerGroupDetailVO(Long clusterId, String topicName,
String consumeGroup, String location,
List<ConsumeDetailDTO> consumeDetailDTOList) {
if (consumeDetailDTOList == null || consumeDetailDTOList.isEmpty()) {
return new ArrayList<>();
}
List<ConsumerGroupDetailVO> consumerGroupDetailVOList = new ArrayList<>();
for (ConsumeDetailDTO consumeDetailDTO : consumeDetailDTOList) {
ConsumerGroupDetailVO consumerGroupDetailVO = new ConsumerGroupDetailVO();
consumerGroupDetailVO.setClusterId(clusterId);
consumerGroupDetailVO.setTopicName(topicName);
consumerGroupDetailVO.setConsumerGroup(consumeGroup);
consumerGroupDetailVO.setLocation(location);
consumerGroupDetailVO.setPartitionId(consumeDetailDTO.getPartitionId());
consumerGroupDetailVO.setClientId(consumeDetailDTO.getConsumerId());
consumerGroupDetailVO.setConsumeOffset(consumeDetailDTO.getConsumeOffset());
consumerGroupDetailVO.setPartitionOffset(consumeDetailDTO.getOffset());
if (consumeDetailDTO.getOffset() != null && consumeDetailDTO.getConsumeOffset() != null) {
consumerGroupDetailVO.setLag(consumeDetailDTO.getOffset() - consumeDetailDTO.getConsumeOffset());
}
consumerGroupDetailVOList.add(consumerGroupDetailVO);
}
return consumerGroupDetailVOList;
}
}
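
The lag reported per partition is simply the partition end offset minus the group's committed offset; a worked example with assumed numbers:

// Assumed offsets for illustration: end offset 1500, committed offset 1420
long partitionOffset = 1500L;
long consumeOffset = 1420L;
long lag = partitionOffset - consumeOffset;   // the group is 80 messages behind on this partition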

View File

@@ -0,0 +1,138 @@
package com.xiaojukeji.kafka.manager.web.converters;
import com.xiaojukeji.kafka.manager.common.entity.po.*;
import com.xiaojukeji.kafka.manager.web.model.order.OrderPartitionModel;
import com.xiaojukeji.kafka.manager.web.model.order.OrderTopicModel;
import com.xiaojukeji.kafka.manager.common.entity.bizenum.DBStatusEnum;
import com.xiaojukeji.kafka.manager.common.entity.bizenum.OrderStatusEnum;
import com.xiaojukeji.kafka.manager.common.entity.zookeeper.TopicMetadata;
import com.xiaojukeji.kafka.manager.common.utils.CopyUtils;
import com.xiaojukeji.kafka.manager.service.utils.ListUtils;
import com.xiaojukeji.kafka.manager.web.vo.order.OrderPartitionVO;
import com.xiaojukeji.kafka.manager.web.vo.order.OrderTopicVO;
import org.springframework.util.StringUtils;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* @author zengqiao
* @date 19/6/18
*/
public class OrderConverter {
public static OrderTopicDO convert2OrderTopicDO(ClusterDO clusterDO,
String applicant,
OrderTopicModel orderTopicModel) {
OrderTopicDO orderTopicDO = new OrderTopicDO();
orderTopicDO.setId(orderTopicModel.getOrderId());
orderTopicDO.setClusterId(clusterDO.getId());
orderTopicDO.setClusterName(clusterDO.getClusterName());
orderTopicDO.setTopicName(orderTopicModel.getTopicName());
orderTopicDO.setApplicant(applicant);
orderTopicDO.setPrincipals(ListUtils.strList2String(orderTopicModel.getPrincipalList()));
orderTopicDO.setPeakBytesIn(orderTopicModel.getPeakBytesIn().longValue());
orderTopicDO.setPartitionNum(0);
orderTopicDO.setRetentionTime(orderTopicModel.getRetentionTime() * 60 * 60 * 1000);
orderTopicDO.setReplicaNum(3);
orderTopicDO.setDescription(orderTopicModel.getDescription());
return orderTopicDO;
}
public static List<OrderTopicVO> convert2OrderTopicVOList(List<OrderTopicDO> orderTopicDOList) {
if (orderTopicDOList == null) {
return new ArrayList<>();
}
List<OrderTopicVO> orderTopicVOList = new ArrayList<>();
for (OrderTopicDO orderTopicDO: orderTopicDOList) {
OrderTopicVO orderTopicVO = new OrderTopicVO();
CopyUtils.copyProperties(orderTopicVO, orderTopicDO);
orderTopicVO.setOrderId(orderTopicDO.getId());
orderTopicVO.setGmtCreate(orderTopicDO.getGmtCreate().getTime());
orderTopicVO.setGmtModify(orderTopicDO.getGmtModify().getTime());
orderTopicVOList.add(orderTopicVO);
}
return orderTopicVOList;
}
public static OrderPartitionDO convert2OrderPartitionDO(ClusterDO clusterDO,
String applicant,
OrderPartitionModel orderPartitionModel) {
OrderPartitionDO orderPartitionDO = new OrderPartitionDO();
orderPartitionDO.setId(orderPartitionModel.getOrderId());
orderPartitionDO.setClusterId(clusterDO.getId());
orderPartitionDO.setClusterName(clusterDO.getClusterName());
orderPartitionDO.setTopicName(orderPartitionModel.getTopicName());
orderPartitionDO.setApplicant(applicant);
orderPartitionDO.setPeakBytesIn(orderPartitionModel.getPredictBytesIn().longValue());
orderPartitionDO.setOrderStatus(OrderStatusEnum.WAIT_DEAL.getCode());
orderPartitionDO.setDescription(orderPartitionModel.getDescription());
return orderPartitionDO;
}
public static List<OrderPartitionVO> convert2OrderPartitionVOList(List<OrderPartitionDO> orderPartitionDOList) {
if (orderPartitionDOList == null) {
return new ArrayList<>();
}
List<OrderPartitionVO> orderPartitionVOList = new ArrayList<>();
for (OrderPartitionDO orderPartitionDO: orderPartitionDOList) {
orderPartitionVOList.add(
convert2OrderPartitionVO(orderPartitionDO, null, null, null)
);
}
return orderPartitionVOList;
}
public static OrderPartitionVO convert2OrderPartitionVO(OrderPartitionDO orderPartitionDO,
TopicMetadata topicMetadata,
Long maxAvgBytes, List<RegionDO> regionDOList) {
if (orderPartitionDO == null) {
return null;
}
OrderPartitionVO orderPartitionVO = new OrderPartitionVO();
CopyUtils.copyProperties(orderPartitionVO, orderPartitionDO);
orderPartitionVO.setOrderId(orderPartitionDO.getId());
orderPartitionVO.setPredictBytesIn(orderPartitionDO.getPeakBytesIn());
orderPartitionVO.setGmtCreate(orderPartitionDO.getGmtCreate().getTime());
orderPartitionVO.setGmtModify(orderPartitionDO.getGmtModify().getTime());
orderPartitionVO.setRealBytesIn(maxAvgBytes);
if (topicMetadata == null) {
return orderPartitionVO;
}
orderPartitionVO.setPartitionNum(topicMetadata.getPartitionNum());
orderPartitionVO.setBrokerIdList(new ArrayList<>(topicMetadata.getBrokerIdSet()));
if (regionDOList == null || regionDOList.isEmpty()) {
orderPartitionVO.setRegionNameList(new ArrayList<>());
orderPartitionVO.setRegionBrokerIdList(new ArrayList<>());
return orderPartitionVO;
}
Set<String> regionNameSet = new HashSet<>();
Set<Integer> brokerIdSet = new HashSet<>();
for (RegionDO regionDO: regionDOList) {
regionNameSet.add(regionDO.getRegionName());
if (StringUtils.isEmpty(regionDO.getBrokerList())) {
continue;
}
brokerIdSet.addAll(ListUtils.string2IntList(regionDO.getBrokerList()));
}
orderPartitionVO.setRegionNameList(new ArrayList<>(regionNameSet));
orderPartitionVO.setRegionBrokerIdList(new ArrayList<>(brokerIdSet));
return orderPartitionVO;
}
public static TopicDO convert2TopicInfoDO(OrderTopicDO orderTopicDO) {
TopicDO topicDO = new TopicDO();
topicDO.setClusterId(orderTopicDO.getClusterId());
topicDO.setTopicName(orderTopicDO.getTopicName());
topicDO.setApplicant(orderTopicDO.getApplicant());
topicDO.setPrincipals(orderTopicDO.getPrincipals());
topicDO.setDescription(orderTopicDO.getDescription());
topicDO.setStatus(DBStatusEnum.NORMAL.getStatus());
return topicDO;
}
}
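
convert2OrderTopicDO stores retention in milliseconds while the order form submits hours, hence the * 60 * 60 * 1000 conversion; a quick check of that arithmetic with an assumed value:

// Assumed example: a 72-hour retention request
long retentionHours = 72L;
long retentionMs = retentionHours * 60 * 60 * 1000;   // 259200000 ms, i.e. 3 days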

View File

@@ -0,0 +1,58 @@
package com.xiaojukeji.kafka.manager.web.converters;
import com.xiaojukeji.kafka.manager.common.entity.po.RegionDO;
import com.xiaojukeji.kafka.manager.service.utils.ListUtils;
import com.xiaojukeji.kafka.manager.web.model.RegionModel;
import com.xiaojukeji.kafka.manager.web.vo.RegionVO;
import java.util.ArrayList;
import java.util.List;
/**
* @author zengqiao
* @date 19/4/3
*/
public class RegionModelConverter {
private static RegionVO convert2RegionVO(RegionDO regionDO) {
if (regionDO == null) {
return null;
}
RegionVO regionVO = new RegionVO();
regionVO.setRegionId(regionDO.getId());
regionVO.setClusterId(regionDO.getClusterId());
regionVO.setRegionName(regionDO.getRegionName());
regionVO.setLevel(regionDO.getLevel());
regionVO.setBrokerIdList(ListUtils.string2IntList(regionDO.getBrokerList()));
regionVO.setDescription(regionDO.getDescription());
regionVO.setOperator(regionDO.getOperator());
regionVO.setStatus(regionDO.getStatus());
regionVO.setGmtCreate(regionDO.getGmtCreate().getTime());
regionVO.setGmtModify(regionDO.getGmtModify().getTime());
return regionVO;
}
public static List<RegionVO> convert2RegionVOList(List<RegionDO> regionDOList) {
if (regionDOList == null) {
return new ArrayList<>();
}
List<RegionVO> regionInfoVOList = new ArrayList<>();
for (RegionDO regionDO: regionDOList) {
regionInfoVOList.add(convert2RegionVO(regionDO));
}
return regionInfoVOList;
}
public static RegionDO convert2RegionDO(RegionModel regionModel, String operator) {
RegionDO regionDO = new RegionDO();
regionDO.setId(regionModel.getRegionId());
regionDO.setBrokerList(ListUtils.intList2String(regionModel.getBrokerIdList()));
regionDO.setClusterId(regionModel.getClusterId());
regionDO.setLevel(regionModel.getLevel());
regionDO.setDescription(regionModel.getDescription());
regionDO.setOperator(operator);
regionDO.setStatus(regionModel.getStatus() == null? 0: regionModel.getStatus());
regionDO.setRegionName(regionModel.getRegionName());
return regionDO;
}
}

View File

@@ -0,0 +1,245 @@
package com.xiaojukeji.kafka.manager.web.converters;
import com.xiaojukeji.kafka.manager.web.model.topic.TopicFavorite;
import com.xiaojukeji.kafka.manager.common.entity.dto.PartitionOffsetDTO;
import com.xiaojukeji.kafka.manager.common.entity.dto.TopicBasicDTO;
import com.xiaojukeji.kafka.manager.common.entity.dto.TopicOverviewDTO;
import com.xiaojukeji.kafka.manager.common.entity.dto.TopicPartitionDTO;
import com.xiaojukeji.kafka.manager.common.entity.metrics.TopicMetrics;
import com.xiaojukeji.kafka.manager.common.entity.po.ClusterDO;
import com.xiaojukeji.kafka.manager.common.entity.po.RegionDO;
import com.xiaojukeji.kafka.manager.common.entity.po.TopicDO;
import com.xiaojukeji.kafka.manager.common.entity.po.TopicFavoriteDO;
import com.xiaojukeji.kafka.manager.common.entity.zookeeper.BrokerMetadata;
import com.xiaojukeji.kafka.manager.common.entity.zookeeper.PartitionMap;
import com.xiaojukeji.kafka.manager.common.entity.zookeeper.TopicMetadata;
import com.xiaojukeji.kafka.manager.common.utils.CopyUtils;
import com.xiaojukeji.kafka.manager.service.cache.ClusterMetadataManager;
import com.xiaojukeji.kafka.manager.service.utils.ListUtils;
import com.xiaojukeji.kafka.manager.web.vo.topic.*;
import java.util.*;
import java.util.stream.Collectors;
/**
* @author arthur
* @date 2017/6/1.
*/
public class TopicModelConverter {
public static List<TopicFavoriteDO> convert2TopicFavoriteDOList(String username,
List<TopicFavorite> topicFavoriteList) {
if (topicFavoriteList == null) {
return new ArrayList<>();
}
List<TopicFavoriteDO> topicFavoriteDOList = new ArrayList<>();
for (TopicFavorite topicFavorite: topicFavoriteList) {
TopicFavoriteDO topicFavoriteDO = new TopicFavoriteDO();
topicFavoriteDO.setTopicName(topicFavorite.getTopicName());
topicFavoriteDO.setClusterId(topicFavorite.getClusterId());
topicFavoriteDO.setUsername(username);
topicFavoriteDOList.add(topicFavoriteDO);
}
return topicFavoriteDOList;
}
public static TopicBasicVO convert2TopicBasicVO(TopicBasicDTO topicBasicDTO,
TopicDO topicDO,
List<RegionDO> regionList) {
TopicBasicVO topicBasicVO = new TopicBasicVO();
CopyUtils.copyProperties(topicBasicVO, topicBasicDTO);
if (topicDO != null) {
topicBasicVO.setDescription(topicDO.getDescription());
topicBasicVO.setPrincipals(topicDO.getPrincipals());
}
if (regionList == null) {
return topicBasicVO;
}
List<String> regionNameList = regionList.stream().map(elem -> elem.getRegionName()).collect(Collectors.toList());
topicBasicVO.setRegionNames(ListUtils.strList2String(regionNameList));
return topicBasicVO;
}
public static List<TopicOverviewVO> convert2TopicOverviewVOList(ClusterDO cluster,
List<TopicOverviewDTO> topicOverviewDTOList,
List<TopicDO> topicDOList,
List<TopicFavoriteDO> topicFavoriteDOList) {
if (topicOverviewDTOList == null) {
return new ArrayList<>();
}
Map<String, TopicFavoriteDO> favoriteMap = new HashMap<>(0);
if (topicFavoriteDOList != null) {
favoriteMap = topicFavoriteDOList.stream().filter(elem -> cluster.getId().equals(elem.getClusterId())).collect(Collectors.toMap(TopicFavoriteDO::getTopicName, elem -> elem));
}
Map<String, String> principalMap = new HashMap<>(0);
if (topicDOList != null) {
principalMap = topicDOList.stream().collect(Collectors.toMap(TopicDO::getTopicName, TopicDO::getPrincipals));
}
List<TopicOverviewVO> topicInfoVOList = new ArrayList<>();
for (TopicOverviewDTO topicOverviewDTO: topicOverviewDTOList) {
TopicOverviewVO topicInfoVO = new TopicOverviewVO();
topicInfoVO.setClusterId(cluster.getId());
topicInfoVO.setClusterName(cluster.getClusterName());
topicInfoVO.setTopicName(topicOverviewDTO.getTopicName());
topicInfoVO.setPartitionNum(topicOverviewDTO.getPartitionNum());
topicInfoVO.setUpdateTime(topicOverviewDTO.getUpdateTime());
topicInfoVO.setReplicaNum(topicOverviewDTO.getReplicaNum());
topicInfoVO.setByteIn(topicOverviewDTO.getBytesInPerSec());
topicInfoVO.setProduceRequest(topicOverviewDTO.getProduceRequestPerSec());
topicInfoVO.setPrincipals(principalMap.get(topicOverviewDTO.getTopicName()));
if (favoriteMap.containsKey(topicOverviewDTO.getTopicName())) {
topicInfoVO.setFavorite(Boolean.TRUE);
} else {
topicInfoVO.setFavorite(Boolean.FALSE);
}
topicInfoVOList.add(topicInfoVO);
}
return topicInfoVOList;
}
/**
* Build the TopicBrokerVO list
*/
public static List<TopicBrokerVO> convert2TopicBrokerVOList(ClusterDO clusterDO,
TopicMetadata topicMetadata,
List<TopicPartitionDTO> topicPartitionDTOList){
if(clusterDO == null || topicMetadata == null){
return new ArrayList<>();
}
PartitionMap partitionMap = topicMetadata.getPartitionMap();
Map<Integer, TopicBrokerVO> brokerIdTopicBrokerVOMap = new HashMap<>();
for (Integer brokerId: topicMetadata.getBrokerIdSet()) {
TopicBrokerVO topicBrokerVO = new TopicBrokerVO();
topicBrokerVO.setBrokerId(brokerId);
List<Integer> partitionIdList = new ArrayList<>();
for (Integer partitionId: partitionMap.getPartitions().keySet()) {
if (partitionMap.getPartitions().get(partitionId).contains(brokerId)) {
partitionIdList.add(partitionId);
}
}
topicBrokerVO.setPartitionIdList(partitionIdList);
topicBrokerVO.setPartitionNum(partitionIdList.size());
topicBrokerVO.setLeaderPartitionIdList(new ArrayList<Integer>());
// set the broker's host name
BrokerMetadata brokerMetadata = ClusterMetadataManager.getBrokerMetadata(clusterDO.getId(), brokerId);
if (brokerMetadata != null) {
topicBrokerVO.setHost(brokerMetadata.getHost());
} else {
topicBrokerVO.setHost("");
}
brokerIdTopicBrokerVOMap.put(brokerId, topicBrokerVO);
}
for (TopicPartitionDTO topicPartitionDTO: topicPartitionDTOList) {
Integer leaderBrokerId = topicPartitionDTO.getLeaderBrokerId();
if (!brokerIdTopicBrokerVOMap.containsKey(leaderBrokerId)) {
// abnormal partition whose leader is not among this topic's brokers, skip it
continue;
}
brokerIdTopicBrokerVOMap.get(leaderBrokerId).getLeaderPartitionIdList().add(topicPartitionDTO.getPartitionId());
}
return new ArrayList<>(brokerIdTopicBrokerVOMap.values());
}
public static TopicRealTimeMetricsVO convert2TopicRealTimeMetricsVO(TopicMetrics topicMetrics){
TopicRealTimeMetricsVO topicRealTimeMetricsVO = new TopicRealTimeMetricsVO();
List<Double> messageIn = new ArrayList<>();
messageIn.add(topicMetrics.getMessagesInPerSecMeanRate());
messageIn.add(topicMetrics.getMessagesInPerSec());
messageIn.add(topicMetrics.getMessagesInPerSecFiveMinuteRate());
messageIn.add(topicMetrics.getMessagesInPerSecFifteenMinuteRate());
topicRealTimeMetricsVO.setMessageIn(messageIn);
List<Double> byteIn = new ArrayList<>();
byteIn.add(topicMetrics.getBytesInPerSecMeanRate());
byteIn.add(topicMetrics.getBytesInPerSec());
byteIn.add(topicMetrics.getBytesInPerSecFiveMinuteRate());
byteIn.add(topicMetrics.getBytesInPerSecFifteenMinuteRate());
topicRealTimeMetricsVO.setByteIn(byteIn);
List<Double> byteOut = new ArrayList<>();
byteOut.add(topicMetrics.getBytesOutPerSecMeanRate());
byteOut.add(topicMetrics.getBytesOutPerSec());
byteOut.add(topicMetrics.getBytesOutPerSecFiveMinuteRate());
byteOut.add(topicMetrics.getBytesOutPerSecFifteenMinuteRate());
topicRealTimeMetricsVO.setByteOut(byteOut);
List<Double> byteRejected = new ArrayList<>();
byteRejected.add(topicMetrics.getBytesRejectedPerSecMeanRate());
byteRejected.add(topicMetrics.getBytesRejectedPerSec());
byteRejected.add(topicMetrics.getBytesRejectedPerSecFiveMinuteRate());
byteRejected.add(topicMetrics.getBytesRejectedPerSecFifteenMinuteRate());
topicRealTimeMetricsVO.setByteRejected(byteRejected);
List<Double> failedFetchRequest = new ArrayList<>();
failedFetchRequest.add(topicMetrics.getFailFetchRequestPerSecMeanRate());
failedFetchRequest.add(topicMetrics.getFailFetchRequestPerSec());
failedFetchRequest.add(topicMetrics.getFailFetchRequestPerSecFiveMinuteRate());
failedFetchRequest.add(topicMetrics.getFailFetchRequestPerSecFifteenMinuteRate());
topicRealTimeMetricsVO.setFailedFetchRequest(failedFetchRequest);
List<Double> failedProduceRequest = new ArrayList<>();
failedProduceRequest.add(topicMetrics.getFailProduceRequestPerSecMeanRate());
failedProduceRequest.add(topicMetrics.getFailProduceRequestPerSec());
failedProduceRequest.add(topicMetrics.getFailProduceRequestPerSecFiveMinuteRate());
failedProduceRequest.add(topicMetrics.getFailProduceRequestPerSecFifteenMinuteRate());
topicRealTimeMetricsVO.setFailedProduceRequest(failedProduceRequest);
List<Double> totalProduceRequest = new ArrayList<>();
totalProduceRequest.add(topicMetrics.getTotalProduceRequestsPerSecMeanRate());
totalProduceRequest.add(topicMetrics.getTotalProduceRequestsPerSec());
totalProduceRequest.add(topicMetrics.getTotalProduceRequestsPerSecFiveMinuteRate());
totalProduceRequest.add(topicMetrics.getTotalProduceRequestsPerSecFifteenMinuteRate());
topicRealTimeMetricsVO.setTotalProduceRequest(totalProduceRequest);
List<Double> totalFetchRequest = new ArrayList<>();
totalFetchRequest.add(topicMetrics.getTotalFetchRequestsPerSecMeanRate());
totalFetchRequest.add(topicMetrics.getTotalFetchRequestsPerSec());
totalFetchRequest.add(topicMetrics.getTotalFetchRequestsPerSecFiveMinuteRate());
totalFetchRequest.add(topicMetrics.getTotalFetchRequestsPerSecFifteenMinuteRate());
topicRealTimeMetricsVO.setTotalFetchRequest(totalFetchRequest);
return topicRealTimeMetricsVO;
}
public static List<TopicOffsetVO> convert2TopicOffsetVOList(Long clusterId,
String topicName,
List<PartitionOffsetDTO> partitionOffsetDTOList) {
List<TopicOffsetVO> topicOffsetVOList = new ArrayList<>();
for (PartitionOffsetDTO partitionOffsetDTO: partitionOffsetDTOList) {
topicOffsetVOList.add(new TopicOffsetVO(clusterId, topicName, partitionOffsetDTO.getPartitionId(), partitionOffsetDTO.getOffset(), partitionOffsetDTO.getTimestamp()));
}
return topicOffsetVOList;
}
public static List<TopicPartitionVO> convert2TopicPartitionVOList(List<TopicPartitionDTO> topicPartitionDTOList) {
List<TopicPartitionVO> topicPartitionVOList = new ArrayList<>();
for (TopicPartitionDTO topicPartitionDTO: topicPartitionDTOList) {
TopicPartitionVO topicPartitionVO = new TopicPartitionVO();
topicPartitionVO.setPartitionId(topicPartitionDTO.getPartitionId());
topicPartitionVO.setOffset(topicPartitionDTO.getOffset());
topicPartitionVO.setLeaderBrokerId(topicPartitionDTO.getLeaderBrokerId());
topicPartitionVO.setPreferredBrokerId(topicPartitionDTO.getPreferredBrokerId());
topicPartitionVO.setLeaderEpoch(topicPartitionDTO.getLeaderEpoch());
topicPartitionVO.setReplicaBrokerIdList(topicPartitionDTO.getReplicasBroker());
topicPartitionVO.setIsrBrokerIdList(topicPartitionDTO.getIsr());
topicPartitionVO.setUnderReplicated(topicPartitionDTO.isUnderReplicated());
topicPartitionVOList.add(topicPartitionVO);
}
return topicPartitionVOList;
}
public static TopicMetadataVO convert2TopicMetadataVO(Long clusterId, TopicMetadata topicMetadata) {
TopicMetadataVO topicMetadataVO = new TopicMetadataVO();
topicMetadataVO.setClusterId(clusterId);
topicMetadataVO.setTopicName(topicMetadata.getTopic());
topicMetadataVO.setBrokerIdList(new ArrayList<>(topicMetadata.getBrokerIdSet()));
topicMetadataVO.setReplicaNum(topicMetadata.getReplicaNum());
topicMetadataVO.setPartitionNum(topicMetadata.getPartitionNum());
topicMetadataVO.setModifyTime(topicMetadata.getModifyTime());
topicMetadataVO.setCreateTime(topicMetadata.getCreateTime());
return topicMetadataVO;
}
}

View File

@@ -0,0 +1,65 @@
package com.xiaojukeji.kafka.manager.web.inteceptor;
import com.xiaojukeji.kafka.manager.common.entity.bizenum.AccountRoleEnum;
import com.xiaojukeji.kafka.manager.service.service.LoginService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.web.servlet.HandlerInterceptor;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
/**
* Permission interceptor
* @author huangyiminghappy@163.com
* @date 19/4/29
*/
@Component
public class PermissionInterceptor implements HandlerInterceptor {
private final static Logger logger = LoggerFactory.getLogger(PermissionInterceptor.class);
@Autowired
private LoginService loginService;
/**
* Pre-handle hook for the interceptor
* @author zengqiao
* @date 19/4/29
* @return boolean false: intercept and stop further processing, true: let the request through
*/
@Override
public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler) throws Exception {
String uri = request.getRequestURI();
if (!uri.contains("api")) {
response.sendRedirect("/login");
return false;
}
if (uri.contains("api/v1/login")) {
return true;
}
HttpSession session = request.getSession();
String username = (String) session.getAttribute("username");
AccountRoleEnum userRoleEnum = (AccountRoleEnum) session.getAttribute("role");
if (userRoleEnum == null || AccountRoleEnum.UNKNOWN.equals(userRoleEnum) || !loginService.isLogin(username)) {
response.sendRedirect("/login");
return false;
}
if (uri.contains("admin") && userRoleEnum.getRole() <= AccountRoleEnum.NORMAL.getRole()) {
// only users with SRE role or above may access the admin APIs
response.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED, "无权限访问");
return false;
}
if (uri.contains("admin/accounts") && !AccountRoleEnum.ADMIN.equals(userRoleEnum)) {
// non-admin users may not view account information
response.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED, "无权限访问");
return false;
}
return true;
}
}
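
preHandle relies on the login flow having put "username" and "role" into the HTTP session; a minimal sketch of what a login handler would need to store for the checks above to pass (the method name is an assumption, only the two session keys come from this code):

// Hypothetical fragment of a login handler; only the two session attributes matter here
void markLoggedIn(HttpServletRequest request, String username, AccountRoleEnum role) {
    HttpSession session = request.getSession();
    session.setAttribute("username", username);   // later checked via loginService.isLogin(username)
    session.setAttribute("role", role);           // must be an AccountRoleEnum, cast back in preHandle
}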

View File

@@ -0,0 +1,47 @@
package com.xiaojukeji.kafka.manager.web.inteceptor;
import com.xiaojukeji.kafka.manager.common.constant.Constant;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Pointcut;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import org.springframework.web.context.request.RequestContextHolder;
import org.springframework.web.context.request.ServletRequestAttributes;
import javax.servlet.http.HttpServletRequest;
/**
* Web API metrics logging aspect
* @author zengqiao
* @date 20/1/11
*/
@Aspect
@Component
public class WebMetricsInterceptor {
private final static Logger logger = LoggerFactory.getLogger(Constant.API_METRICS_LOGGER);
/**
* Pointcut
*/
private static final String PointCut = "execution(* com.xiaojukeji.kafka.manager.web.api.versionone.*..*(..))";
@Pointcut(value = PointCut)
public void pointcut() {
}
@Around("pointcut()")
public Object doAround(ProceedingJoinPoint proceedingJoinPoint) throws Throwable {
long startTime = System.currentTimeMillis();
Object object = proceedingJoinPoint.proceed();
ServletRequestAttributes attributes = (ServletRequestAttributes) RequestContextHolder.getRequestAttributes();
HttpServletRequest request = attributes.getRequest();
logger.info("URI:{} HTTP-Method:{} Remote-IP:{} Time-Cost:{}ms", request.getRequestURI(), request.getMethod(), request.getRemoteAddr(), System.currentTimeMillis() - startTime);
return object;
}
}
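
RequestContextHolder.getRequestAttributes() can return null when the advised method runs outside a request-bound thread, so the cast-and-dereference above could fail in that case; a defensive variant of the logging step, shown as a sketch rather than changed in place:

// Hypothetical null-safe version of the tail of doAround(...)
ServletRequestAttributes attributes = (ServletRequestAttributes) RequestContextHolder.getRequestAttributes();
if (attributes != null) {
    HttpServletRequest request = attributes.getRequest();
    logger.info("URI:{} HTTP-Method:{} Remote-IP:{} Time-Cost:{}ms",
            request.getRequestURI(), request.getMethod(), request.getRemoteAddr(),
            System.currentTimeMillis() - startTime);
}
return object;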

View File

@@ -0,0 +1,83 @@
package com.xiaojukeji.kafka.manager.web.model;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import org.springframework.util.StringUtils;
/**
* @author zengqiao
* @date 19/5/3
*/
@ApiModel(value = "AccountModel", description = "用户")
public class AccountModel {
@ApiModelProperty(value = "用户名")
private String username;
@ApiModelProperty(value = "新密码, 1.创建账号时, 可不传")
private String password;
@ApiModelProperty(value = "旧密码")
private String oldPassword;
@ApiModelProperty(value = "角色[0:普通, 1:运维, 2:管理员]")
private Integer role;
public String getUsername() {
return username;
}
public void setUsername(String username) {
this.username = username;
}
public String getOldPassword() {
return oldPassword;
}
public void setOldPassword(String oldPassword) {
this.oldPassword = oldPassword;
}
public String getPassword() {
return password;
}
public void setPassword(String password) {
this.password = password;
}
public Integer getRole() {
return role;
}
public void setRole(Integer role) {
this.role = role;
}
@Override
public String toString() {
return "AccountModel{" +
"username='" + username + '\'' +
", oldPassword='" + oldPassword + '\'' +
", password='" + password + '\'' +
", role=" + role +
'}';
}
public boolean insertLegal() {
if (StringUtils.isEmpty(username)
|| StringUtils.isEmpty(password)
|| !(role == 0 || role == 1 || role == 2)) {
return false;
}
return true;
}
public boolean modifyLegal() {
if (StringUtils.isEmpty(username)
|| !(role == 0 || role == 1 || role == 2)) {
return false;
}
return true;
}
}
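
A small usage sketch of the validation helpers above: insertLegal() demands username, password and a role of 0/1/2, while modifyLegal() drops the password requirement. All values are placeholders.

package com.xiaojukeji.kafka.manager.web.model;

/**
 * Illustrative only: shows which fields the legality checks above require.
 */
public class AccountModelUsageExample {
    public static void main(String[] args) {
        AccountModel model = new AccountModel();
        model.setUsername("alice");          // placeholder value
        model.setPassword("initial-pass");   // required by insertLegal()
        model.setRole(2);                    // must be 0, 1 or 2
        System.out.println(model.insertLegal());   // true

        model.setPassword(null);
        System.out.println(model.modifyLegal());   // true: password is optional when modifying
    }
}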

View File

@@ -0,0 +1,139 @@
package com.xiaojukeji.kafka.manager.web.model;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import org.springframework.util.StringUtils;
/**
* cluster model
* @author zengqiao
* @date 19/4/15
*/
@JsonIgnoreProperties(ignoreUnknown = true)
@ApiModel(value = "ClusterModel", description = "cluster model")
public class ClusterModel {
@ApiModelProperty(value = "集群ID")
private Long clusterId;
@ApiModelProperty(value = "集群名称")
private String clusterName;
@ApiModelProperty(value = "ZK地址")
private String zookeeper;
@ApiModelProperty(value = "kafka版本")
private String kafkaVersion;
@ApiModelProperty(value = "bootstrapServers地址")
private String bootstrapServers;
@ApiModelProperty(value = "开启告警[0:不开启, 1:开启]")
private Integer alarmFlag;
@ApiModelProperty(value = "安全协议")
private String securityProtocol;
@ApiModelProperty(value = "SASL机制")
private String saslMechanism;
@ApiModelProperty(value = "SASL JAAS配置")
private String saslJaasConfig;
public Integer getAlarmFlag() {
return alarmFlag;
}
public void setAlarmFlag(Integer alarmFlag) {
this.alarmFlag = alarmFlag;
}
public Long getClusterId() {
return clusterId;
}
public void setClusterId(Long clusterId) {
this.clusterId = clusterId;
}
public String getClusterName() {
return clusterName;
}
public void setClusterName(String clusterName) {
this.clusterName = clusterName;
}
public String getZookeeper() {
return zookeeper;
}
public void setZookeeper(String zookeeper) {
this.zookeeper = zookeeper;
}
public String getKafkaVersion() {
return kafkaVersion;
}
public void setKafkaVersion(String kafkaVersion) {
this.kafkaVersion = kafkaVersion;
}
public String getBootstrapServers() {
return bootstrapServers;
}
public void setBootstrapServers(String bootstrapServers) {
this.bootstrapServers = bootstrapServers;
}
public String getSecurityProtocol() {
return securityProtocol;
}
public void setSecurityProtocol(String securityProtocol) {
this.securityProtocol = securityProtocol;
}
public String getSaslMechanism() {
return saslMechanism;
}
public void setSaslMechanism(String saslMechanism) {
this.saslMechanism = saslMechanism;
}
public String getSaslJaasConfig() {
return saslJaasConfig;
}
public void setSaslJaasConfig(String saslJaasConfig) {
this.saslJaasConfig = saslJaasConfig;
}
@Override
public String toString() {
return "ClusterModel{" +
"clusterId=" + clusterId +
", clusterName='" + clusterName + '\'' +
", zookeeper='" + zookeeper + '\'' +
", kafkaVersion='" + kafkaVersion + '\'' +
", bootstrapServers='" + bootstrapServers + '\'' +
", alarmFlag=" + alarmFlag +
", securityProtocol='" + securityProtocol + '\'' +
", saslMechanism='" + saslMechanism + '\'' +
", saslJaasConfig='" + saslJaasConfig + '\'' +
'}';
}
public boolean legal() {
if (StringUtils.isEmpty(clusterName)
|| StringUtils.isEmpty(zookeeper)
|| StringUtils.isEmpty(kafkaVersion)
|| StringUtils.isEmpty(bootstrapServers)) {
return false;
}
return true;
}
}
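
legal() above only checks the four mandatory connectivity fields (clusterName, zookeeper, kafkaVersion, bootstrapServers); the security-related fields are optional. The sketch below shows a typical SASL-enabled payload; all addresses and credentials are placeholders.

package com.xiaojukeji.kafka.manager.web.model;

/**
 * Illustrative only; addresses and credentials are placeholders.
 */
public class ClusterModelUsageExample {
    public static void main(String[] args) {
        ClusterModel model = new ClusterModel();
        model.setClusterName("demo-cluster");
        model.setZookeeper("zk-1:2181,zk-2:2181/kafka");
        model.setKafkaVersion("0.10.2.0");
        model.setBootstrapServers("kafka-1:9092,kafka-2:9092");
        // optional security settings, not required by legal()
        model.setSecurityProtocol("SASL_PLAINTEXT");
        model.setSaslMechanism("PLAIN");
        model.setSaslJaasConfig("org.apache.kafka.common.security.plain.PlainLoginModule required username=\"demo\" password=\"demo\";");
        System.out.println(model.legal());   // true
    }
}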

View File

@@ -0,0 +1,48 @@
package com.xiaojukeji.kafka.manager.web.model;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import io.swagger.annotations.ApiModel;
import org.springframework.util.StringUtils;
/**
* @author zengqiao
* @date 19/5/3
*/
@JsonIgnoreProperties(ignoreUnknown = true)
@ApiModel(value = "LoginModel", description = "登陆")
public class LoginModel {
private String username;
private String password;
public String getUsername() {
return username;
}
public void setUsername(String username) {
this.username = username;
}
public String getPassword() {
return password;
}
public void setPassword(String password) {
this.password = password;
}
@Override
public String toString() {
return "LoginModel{" +
"username='" + username + '\'' +
", password='" + password + '\'' +
'}';
}
public boolean legal() {
if (StringUtils.isEmpty(username) || StringUtils.isEmpty(password)) {
return false;
}
return true;
}
}

View File

@@ -0,0 +1,93 @@
package com.xiaojukeji.kafka.manager.web.model;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import org.springframework.util.StringUtils;
import java.util.List;
/**
* Create a migration task (topic migration / partition migration)
* @author zengqiao_cn@163.com
* @date 19/4/9
*/
@ApiModel(value = "MigrationCreateModel", description = "创建迁移任务")
@JsonIgnoreProperties(ignoreUnknown = true)
public class MigrationCreateModel {
@ApiModelProperty(value = "集群ID")
private Long clusterId;
@ApiModelProperty(value = "Topic名称")
private String topicName;
@ApiModelProperty(value = "限流值(B/s)")
private Long throttle;
@ApiModelProperty(value = "目标BrokerID列表")
private List<Integer> brokerIdList;
@ApiModelProperty(value = "备注")
private String description;
@ApiModelProperty(value = "分区ID")
private List<Integer> partitionIdList;
public Long getClusterId() {
return clusterId;
}
public void setClusterId(Long clusterId) {
this.clusterId = clusterId;
}
public String getTopicName() {
return topicName;
}
public void setTopicName(String topicName) {
this.topicName = topicName;
}
public Long getThrottle() {
return throttle;
}
public void setThrottle(Long throttle) {
this.throttle = throttle;
}
public List<Integer> getBrokerIdList() {
return brokerIdList;
}
public void setBrokerIdList(List<Integer> brokerIdList) {
this.brokerIdList = brokerIdList;
}
public List<Integer> getPartitionIdList() {
return partitionIdList;
}
public void setPartitionIdList(List<Integer> partitionIdList) {
this.partitionIdList = partitionIdList;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public boolean legal() {
if (clusterId == null
|| StringUtils.isEmpty(topicName)
|| throttle == null || throttle <= 0
|| brokerIdList == null) {
return false;
}
return true;
}
}
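
A sketch of a create-migration request as validated by legal() above: throttle must be positive and brokerIdList non-null, while partitionIdList may be left null (presumably meaning the whole topic is migrated; that reading is an assumption, since the consuming service is not shown in this hunk). IDs and values are placeholders.

package com.xiaojukeji.kafka.manager.web.model;

import java.util.Arrays;

/**
 * Illustrative only; IDs and throttle value are placeholders.
 */
public class MigrationCreateModelUsageExample {
    public static void main(String[] args) {
        MigrationCreateModel model = new MigrationCreateModel();
        model.setClusterId(1L);
        model.setTopicName("demo_topic");
        model.setThrottle(10L * 1024 * 1024);          // 10 MB/s, must be > 0
        model.setBrokerIdList(Arrays.asList(1, 2, 3)); // target brokers, must be non-null
        model.setDescription("move to new brokers");
        System.out.println(model.legal());             // true; partitionIdList left null
    }
}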

View File

@@ -0,0 +1,56 @@
package com.xiaojukeji.kafka.manager.web.model;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
/**
* Migration action model (start / modify / cancel a migration task)
* @author zengqiao_cn@163.com
* @date 19/4/17
*/
@JsonIgnoreProperties(ignoreUnknown = true)
@ApiModel(value = "MigrationModel", description = "迁移Model")
public class MigrationModel {
@ApiModelProperty(value = "任务Id")
private Long taskId;
@ApiModelProperty(value = "动作[start|modify|cancel]")
private String action;
@ApiModelProperty(value = "限流值[会覆盖上一次的限流值]")
private Long throttle;
public Long getTaskId() {
return taskId;
}
public void setTaskId(Long taskId) {
this.taskId = taskId;
}
public String getAction() {
return action;
}
public void setAction(String action) {
this.action = action;
}
public Long getThrottle() {
return throttle;
}
public void setThrottle(Long throttle) {
this.throttle = throttle;
}
@Override
public String toString() {
return "MigrationModel{" +
"taskId=" + taskId +
", action='" + action + '\'' +
", throttle=" + throttle +
'}';
}
}

View File

@@ -0,0 +1,98 @@
package com.xiaojukeji.kafka.manager.web.model;
import com.xiaojukeji.kafka.manager.common.constant.OffsetStoreLocation;
import com.xiaojukeji.kafka.manager.common.entity.dto.PartitionOffsetDTO;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import org.springframework.util.StringUtils;
import java.util.List;
/**
* Reset consumer group offsets
* @author zengqiao
* @date 19/4/8
*/
@JsonIgnoreProperties(ignoreUnknown = true)
@ApiModel(value = "OffsetResetModel", description = "重置消费偏移")
public class OffsetResetModel {
@ApiModelProperty(value = "集群Id")
private Long clusterId;
@ApiModelProperty(value = "Topic名称")
private String topicName;
@ApiModelProperty(value = "消费组")
private String consumerGroup;
@ApiModelProperty(value = "存储位置")
private String location;
@ApiModelProperty(value = "重置到指定offset")
private List<PartitionOffsetDTO> offsetList;
@ApiModelProperty(value = "重置到指定时间")
private Long timestamp;
public Long getClusterId() {
return clusterId;
}
public void setClusterId(Long clusterId) {
this.clusterId = clusterId;
}
public String getTopicName() {
return topicName;
}
public void setTopicName(String topicName) {
this.topicName = topicName;
}
public String getConsumerGroup() {
return consumerGroup;
}
public void setConsumerGroup(String consumerGroup) {
this.consumerGroup = consumerGroup;
}
public String getLocation() {
return location;
}
public void setLocation(String location) {
this.location = location;
}
public List<PartitionOffsetDTO> getOffsetList() {
return offsetList;
}
public void setOffsetList(List<PartitionOffsetDTO> offsetList) {
this.offsetList = offsetList;
}
public Long getTimestamp() {
return timestamp;
}
public void setTimestamp(Long timestamp) {
this.timestamp = timestamp;
}
public boolean legal() {
if (clusterId == null
|| StringUtils.isEmpty(topicName)
|| StringUtils.isEmpty(consumerGroup)
|| OffsetStoreLocation.getOffsetStoreLocation(location) == null) {
return false;
}
if (timestamp == null && offsetList == null) {
return false;
}
return true;
}
}
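
legal() above accepts either a timestamp or an explicit offset list, together with a recognised store location. The sketch below resets by timestamp; the location string "broker" is a placeholder, since the valid values of OffsetStoreLocation are not visible in this hunk.

package com.xiaojukeji.kafka.manager.web.model;

/**
 * Illustrative only; the location value is a placeholder.
 */
public class OffsetResetModelUsageExample {
    public static void main(String[] args) {
        OffsetResetModel model = new OffsetResetModel();
        model.setClusterId(1L);
        model.setTopicName("demo_topic");
        model.setConsumerGroup("demo_group");
        model.setLocation("broker");                                 // must map to an OffsetStoreLocation
        model.setTimestamp(System.currentTimeMillis() - 3600_000L);  // reset to one hour ago
        // offsetList stays null: timestamp and offsetList are alternatives, at least one must be set
        System.out.println(model.legal());   // true only if "broker" is a recognised location
    }
}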

View File

@@ -0,0 +1,78 @@
package com.xiaojukeji.kafka.manager.web.model;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
/**
* Rebalance request parameters
* @author zengqiao
* @date 19/7/8
*/
@JsonIgnoreProperties(ignoreUnknown = true)
@ApiModel(value = "RebalanceModel", description = "rebalance模型")
public class RebalanceModel {
@ApiModelProperty(value = "集群Id")
private Long clusterId;
@ApiModelProperty(value = "brokerId")
private Integer brokerId;
@ApiModelProperty(value = "Topic名称")
private String topicName;
@ApiModelProperty(value = "维度[0: 集群维度, 1: broker维度]")
private Integer dimension;
public Long getClusterId() {
return clusterId;
}
public void setClusterId(Long clusterId) {
this.clusterId = clusterId;
}
public Integer getBrokerId() {
return brokerId;
}
public void setBrokerId(Integer brokerId) {
this.brokerId = brokerId;
}
public String getTopicName() {
return topicName;
}
public void setTopicName(String topicName) {
this.topicName = topicName;
}
public Integer getDimension() {
return dimension;
}
public void setDimension(Integer dimension) {
this.dimension = dimension;
}
@Override
public String toString() {
return "RebalanceModel{" +
"clusterId=" + clusterId +
", brokerId=" + brokerId +
", topicName=" + topicName +
", dimension=" + dimension +
'}';
}
public boolean legal() {
if (dimension == null || clusterId == null) {
return false;
}
if (dimension.equals(1) && brokerId == null) {
return false;
}
return true;
}
}

View File

@@ -0,0 +1,120 @@
package com.xiaojukeji.kafka.manager.web.model;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import org.apache.commons.lang.StringUtils;
import java.util.List;
/**
* Create / update a region
* @author zengqiao
* @date 19/4/3
*/
@JsonIgnoreProperties(ignoreUnknown = true)
@ApiModel(value = "RegionModel", description = "创建Region")
public class RegionModel {
@ApiModelProperty(value = "RegionId, 更新时必须传")
private Long regionId;
@ApiModelProperty(value = "集群ID")
private Long clusterId;
@ApiModelProperty(value = "regionName名称")
private String regionName;
@ApiModelProperty(value = "重要级别, 0:普通, 1:重要2:重要")
private Integer level;
@ApiModelProperty(value = "状态, -1:废弃 0:正常 1:容量已满")
private Integer status;
@ApiModelProperty(value = "brokerId列表")
private List<Integer> brokerIdList;
@ApiModelProperty(value = "备注")
private String description;
public Long getRegionId() {
return regionId;
}
public void setRegionId(Long regionId) {
this.regionId = regionId;
}
public Long getClusterId() {
return clusterId;
}
public void setClusterId(Long clusterId) {
this.clusterId = clusterId;
}
public String getRegionName() {
return regionName;
}
public void setRegionName(String regionName) {
this.regionName = regionName;
}
public Integer getLevel() {
return level;
}
public void setLevel(Integer level) {
this.level = level;
}
public Integer getStatus() {
return status;
}
public void setStatus(Integer status) {
this.status = status;
}
public List<Integer> getBrokerIdList() {
return brokerIdList;
}
public void setBrokerIdList(List<Integer> brokerIdList) {
this.brokerIdList = brokerIdList;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public boolean legal() {
if (clusterId == null
|| StringUtils.isEmpty(regionName)
|| level == null
|| brokerIdList == null || brokerIdList.isEmpty()) {
return false;
}
if (description == null) {
description = "";
}
return true;
}
@Override
public String toString() {
return "RegionModel{" +
"regionId=" + regionId +
", clusterId=" + clusterId +
", regionName='" + regionName + '\'' +
", level=" + level +
", status=" + status +
", brokerIdList=" + brokerIdList +
", description='" + description + '\'' +
'}';
}
}

View File

@@ -0,0 +1,122 @@
package com.xiaojukeji.kafka.manager.web.model.alarm;
import com.xiaojukeji.kafka.manager.common.entity.dto.alarm.AlarmStrategyActionDTO;
import com.xiaojukeji.kafka.manager.common.entity.dto.alarm.AlarmStrategyExpressionDTO;
import com.xiaojukeji.kafka.manager.common.entity.dto.alarm.AlarmStrategyFilterDTO;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import org.apache.commons.lang.StringUtils;
import java.util.List;
/**
* @author zengqiao
* @date 19/5/12
*/
@JsonIgnoreProperties(ignoreUnknown = true)
@ApiModel(value = "AlarmModel", description = "告警模型")
public class AlarmRuleModel {
@ApiModelProperty(value = "告警Id, 修改时必传")
private Long id;
@ApiModelProperty(value = "告警名称")
private String alarmName;
@ApiModelProperty(value = "策略筛选")
private List<AlarmStrategyFilterDTO> strategyFilterList;
@ApiModelProperty(value = "策略表达式")
private List<AlarmStrategyExpressionDTO> strategyExpressionList;
@ApiModelProperty(value = "策略响应")
private List<AlarmStrategyActionDTO> strategyActionList;
@ApiModelProperty(value = "负责人")
private List<String> principalList;
@ApiModelProperty(value = "告警状态, 0:暂停, 1:启用")
private Integer status;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getAlarmName() {
return alarmName;
}
public void setAlarmName(String alarmName) {
this.alarmName = alarmName;
}
public List<AlarmStrategyFilterDTO> getStrategyFilterList() {
return strategyFilterList;
}
public void setStrategyFilterList(List<AlarmStrategyFilterDTO> strategyFilterList) {
this.strategyFilterList = strategyFilterList;
}
public List<AlarmStrategyExpressionDTO> getStrategyExpressionList() {
return strategyExpressionList;
}
public void setStrategyExpressionList(List<AlarmStrategyExpressionDTO> strategyExpressionList) {
this.strategyExpressionList = strategyExpressionList;
}
public List<AlarmStrategyActionDTO> getStrategyActionList() {
return strategyActionList;
}
public void setStrategyActionList(List<AlarmStrategyActionDTO> strategyActionList) {
this.strategyActionList = strategyActionList;
}
public List<String> getPrincipalList() {
return principalList;
}
public void setPrincipalList(List<String> principalList) {
this.principalList = principalList;
}
public Integer getStatus() {
return status;
}
public void setStatus(Integer status) {
this.status = status;
}
public boolean legal() {
if (StringUtils.isEmpty(alarmName)
|| principalList == null
|| strategyExpressionList == null || strategyExpressionList.isEmpty()
|| strategyFilterList == null || strategyFilterList.isEmpty()
|| strategyActionList == null || strategyActionList.isEmpty()) {
return false;
}
for (AlarmStrategyFilterDTO model: strategyFilterList) {
if (!model.legal()) {
return false;
}
}
for (AlarmStrategyExpressionDTO model: strategyExpressionList) {
if (!model.legal()) {
return false;
}
}
for (AlarmStrategyActionDTO model: strategyActionList) {
if (!model.legal()) {
return false;
}
}
return true;
}
}

View File

@@ -0,0 +1,135 @@
package com.xiaojukeji.kafka.manager.web.model.order;
import com.xiaojukeji.kafka.manager.common.constant.StatusCode;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.bizenum.OrderStatusEnum;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import org.springframework.util.StringUtils;
import java.util.ArrayList;
import java.util.List;
/**
* Approve (execute) a partition order
* @author zengqiao
* @date 19/6/26
*/
@JsonIgnoreProperties(ignoreUnknown = true)
@ApiModel(value = "OrderPartitionExecModel", description = "Partition工单审批")
public class OrderPartitionExecModel {
@ApiModelProperty(value = "工单Id")
private Long orderId;
@ApiModelProperty(value = "审批结果, [1:通过, 2:拒绝]")
private Integer orderStatus;
@ApiModelProperty(value = "审批意见, 拒绝时必填")
private String approvalOpinions;
@ApiModelProperty(value = "分区数")
private Integer partitionNum;
@ApiModelProperty(value = "brokerId列表")
private List<Integer> brokerIdList;
@ApiModelProperty(value = "regionId列表")
private List<Long> regionIdList;
public Long getOrderId() {
return orderId;
}
public void setOrderId(Long orderId) {
this.orderId = orderId;
}
public Integer getOrderStatus() {
return orderStatus;
}
public void setOrderStatus(Integer orderStatus) {
this.orderStatus = orderStatus;
}
public String getApprovalOpinions() {
return approvalOpinions;
}
public void setApprovalOpinions(String approvalOpinions) {
this.approvalOpinions = approvalOpinions;
}
public Integer getPartitionNum() {
return partitionNum;
}
public void setPartitionNum(Integer partitionNum) {
this.partitionNum = partitionNum;
}
public List<Integer> getBrokerIdList() {
return brokerIdList;
}
public void setBrokerIdList(List<Integer> brokerIdList) {
this.brokerIdList = brokerIdList;
}
public List<Long> getRegionIdList() {
return regionIdList;
}
public void setRegionIdList(List<Long> regionIdList) {
this.regionIdList = regionIdList;
}
@Override
public String toString() {
return "OrderTopicExecModel{" +
"orderId=" + orderId +
", orderStatus=" + orderStatus +
", approvalOpinions=" + approvalOpinions +
", partitionNum=" + partitionNum +
", brokerIdList=" + brokerIdList +
", regionIdList='" + regionIdList + '\'' +
'}';
}
private static Result checkRefuseIllegal(OrderPartitionExecModel that) {
if (StringUtils.isEmpty(that.approvalOpinions)) {
return new Result(StatusCode.PARAM_ERROR, "param illegal, approvalOpinions is empty");
}
return new Result();
}
private static Result checkPassedIllegal(OrderPartitionExecModel that) {
if (that.brokerIdList == null) {
that.brokerIdList = new ArrayList<>();
}
if (that.regionIdList == null) {
that.regionIdList = new ArrayList<>();
}
if (that.partitionNum == null || that.partitionNum <= 0
|| (that.brokerIdList.isEmpty() && that.regionIdList.isEmpty())) {
return new Result(StatusCode.PARAM_ERROR, "param illegal, some filed is illegal");
}
return new Result();
}
public static Result illegal(OrderPartitionExecModel that) {
if (that == null) {
return new Result(StatusCode.PARAM_ERROR, "param illegal");
}
if (that.orderId == null || that.orderStatus == null) {
return new Result(StatusCode.PARAM_ERROR, "param illegal, orderId or orderStatus is null");
}
if (OrderStatusEnum.PASSED.getCode().equals(that.orderStatus)) {
return checkPassedIllegal(that);
} else if (OrderStatusEnum.REFUSED.getCode().equals(that.orderStatus)) {
return checkRefuseIllegal(that);
}
return new Result(StatusCode.PARAM_ERROR, "param illegal, orderStatus illegal");
}
}

View File

@@ -0,0 +1,97 @@
package com.xiaojukeji.kafka.manager.web.model.order;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import org.apache.commons.lang.StringUtils;
/**
* @author zengqiao
* @date 19/6/16
*/
@JsonIgnoreProperties(ignoreUnknown = true)
@ApiModel(value = "OrderPartitionModel", description = "Partition申请工单")
public class OrderPartitionModel {
@ApiModelProperty(value = "orderId, 创建工单时忽略, 更新工单时必须传")
private Long orderId;
@ApiModelProperty(value = "集群Id")
private Long clusterId;
@ApiModelProperty(value = "Topic名称")
private String topicName;
@ApiModelProperty(value = "预计峰值流量(MB/s)")
private Double predictBytesIn;
@ApiModelProperty(value = "备注说明")
private String description;
public Long getOrderId() {
return orderId;
}
public void setOrderId(Long orderId) {
this.orderId = orderId;
}
public Long getClusterId() {
return clusterId;
}
public void setClusterId(Long clusterId) {
this.clusterId = clusterId;
}
public String getTopicName() {
return topicName;
}
public void setTopicName(String topicName) {
this.topicName = topicName;
}
public Double getPredictBytesIn() {
return predictBytesIn;
}
public void setPredictBytesIn(Double predictBytesIn) {
this.predictBytesIn = predictBytesIn;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
@Override
public String toString() {
return "OrderTopicApplyModel{" +
"orderId=" + orderId +
", clusterId=" + clusterId +
", topicName='" + topicName + '\'' +
", predictBytesIn=" + predictBytesIn +
", description='" + description + '\'' +
'}';
}
public boolean modifyLegal() {
if (orderId == null || !createLegal()) {
return false;
}
return true;
}
public boolean createLegal() {
if (clusterId == null || clusterId < 0
|| StringUtils.isEmpty(topicName)
|| predictBytesIn == null || predictBytesIn < 0
|| StringUtils.isEmpty(description)) {
return false;
}
return true;
}
}

View File

@@ -0,0 +1,161 @@
package com.xiaojukeji.kafka.manager.web.model.order;
import com.xiaojukeji.kafka.manager.common.constant.StatusCode;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.bizenum.OrderStatusEnum;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import org.springframework.util.StringUtils;
import java.util.ArrayList;
import java.util.List;
/**
* Approve (execute) a topic application order
* @author zengqiao
* @date 19/6/26
*/
@JsonIgnoreProperties(ignoreUnknown = true)
@ApiModel(value = "OrderTopicExecModel", description = "Topic工单审批")
public class OrderTopicExecModel {
@ApiModelProperty(value = "工单Id")
private Long orderId;
@ApiModelProperty(value = "审批结果, [1:通过, 2:拒绝]")
private Integer orderStatus;
@ApiModelProperty(value = "审批意见, 拒绝时必填")
private String approvalOpinions;
@ApiModelProperty(value = "分区数")
private Integer partitionNum;
@ApiModelProperty(value = "副本数")
private Integer replicaNum;
@ApiModelProperty(value = "消息保存时间(ms)")
private Long retentionTime;
@ApiModelProperty(value = "brokerId列表")
private List<Integer> brokerIdList;
@ApiModelProperty(value = "regionId列表")
private List<Long> regionIdList;
public Long getOrderId() {
return orderId;
}
public void setOrderId(Long orderId) {
this.orderId = orderId;
}
public Integer getOrderStatus() {
return orderStatus;
}
public void setOrderStatus(Integer orderStatus) {
this.orderStatus = orderStatus;
}
public String getApprovalOpinions() {
return approvalOpinions;
}
public void setApprovalOpinions(String approvalOpinions) {
this.approvalOpinions = approvalOpinions;
}
public Integer getPartitionNum() {
return partitionNum;
}
public void setPartitionNum(Integer partitionNum) {
this.partitionNum = partitionNum;
}
public Integer getReplicaNum() {
return replicaNum;
}
public void setReplicaNum(Integer replicaNum) {
this.replicaNum = replicaNum;
}
public Long getRetentionTime() {
return retentionTime;
}
public void setRetentionTime(Long retentionTime) {
this.retentionTime = retentionTime;
}
public List<Integer> getBrokerIdList() {
return brokerIdList;
}
public void setBrokerIdList(List<Integer> brokerIdList) {
this.brokerIdList = brokerIdList;
}
public List<Long> getRegionIdList() {
return regionIdList;
}
public void setRegionIdList(List<Long> regionIdList) {
this.regionIdList = regionIdList;
}
@Override
public String toString() {
return "OrderTopicExecModel{" +
"orderId=" + orderId +
", orderStatus=" + orderStatus +
", approvalOpinions=" + approvalOpinions +
", partitionNum=" + partitionNum +
", replicaNum=" + replicaNum +
", retentionTime=" + retentionTime +
", brokerIdList=" + brokerIdList +
", regionIdList='" + regionIdList + '\'' +
'}';
}
private static Result checkRefuseIllegal(OrderTopicExecModel that) {
if (StringUtils.isEmpty(that.approvalOpinions)) {
return new Result(StatusCode.PARAM_ERROR, "param illegal, approvalOpinions is empty");
}
return new Result();
}
private static Result checkPassedIllegal(OrderTopicExecModel that) {
if (that.brokerIdList == null) {
that.brokerIdList = new ArrayList<>();
}
if (that.regionIdList == null) {
that.regionIdList = new ArrayList<>();
}
if (that.partitionNum == null || that.partitionNum <= 0
|| that.replicaNum == null || that.replicaNum <= 0
|| that.retentionTime == null || that.retentionTime <= 0
|| (that.brokerIdList.isEmpty() && that.regionIdList.isEmpty())) {
return new Result(StatusCode.PARAM_ERROR, "param illegal, some filed is illegal");
}
return new Result();
}
public static Result illegal(OrderTopicExecModel that) {
if (that == null) {
return new Result(StatusCode.PARAM_ERROR, "param illegal");
}
if (that.orderId == null || that.orderStatus == null) {
return new Result(StatusCode.PARAM_ERROR, "param illegal, orderId or orderStatus is null");
}
if (OrderStatusEnum.PASSED.getCode().equals(that.orderStatus)) {
return checkPassedIllegal(that);
} else if (OrderStatusEnum.REFUSED.getCode().equals(that.orderStatus)) {
return checkRefuseIllegal(that);
}
return new Result(StatusCode.PARAM_ERROR, "param illegal, orderStatus illegal");
}
}
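
The static illegal() check above returns a Result rather than a boolean, so callers can surface the reason a request was rejected. A minimal approval sketch follows; the order id and sizing values are placeholders.

package com.xiaojukeji.kafka.manager.web.model.order;

import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.bizenum.OrderStatusEnum;

import java.util.Collections;

/**
 * Illustrative only; order id and sizing values are placeholders.
 */
public class OrderTopicExecModelUsageExample {
    public static void main(String[] args) {
        OrderTopicExecModel model = new OrderTopicExecModel();
        model.setOrderId(100L);
        model.setOrderStatus(OrderStatusEnum.PASSED.getCode());
        model.setPartitionNum(3);
        model.setReplicaNum(2);
        model.setRetentionTime(86400000L);                     // 1 day in ms
        model.setRegionIdList(Collections.singletonList(1L));  // either regions or brokers must be given
        Result check = OrderTopicExecModel.illegal(model);
        System.out.println(check);   // code/message indicate whether the parameters pass the check
    }
}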

View File

@@ -0,0 +1,125 @@
package com.xiaojukeji.kafka.manager.web.model.order;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import org.apache.commons.lang.StringUtils;
import java.util.List;
/**
* @author zengqiao
* @date 19/6/16
*/
@JsonIgnoreProperties(ignoreUnknown = true)
@ApiModel(value = "OrderTopicApplyModel", description = "Topic申请工单")
public class OrderTopicModel {
@ApiModelProperty(value = "工单Id, 创建工单时忽略, 更新工单时必须传")
private Long orderId;
@ApiModelProperty(value = "集群Id")
private Long clusterId;
@ApiModelProperty(value = "Topic名称")
private String topicName;
@ApiModelProperty(value = "负责人列表")
private List<String> principalList;
@ApiModelProperty(value = "流量上限(MB/s)")
private Double peakBytesIn;
@ApiModelProperty(value = "保存时间(H)")
private Long retentionTime;
@ApiModelProperty(value = "备注说明")
private String description;
public Long getOrderId() {
return orderId;
}
public void setOrderId(Long orderId) {
this.orderId = orderId;
}
public Long getClusterId() {
return clusterId;
}
public void setClusterId(Long clusterId) {
this.clusterId = clusterId;
}
public String getTopicName() {
return topicName;
}
public void setTopicName(String topicName) {
this.topicName = topicName;
}
public List<String> getPrincipalList() {
return principalList;
}
public void setPrincipalList(List<String> principalList) {
this.principalList = principalList;
}
public Double getPeakBytesIn() {
return peakBytesIn;
}
public void setPeakBytesIn(Double peakBytesIn) {
this.peakBytesIn = peakBytesIn;
}
public Long getRetentionTime() {
return retentionTime;
}
public void setRetentionTime(Long retentionTime) {
this.retentionTime = retentionTime;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
@Override
public String toString() {
return "OrderTopicModel{" +
"orderId=" + orderId +
", clusterId=" + clusterId +
", topicName='" + topicName + '\'' +
", principalList='" + principalList + '\'' +
", peakBytesIn=" + peakBytesIn +
", retentionTime=" + retentionTime +
", description='" + description + '\'' +
'}';
}
public boolean modifyLegal() {
if (orderId == null || !createLegal()) {
return false;
}
return true;
}
public boolean createLegal() {
if (clusterId == null
|| StringUtils.isEmpty(topicName)
|| principalList == null || principalList.isEmpty()
|| peakBytesIn == null || peakBytesIn < 0
|| retentionTime == null || retentionTime < 0
|| StringUtils.isEmpty(description)) {
return false;
}
return true;
}
}

View File

@@ -0,0 +1,92 @@
package com.xiaojukeji.kafka.manager.web.model.topic;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import org.springframework.util.StringUtils;
import java.util.List;
/**
* @author zengqiao
* @date 20/1/2
*/
@ApiModel(value = "AdminExpandTopicModel")
public class AdminExpandTopicModel {
@ApiModelProperty(value = "集群ID")
private Long clusterId;
@ApiModelProperty(value = "topicName名称")
private String topicName;
@ApiModelProperty(value = "分区数")
private Integer partitionNum;
@ApiModelProperty(value = "brokerId列表")
private List<Integer> brokerIdList;
@ApiModelProperty(value = "regionId列表")
private List<Long> regionIdList;
public Long getClusterId() {
return clusterId;
}
public void setClusterId(Long clusterId) {
this.clusterId = clusterId;
}
public String getTopicName() {
return topicName;
}
public void setTopicName(String topicName) {
this.topicName = topicName;
}
public Integer getPartitionNum() {
return partitionNum;
}
public void setPartitionNum(Integer partitionNum) {
this.partitionNum = partitionNum;
}
public List<Integer> getBrokerIdList() {
return brokerIdList;
}
public void setBrokerIdList(List<Integer> brokerIdList) {
this.brokerIdList = brokerIdList;
}
public List<Long> getRegionIdList() {
return regionIdList;
}
public void setRegionIdList(List<Long> regionIdList) {
this.regionIdList = regionIdList;
}
@Override
public String toString() {
return "AdminExpandTopicModel{" +
"clusterId=" + clusterId +
", topicName='" + topicName + '\'' +
", partitionNum=" + partitionNum +
", brokerIdList=" + brokerIdList +
", regionIdList=" + regionIdList +
'}';
}
public Boolean legal() {
if (clusterId == null
|| StringUtils.isEmpty(topicName)
|| partitionNum == null || partitionNum <= 0) {
return false;
}
if ((brokerIdList == null || brokerIdList.isEmpty()) && (regionIdList == null || regionIdList.isEmpty())) {
return false;
}
return true;
}
}

View File

@@ -0,0 +1,179 @@
package com.xiaojukeji.kafka.manager.web.model.topic;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import org.springframework.util.StringUtils;
import java.util.List;
/**
* @author huangyiminghappy@163.com, zengqiao
* @date 2019-04-21
*/
@ApiModel(value = "AdminTopicModel")
public class AdminTopicModel {
@ApiModelProperty(value = "集群ID")
private Long clusterId;
@ApiModelProperty(value = "topicName名称")
private String topicName;
@ApiModelProperty(value = "分区数")
private Integer partitionNum;
@ApiModelProperty(value = "副本数")
private Integer replicaNum;
@ApiModelProperty(value = "消息保存时间(ms)")
private Long retentionTime;
@ApiModelProperty(value = "brokerId列表")
private List<Integer> brokerIdList;
@ApiModelProperty(value = "负责人列表")
private List<String> principalList;
@ApiModelProperty(value = "备注")
private String description;
@ApiModelProperty(value = "regionId列表")
private List<Long> regionIdList;
@ApiModelProperty(value = "Topic属性列表")
private String properties;
public Long getClusterId() {
return clusterId;
}
public void setClusterId(Long clusterId) {
this.clusterId = clusterId;
}
public String getTopicName() {
return topicName;
}
public void setTopicName(String topicName) {
this.topicName = topicName;
}
public Integer getPartitionNum() {
return partitionNum;
}
public void setPartitionNum(Integer partitionNum) {
this.partitionNum = partitionNum;
}
public Integer getReplicaNum() {
return replicaNum;
}
public void setReplicaNum(Integer replicaNum) {
this.replicaNum = replicaNum;
}
public Long getRetentionTime() {
return retentionTime;
}
public void setRetentionTime(Long retentionTime) {
this.retentionTime = retentionTime;
}
public List<Integer> getBrokerIdList() {
return brokerIdList;
}
public void setBrokerIdList(List<Integer> brokerIdList) {
this.brokerIdList = brokerIdList;
}
public List<String> getPrincipalList() {
return principalList;
}
public void setPrincipalList(List<String> principalList) {
this.principalList = principalList;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public List<Long> getRegionIdList() {
return regionIdList;
}
public void setRegionIdList(List<Long> regionIdList) {
this.regionIdList = regionIdList;
}
public String getProperties() {
return properties;
}
public void setProperties(String properties) {
this.properties = properties;
}
@Override
public String toString() {
return "TopicCreateModel{" +
"clusterId=" + clusterId +
", topicName='" + topicName + '\'' +
", partitionNum=" + partitionNum +
", replicaNum=" + replicaNum +
", retentionTime=" + retentionTime +
", brokerIdList=" + brokerIdList +
", principalList=" + principalList +
", description='" + description + '\'' +
", regionIdList=" + regionIdList +
", properties='" + properties + '\'' +
'}';
}
public Boolean createParamLegal() {
if (clusterId == null
|| StringUtils.isEmpty(topicName)
|| partitionNum == null || partitionNum <= 0
|| replicaNum == null || replicaNum <= 0
|| retentionTime == null || retentionTime <= 0
|| principalList == null || principalList.isEmpty()) {
return false;
}
if ((brokerIdList == null || brokerIdList.isEmpty()) && (regionIdList == null || regionIdList.isEmpty())) {
return false;
}
return true;
}
public Boolean modifyConfigParamLegal() {
if (clusterId == null
|| StringUtils.isEmpty(topicName)
|| retentionTime == null || retentionTime <= 0
|| principalList == null || principalList.isEmpty()) {
return false;
}
return true;
}
public Boolean expandParamLegal() {
if (clusterId == null
|| StringUtils.isEmpty(topicName)
|| partitionNum == null || partitionNum <= 0
|| retentionTime == null || retentionTime <= 0
|| principalList == null || principalList.isEmpty()) {
return false;
}
if ((brokerIdList == null || brokerIdList.isEmpty()) && (regionIdList == null || regionIdList.isEmpty())) {
return false;
}
return true;
}
}
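
createParamLegal() above requires the basic sizing fields plus at least one non-empty broker or region list. A region-based creation sketch (all ids and sizes are placeholders):

package com.xiaojukeji.kafka.manager.web.model.topic;

import java.util.Collections;

/**
 * Illustrative only; all ids and sizes are placeholders.
 */
public class AdminTopicModelUsageExample {
    public static void main(String[] args) {
        AdminTopicModel model = new AdminTopicModel();
        model.setClusterId(1L);
        model.setTopicName("demo_topic");
        model.setPartitionNum(3);
        model.setReplicaNum(2);
        model.setRetentionTime(86400000L);                            // 1 day in ms
        model.setPrincipalList(Collections.singletonList("alice"));
        model.setRegionIdList(Collections.singletonList(1L));         // brokerIdList may stay empty
        System.out.println(model.createParamLegal());                 // true
    }
}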

View File

@@ -0,0 +1,76 @@
package com.xiaojukeji.kafka.manager.web.model.topic;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
/**
* Topic data sampling
* @author zengqiao
* @date 19/4/8
*/
@JsonIgnoreProperties(ignoreUnknown = true)
@ApiModel(value = "TopicDataSampleModel", description = "Topic采样")
public class TopicDataSampleModel {
@ApiModelProperty(value = "分区Id")
private int partitionId = 0;
@ApiModelProperty(value = "最大采样条数[必须小于100]")
private int maxMsgNum = 10;
@ApiModelProperty(value = "采样超时时间[必须小于30000]")
private int timeout = 2000;
@ApiModelProperty(value = "采样的offset")
private long offset = -1;
@ApiModelProperty(value = "是否截断")
private boolean truncate = true;
public int getPartitionId() {
return partitionId;
}
public void setPartitionId(int partitionId) {
this.partitionId = partitionId;
}
public int getMaxMsgNum() {
return maxMsgNum;
}
public void setMaxMsgNum(int maxMsgNum) {
this.maxMsgNum = maxMsgNum;
}
public int getTimeout() {
return timeout;
}
public void setTimeout(int timeout) {
this.timeout = timeout;
}
public long getOffset() {
return offset;
}
public void setOffset(long offset) {
this.offset = offset;
}
public boolean isTruncate() {
return truncate;
}
public void setTruncate(boolean truncate) {
this.truncate = truncate;
}
public boolean legal() {
if (partitionId < 0 || maxMsgNum > 100 || timeout > 30000) {
return false;
}
return true;
}
}
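
The sampling model above ships with defaults (partition 0, at most 10 messages, 2 s timeout, offset -1, presumably meaning "latest" — that reading is an assumption). legal() only enforces the documented upper bounds, as the sketch below shows.

package com.xiaojukeji.kafka.manager.web.model.topic;

/**
 * Illustrative only.
 */
public class TopicDataSampleModelUsageExample {
    public static void main(String[] args) {
        TopicDataSampleModel model = new TopicDataSampleModel();
        System.out.println(model.legal());   // true: defaults are within the allowed bounds

        model.setMaxMsgNum(500);             // exceeds the documented limit of 100
        System.out.println(model.legal());   // false
    }
}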

View File

@@ -0,0 +1,48 @@
package com.xiaojukeji.kafka.manager.web.model.topic;
import io.swagger.annotations.ApiModelProperty;
import java.util.List;
/**
* @author huangyiminghappy@163.com
* @date 2019-04-21
*/
public class TopicDeleteModel {
@ApiModelProperty(value = "topicName名称列表")
private List<String> topicNameList;
@ApiModelProperty(value = "集群id")
private Long clusterId;
public List<String> getTopicNameList() {
return topicNameList;
}
public void setTopicNameList(List<String> topicNameList) {
this.topicNameList = topicNameList;
}
public Long getClusterId() {
return clusterId;
}
public void setClusterId(Long clusterId) {
this.clusterId = clusterId;
}
@Override
public String toString() {
return "TopicDeleteModel{" +
"topicNameList=" + topicNameList +
", clusterId=" + clusterId +
'}';
}
public boolean legal() {
if (topicNameList == null || clusterId == null) {
return false;
}
return true;
}
}

View File

@@ -0,0 +1,43 @@
package com.xiaojukeji.kafka.manager.web.model.topic;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
/**
* @author zengqiao
* @date 19/7/11
*/
@JsonIgnoreProperties(ignoreUnknown = true)
@ApiModel(value = "TopicFavorite", description = "Topic收藏,取消收藏")
public class TopicFavorite {
@ApiModelProperty(value = "集群ID")
private Long clusterId;
@ApiModelProperty(value = "Topic名称")
private String topicName;
public Long getClusterId() {
return clusterId;
}
public void setClusterId(Long clusterId) {
this.clusterId = clusterId;
}
public String getTopicName() {
return topicName;
}
public void setTopicName(String topicName) {
this.topicName = topicName;
}
@Override
public String toString() {
return "TopicFavorite{" +
"clusterId=" + clusterId +
", topicName='" + topicName + '\'' +
'}';
}
}

View File

@@ -0,0 +1,46 @@
package com.xiaojukeji.kafka.manager.web.model.topic;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import io.swagger.annotations.ApiModel;
import org.apache.commons.lang.StringUtils;
import java.util.List;
/**
* @author zengqiao
* @date 19/7/11
*/
@JsonIgnoreProperties(ignoreUnknown = true)
@ApiModel(value = "TopicFavoriteModel", description = "Topic收藏,取消收藏")
public class TopicFavoriteModel {
private List<TopicFavorite> topicFavoriteList;
public List<TopicFavorite> getTopicFavoriteList() {
return topicFavoriteList;
}
public void setTopicFavoriteList(List<TopicFavorite> topicFavoriteList) {
this.topicFavoriteList = topicFavoriteList;
}
@Override
public String toString() {
return "TopicFavoriteModel{" +
"topicFavoriteList=" + topicFavoriteList +
'}';
}
public boolean legal() {
if (topicFavoriteList == null) {
return false;
}
for (TopicFavorite topicFavorite: topicFavoriteList) {
if (topicFavorite.getClusterId() == null
|| topicFavorite.getClusterId() < 0
|| StringUtils.isEmpty(topicFavorite.getTopicName())) {
return false;
}
}
return true;
}
}

View File

@@ -0,0 +1,50 @@
package com.xiaojukeji.kafka.manager.web.vo;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
/**
* @author zengqiao
* @date 19/5/3
*/
@ApiModel(value = "AccountVO", description = "账号信息")
public class AccountVO {
@ApiModelProperty(value = "用户名")
private String username;
@ApiModelProperty(value = "密码")
private String password;
@ApiModelProperty(value = "角色, 0:普通用户, 1:运维人员, 2:管理员")
private Integer role;
public AccountVO(String username, String password, Integer role) {
this.username = username;
this.password = password;
this.role = role;
}
public String getUsername() {
return username;
}
public void setUsername(String username) {
this.username = username;
}
public String getPassword() {
return password;
}
public void setPassword(String password) {
this.password = password;
}
public Integer getRole() {
return role;
}
public void setRole(Integer role) {
this.role = role;
}
}

View File

@@ -0,0 +1,73 @@
package com.xiaojukeji.kafka.manager.web.vo;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.io.Serializable;
/**
* @author huangyiminghappy@163.com
* @date 2019-03-27
*/
@ApiModel(value = "controller信息")
public class KafkaControllerVO implements Serializable {
private static final long serialVersionUID = 7402869683834994137L;
@ApiModelProperty(value = "节点ID")
private Integer brokerId;
@ApiModelProperty(value = "节点地址")
private String host;
@ApiModelProperty(value = "controller版本")
private Integer controllerVersion;
@ApiModelProperty(value = "controller变更时间")
private Long controllerTimestamp;
public static long getSerialVersionUID() {
return serialVersionUID;
}
public Integer getBrokerId() {
return brokerId;
}
public void setBrokerId(Integer brokerId) {
this.brokerId = brokerId;
}
public String getHost() {
return host;
}
public void setHost(String host) {
this.host = host;
}
public Integer getControllerVersion() {
return controllerVersion;
}
public void setControllerVersion(Integer controllerVersion) {
this.controllerVersion = controllerVersion;
}
public Long getControllerTimestamp() {
return controllerTimestamp;
}
public void setControllerTimestamp(Long controllerTimestamp) {
this.controllerTimestamp = controllerTimestamp;
}
@Override
public String toString() {
return "KafkaControllerVO{" +
"brokerId=" + brokerId +
", host='" + host + '\'' +
", controllerVersion=" + controllerVersion +
", controllerTimestamp=" + controllerTimestamp +
'}';
}
}

View File

@@ -0,0 +1,118 @@
package com.xiaojukeji.kafka.manager.web.vo;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.util.Date;
import java.util.List;
import java.util.Map;
/**
* Migration task detail
* @author zengqiao
* @date 19/4/16
*/
@ApiModel(value = "迁移详情")
public class MigrationDetailVO {
@ApiModelProperty(value = "任务ID")
private Long taskId;
@ApiModelProperty(value = "集群ID")
private Long clusterId;
@ApiModelProperty(value = "Topic名称")
private String topicName;
@ApiModelProperty(value = "限流信息")
private Long throttle;
@ApiModelProperty(value = "任务状态")
private Integer status;
@ApiModelProperty(value = "迁移分区分配结果")
private Map<Integer, List<Integer>> reassignmentMap;
@ApiModelProperty(value = "迁移进度<partitionId, status>")
private Map<Integer, Integer> migrationStatus;
@ApiModelProperty(value = "任务创建时间")
private Date gmtCreate;
public Long getTaskId() {
return taskId;
}
public void setTaskId(Long taskId) {
this.taskId = taskId;
}
public Long getClusterId() {
return clusterId;
}
public void setClusterId(Long clusterId) {
this.clusterId = clusterId;
}
public String getTopicName() {
return topicName;
}
public void setTopicName(String topicName) {
this.topicName = topicName;
}
public Long getThrottle() {
return throttle;
}
public void setThrottle(Long throttle) {
this.throttle = throttle;
}
public Integer getStatus() {
return status;
}
public void setStatus(Integer status) {
this.status = status;
}
public Map<Integer, List<Integer>> getReassignmentMap() {
return reassignmentMap;
}
public void setReassignmentMap(Map<Integer, List<Integer>> reassignmentMap) {
this.reassignmentMap = reassignmentMap;
}
public Map<Integer, Integer> getMigrationStatus() {
return migrationStatus;
}
public void setMigrationStatus(Map<Integer, Integer> migrationStatus) {
this.migrationStatus = migrationStatus;
}
public Date getGmtCreate() {
return gmtCreate;
}
public void setGmtCreate(Date gmtCreate) {
this.gmtCreate = gmtCreate;
}
@Override
public String toString() {
return "MigrationDetailVO{" +
"taskId=" + taskId +
", clusterId=" + clusterId +
", topicName='" + topicName + '\'' +
", throttle=" + throttle +
", status=" + status +
", reassignmentMap=" + reassignmentMap +
", migrationStatus=" + migrationStatus +
", gmtCreate=" + gmtCreate +
'}';
}
}

View File

@@ -0,0 +1,112 @@
package com.xiaojukeji.kafka.manager.web.vo;
import io.swagger.annotations.ApiModelProperty;
/**
* Migration task
* @author zengqiao
* @date 19/7/13
*/
public class MigrationTaskVO {
@ApiModelProperty(value = "任务ID")
private Long taskId;
@ApiModelProperty(value = "集群ID")
private Long clusterId;
@ApiModelProperty(value = "集群名称")
private String clusterName;
@ApiModelProperty(value = "Topic名称")
private String topicName;
@ApiModelProperty(value = "任务状态")
private Integer status;
@ApiModelProperty(value = "限流值")
private Long throttle;
@ApiModelProperty(value = "任务创建时间")
private Long gmtCreate;
@ApiModelProperty(value = "操作人")
private String operator;
public Long getTaskId() {
return taskId;
}
public void setTaskId(Long taskId) {
this.taskId = taskId;
}
public Long getClusterId() {
return clusterId;
}
public void setClusterId(Long clusterId) {
this.clusterId = clusterId;
}
public String getClusterName() {
return clusterName;
}
public void setClusterName(String clusterName) {
this.clusterName = clusterName;
}
public String getTopicName() {
return topicName;
}
public void setTopicName(String topicName) {
this.topicName = topicName;
}
public Integer getStatus() {
return status;
}
public void setStatus(Integer status) {
this.status = status;
}
public Long getThrottle() {
return throttle;
}
public void setThrottle(Long throttle) {
this.throttle = throttle;
}
public Long getGmtCreate() {
return gmtCreate;
}
public void setGmtCreate(Long gmtCreate) {
this.gmtCreate = gmtCreate;
}
public String getOperator() {
return operator;
}
public void setOperator(String operator) {
this.operator = operator;
}
@Override
public String toString() {
return "MigrationTaskVO{" +
"taskId=" + taskId +
", clusterId=" + clusterId +
", clusterName='" + clusterName + '\'' +
", topicName='" + topicName + '\'' +
", status=" + status +
", throttle=" + throttle +
", gmtCreate=" + gmtCreate +
", operator='" + operator + '\'' +
'}';
}
}

View File

@@ -0,0 +1,56 @@
package com.xiaojukeji.kafka.manager.web.vo;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.util.List;
/**
* Partition reassignment of a migration
* @author zengqiao_cn@163.com
* @date 19/4/16
*/
@ApiModel(value = "PartitionReassignmentVO", description = "分区分配")
public class PartitionReassignmentVO {
@ApiModelProperty(value = "Topic名称")
private String topic;
@ApiModelProperty(value = "分区Id")
private Integer partition;
@ApiModelProperty(value = "分配的副本")
private List<Integer> replicas;
public String getTopic() {
return topic;
}
public void setTopic(String topic) {
this.topic = topic;
}
public Integer getPartition() {
return partition;
}
public void setPartition(Integer partition) {
this.partition = partition;
}
public List<Integer> getReplicas() {
return replicas;
}
public void setReplicas(List<Integer> replicas) {
this.replicas = replicas;
}
@Override
public String toString() {
return "ReassignmentVO{" +
"topic='" + topic + '\'' +
", partition=" + partition +
", replicas=" + replicas +
'}';
}
}

View File

@@ -0,0 +1,140 @@
package com.xiaojukeji.kafka.manager.web.vo;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.util.List;
/**
* Region info
* @author zengqiao
* @date 19/4/1
*/
@ApiModel(value = "RegionVO", description = "Region信息")
public class RegionVO {
@ApiModelProperty(value = "RegionID")
protected Long regionId;
@ApiModelProperty(value = "集群ID")
private Long clusterId;
@ApiModelProperty(value = "region名称")
private String regionName;
@ApiModelProperty(value = "重要级别, 0:普通, 1:重要2:重要")
private Integer level;
@ApiModelProperty(value = "brokerId列表")
private List<Integer> brokerIdList;
@ApiModelProperty(value = "描述信息")
private String description;
@ApiModelProperty(value = "操作人")
private String operator;
@ApiModelProperty(value = "状态, -1:废弃 0:正常 1:容量已满")
private Integer status;
@ApiModelProperty(value = "创建时间")
private Long gmtCreate;
@ApiModelProperty(value = "修改时间")
private Long gmtModify;
public Long getRegionId() {
return regionId;
}
public void setRegionId(Long regionId) {
this.regionId = regionId;
}
public Long getClusterId() {
return clusterId;
}
public void setClusterId(Long clusterId) {
this.clusterId = clusterId;
}
public String getRegionName() {
return regionName;
}
public void setRegionName(String regionName) {
this.regionName = regionName;
}
public Integer getLevel() {
return level;
}
public void setLevel(Integer level) {
this.level = level;
}
public List<Integer> getBrokerIdList() {
return brokerIdList;
}
public void setBrokerIdList(List<Integer> brokerIdList) {
this.brokerIdList = brokerIdList;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public String getOperator() {
return operator;
}
public void setOperator(String operator) {
this.operator = operator;
}
public Integer getStatus() {
return status;
}
public void setStatus(Integer status) {
this.status = status;
}
public Long getGmtCreate() {
return gmtCreate;
}
public void setGmtCreate(Long gmtCreate) {
this.gmtCreate = gmtCreate;
}
public Long getGmtModify() {
return gmtModify;
}
public void setGmtModify(Long gmtModify) {
this.gmtModify = gmtModify;
}
@Override
public String toString() {
return "RegionVO{" +
"regionId=" + regionId +
", clusterId=" + clusterId +
", regionName='" + regionName + '\'' +
", level=" + level +
", brokerIdList=" + brokerIdList +
", description='" + description + '\'' +
", operator='" + operator + '\'' +
", status=" + status +
", gmtCreate=" + gmtCreate +
", gmtModify=" + gmtModify +
'}';
}
}

View File

@@ -0,0 +1,68 @@
package com.xiaojukeji.kafka.manager.web.vo.alarm;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.util.AbstractMap;
import java.util.List;
/**
* @author zengqiao
* @date 19/5/12
*/
@ApiModel(value = "AlarmConstantVO", description = "告警常数")
public class AlarmConstantVO {
@ApiModelProperty(value = "条件类型列表")
private List<AbstractMap.SimpleEntry<String, String>> conditionTypeList;
@ApiModelProperty(value = "告警规则类型列表")
private List<AbstractMap.SimpleEntry<Integer, String>> ruleTypeList;
@ApiModelProperty(value = "通知规则类型列表")
private List<AbstractMap.SimpleEntry<String, String>> notifyTypeList;
@ApiModelProperty(value = "指标类型列表")
private List<AbstractMap.SimpleEntry<String, String>> metricTypeList;
public List<AbstractMap.SimpleEntry<String, String>> getConditionTypeList() {
return conditionTypeList;
}
public void setConditionTypeList(List<AbstractMap.SimpleEntry<String, String>> conditionTypeList) {
this.conditionTypeList = conditionTypeList;
}
public List<AbstractMap.SimpleEntry<Integer, String>> getRuleTypeList() {
return ruleTypeList;
}
public void setRuleTypeList(List<AbstractMap.SimpleEntry<Integer, String>> ruleTypeList) {
this.ruleTypeList = ruleTypeList;
}
public List<AbstractMap.SimpleEntry<String, String>> getNotifyTypeList() {
return notifyTypeList;
}
public void setNotifyTypeList(List<AbstractMap.SimpleEntry<String, String>> notifyTypeList) {
this.notifyTypeList = notifyTypeList;
}
public List<AbstractMap.SimpleEntry<String, String>> getMetricTypeList() {
return metricTypeList;
}
public void setMetricTypeList(List<AbstractMap.SimpleEntry<String, String>> metricTypeList) {
this.metricTypeList = metricTypeList;
}
@Override
public String toString() {
return "AlarmConstantVO{" +
"conditionTypeList=" + conditionTypeList +
", ruleTypeList=" + ruleTypeList +
", notifyTypeList=" + notifyTypeList +
", metricTypeList=" + metricTypeList +
'}';
}
}

View File

@@ -0,0 +1,130 @@
package com.xiaojukeji.kafka.manager.web.vo.alarm;
import com.xiaojukeji.kafka.manager.common.entity.dto.alarm.AlarmStrategyActionDTO;
import com.xiaojukeji.kafka.manager.common.entity.dto.alarm.AlarmStrategyExpressionDTO;
import com.xiaojukeji.kafka.manager.common.entity.dto.alarm.AlarmStrategyFilterDTO;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.util.List;
/**
* @author zengqiao
* @date 19/5/12
*/
@ApiModel(value = "AlarmRuleVO", description = "告警规则")
public class AlarmRuleVO {
@ApiModelProperty(value = "告警Id, 修改时必传")
private Long id;
@ApiModelProperty(value = "报警名称")
private String alarmName;
@ApiModelProperty(value = "策略表达式")
private List<AlarmStrategyExpressionDTO> strategyExpressionList;
@ApiModelProperty(value = "策略筛选")
private List<AlarmStrategyFilterDTO> strategyFilterList;
@ApiModelProperty(value = "策略响应")
private List<AlarmStrategyActionDTO> strategyActionList;
@ApiModelProperty(value = "负责人列表")
private List<String> principalList;
@ApiModelProperty(value = "是否启用[1:启用, 0:不启用, -1:已删除]")
private Integer status;
@ApiModelProperty(value = "创建时间")
private Long gmtCreate;
@ApiModelProperty(value = "修改时间")
private Long gmtModify;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getAlarmName() {
return alarmName;
}
public void setAlarmName(String alarmName) {
this.alarmName = alarmName;
}
public List<AlarmStrategyExpressionDTO> getStrategyExpressionList() {
return strategyExpressionList;
}
public void setStrategyExpressionList(List<AlarmStrategyExpressionDTO> strategyExpressionList) {
this.strategyExpressionList = strategyExpressionList;
}
public List<AlarmStrategyFilterDTO> getStrategyFilterList() {
return strategyFilterList;
}
public void setStrategyFilterList(List<AlarmStrategyFilterDTO> strategyFilterList) {
this.strategyFilterList = strategyFilterList;
}
public List<AlarmStrategyActionDTO> getStrategyActionList() {
return strategyActionList;
}
public void setStrategyActionList(List<AlarmStrategyActionDTO> strategyActionList) {
this.strategyActionList = strategyActionList;
}
public List<String> getPrincipalList() {
return principalList;
}
public void setPrincipalList(List<String> principalList) {
this.principalList = principalList;
}
public Integer getStatus() {
return status;
}
public void setStatus(Integer status) {
this.status = status;
}
public Long getGmtCreate() {
return gmtCreate;
}
public void setGmtCreate(Long gmtCreate) {
this.gmtCreate = gmtCreate;
}
public Long getGmtModify() {
return gmtModify;
}
public void setGmtModify(Long gmtModify) {
this.gmtModify = gmtModify;
}
@Override
public String toString() {
return "AlarmRuleVO{" +
"id=" + id +
", alarmName='" + alarmName + '\'' +
", strategyExpressionList=" + strategyExpressionList +
", strategyFilterList=" + strategyFilterList +
", strategyActionList=" + strategyActionList +
", principalList=" + principalList +
", status=" + status +
", gmtCreate=" + gmtCreate +
", gmtModify=" + gmtModify +
'}';
}
}

View File

@@ -0,0 +1,114 @@
package com.xiaojukeji.kafka.manager.web.vo.broker;
import java.util.List;
/**
* @author zengqiao
* @date 19/12/29
*/
public class AnalysisBrokerVO {
private Long clusterId;
private Integer brokerId;
private Long baseTime;
private Double bytesIn;
private Double bytesOut;
private Double messagesIn;
private Double totalFetchRequests;
private Double totalProduceRequests;
private List<AnalysisTopicVO> topicAnalysisVOList;
public Long getClusterId() {
return clusterId;
}
public void setClusterId(Long clusterId) {
this.clusterId = clusterId;
}
public Integer getBrokerId() {
return brokerId;
}
public void setBrokerId(Integer brokerId) {
this.brokerId = brokerId;
}
public Long getBaseTime() {
return baseTime;
}
public void setBaseTime(Long baseTime) {
this.baseTime = baseTime;
}
public Double getBytesIn() {
return bytesIn;
}
public void setBytesIn(Double bytesIn) {
this.bytesIn = bytesIn;
}
public Double getBytesOut() {
return bytesOut;
}
public void setBytesOut(Double bytesOut) {
this.bytesOut = bytesOut;
}
public Double getMessagesIn() {
return messagesIn;
}
public void setMessagesIn(Double messagesIn) {
this.messagesIn = messagesIn;
}
public Double getTotalFetchRequests() {
return totalFetchRequests;
}
public void setTotalFetchRequests(Double totalFetchRequests) {
this.totalFetchRequests = totalFetchRequests;
}
public Double getTotalProduceRequests() {
return totalProduceRequests;
}
public void setTotalProduceRequests(Double totalProduceRequests) {
this.totalProduceRequests = totalProduceRequests;
}
public List<AnalysisTopicVO> getTopicAnalysisVOList() {
return topicAnalysisVOList;
}
public void setTopicAnalysisVOList(List<AnalysisTopicVO> topicAnalysisVOList) {
this.topicAnalysisVOList = topicAnalysisVOList;
}
@Override
public String toString() {
return "AnalysisBrokerVO{" +
"clusterId=" + clusterId +
", brokerId=" + brokerId +
", baseTime=" + baseTime +
", bytesIn=" + bytesIn +
", bytesOut=" + bytesOut +
", messagesIn=" + messagesIn +
", totalFetchRequests=" + totalFetchRequests +
", totalProduceRequests=" + totalProduceRequests +
", topicAnalysisVOList=" + topicAnalysisVOList +
'}';
}
}

View File

@@ -0,0 +1,134 @@
package com.xiaojukeji.kafka.manager.web.vo.broker;
/**
* @author zengqiao
* @date 20/3/11
*/
public class AnalysisTopicVO {
private String topicName;
private String bytesIn;
private String bytesInRate;
private String bytesOut;
private String bytesOutRate;
private String messagesIn;
private String messagesInRate;
private String totalFetchRequests;
private String totalFetchRequestsRate;
private String totalProduceRequests;
private String totalProduceRequestsRate;
public String getTopicName() {
return topicName;
}
public void setTopicName(String topicName) {
this.topicName = topicName;
}
public String getBytesIn() {
return bytesIn;
}
public void setBytesIn(String bytesIn) {
this.bytesIn = bytesIn;
}
public String getBytesInRate() {
return bytesInRate;
}
public void setBytesInRate(String bytesInRate) {
this.bytesInRate = bytesInRate;
}
public String getBytesOut() {
return bytesOut;
}
public void setBytesOut(String bytesOut) {
this.bytesOut = bytesOut;
}
public String getBytesOutRate() {
return bytesOutRate;
}
public void setBytesOutRate(String bytesOutRate) {
this.bytesOutRate = bytesOutRate;
}
public String getMessagesIn() {
return messagesIn;
}
public void setMessagesIn(String messagesIn) {
this.messagesIn = messagesIn;
}
public String getMessagesInRate() {
return messagesInRate;
}
public void setMessagesInRate(String messagesInRate) {
this.messagesInRate = messagesInRate;
}
public String getTotalFetchRequests() {
return totalFetchRequests;
}
public void setTotalFetchRequests(String totalFetchRequests) {
this.totalFetchRequests = totalFetchRequests;
}
public String getTotalFetchRequestsRate() {
return totalFetchRequestsRate;
}
public void setTotalFetchRequestsRate(String totalFetchRequestsRate) {
this.totalFetchRequestsRate = totalFetchRequestsRate;
}
public String getTotalProduceRequests() {
return totalProduceRequests;
}
public void setTotalProduceRequests(String totalProduceRequests) {
this.totalProduceRequests = totalProduceRequests;
}
public String getTotalProduceRequestsRate() {
return totalProduceRequestsRate;
}
public void setTotalProduceRequestsRate(String totalProduceRequestsRate) {
this.totalProduceRequestsRate = totalProduceRequestsRate;
}
@Override
public String toString() {
return "AnalysisTopicVO{" +
"topicName='" + topicName + '\'' +
", bytesIn='" + bytesIn + '\'' +
", bytesInRate='" + bytesInRate + '\'' +
", bytesOut='" + bytesOut + '\'' +
", bytesOutRate='" + bytesOutRate + '\'' +
", messagesIn='" + messagesIn + '\'' +
", messagesInRate='" + messagesInRate + '\'' +
", totalFetchRequests='" + totalFetchRequests + '\'' +
", totalFetchRequestsRate='" + totalFetchRequestsRate + '\'' +
", totalProduceRequests='" + totalProduceRequests + '\'' +
", totalProduceRequestsRate='" + totalProduceRequestsRate + '\'' +
'}';
}
}

View File

@@ -0,0 +1,103 @@
package com.xiaojukeji.kafka.manager.web.vo.broker;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.io.Serializable;
/**
* @author huangjw
* @date 17/6/1.
*/
@ApiModel(value = "BrokerBasicInfoVO", description = "账号信息")
public class BrokerBasicVO implements Serializable {
@ApiModelProperty(value = "主机名")
private String host;
@ApiModelProperty(value = "服务端口")
private Integer port;
@ApiModelProperty(value = "JMX端口")
private Integer jmxPort;
@ApiModelProperty(value = "Topic数")
private Integer topicNum;
@ApiModelProperty(value = "分区数")
private Integer partitionCount;
@ApiModelProperty(value = "Leader数")
private Integer leaderCount;
@ApiModelProperty(value = "启动时间")
private Long startTime;
public String getHost() {
return host;
}
public void setHost(String host) {
this.host = host;
}
public Integer getPort() {
return port;
}
public void setPort(Integer port) {
this.port = port;
}
public Integer getJmxPort() {
return jmxPort;
}
public void setJmxPort(Integer jmxPort) {
this.jmxPort = jmxPort;
}
public Integer getTopicNum() {
return topicNum;
}
public void setTopicNum(Integer topicNum) {
this.topicNum = topicNum;
}
public Integer getPartitionCount() {
return partitionCount;
}
public void setPartitionCount(Integer partitionCount) {
this.partitionCount = partitionCount;
}
public Long getStartTime() {
return startTime;
}
public void setStartTime(Long startTime) {
this.startTime = startTime;
}
public Integer getLeaderCount() {
return leaderCount;
}
public void setLeaderCount(Integer leaderCount) {
this.leaderCount = leaderCount;
}
@Override
public String toString() {
return "BrokerBasicInfoVO{" +
"host='" + host + '\'' +
", port=" + port +
", jmxPort=" + jmxPort +
", topicNum=" + topicNum +
", partitionCount=" + partitionCount +
", leaderCount=" + leaderCount +
", startTime=" + startTime +
'}';
}
}

View File

@@ -0,0 +1,173 @@
package com.xiaojukeji.kafka.manager.web.vo.broker;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
/**
* Broker关键指标
* @date 2019-06-03
*/
@ApiModel(value = "BrokerKeyMetricsVO", description = "Broker关键指标")
public class BrokerKeyMetricsVO {
@ApiModelProperty(value = "DB的ID")
private Long id;
@ApiModelProperty(value = "请求处理器空闲百分比")
private Double requestHandlerIdlPercent;
@ApiModelProperty(value = "网络处理器空闲百分比")
private Double networkProcessorIdlPercent;
@ApiModelProperty(value = "请求队列大小")
private Integer requestQueueSize;
@ApiModelProperty(value = "响应队列大小")
private Integer responseQueueSize;
@ApiModelProperty(value = "刷日志事件")
private Double logFlushTime;
@ApiModelProperty(value = "每秒消费失败数")
private Double failFetchRequest;
@ApiModelProperty(value = "每秒发送失败数")
private Double failProduceRequest;
@ApiModelProperty(value = "发送耗时均值")
private Double totalTimeProduceMean;
@ApiModelProperty(value = "发送耗时99分位")
private Double totalTimeProduce99Th;
@ApiModelProperty(value = "消费耗时均值")
private Double totalTimeFetchConsumerMean;
@ApiModelProperty(value = "消费耗时99分位")
private Double totalTimeFetchConsumer99Th;
@ApiModelProperty(value = "创建事件")
private Long gmtCreate;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public Double getRequestHandlerIdlPercent() {
return requestHandlerIdlPercent;
}
public void setRequestHandlerIdlPercent(Double requestHandlerIdlPercent) {
this.requestHandlerIdlPercent = requestHandlerIdlPercent;
}
public Double getNetworkProcessorIdlPercent() {
return networkProcessorIdlPercent;
}
public void setNetworkProcessorIdlPercent(Double networkProcessorIdlPercent) {
this.networkProcessorIdlPercent = networkProcessorIdlPercent;
}
public Integer getRequestQueueSize() {
return requestQueueSize;
}
public void setRequestQueueSize(Integer requestQueueSize) {
this.requestQueueSize = requestQueueSize;
}
public Integer getResponseQueueSize() {
return responseQueueSize;
}
public void setResponseQueueSize(Integer responseQueueSize) {
this.responseQueueSize = responseQueueSize;
}
public Double getLogFlushTime() {
return logFlushTime;
}
public void setLogFlushTime(Double logFlushTime) {
this.logFlushTime = logFlushTime;
}
public Double getFailFetchRequest() {
return failFetchRequest;
}
public void setFailFetchRequest(Double failFetchRequest) {
this.failFetchRequest = failFetchRequest;
}
public Double getFailProduceRequest() {
return failProduceRequest;
}
public void setFailProduceRequest(Double failProduceRequest) {
this.failProduceRequest = failProduceRequest;
}
public Double getTotalTimeProduceMean() {
return totalTimeProduceMean;
}
public void setTotalTimeProduceMean(Double totalTimeProduceMean) {
this.totalTimeProduceMean = totalTimeProduceMean;
}
public Double getTotalTimeProduce99Th() {
return totalTimeProduce99Th;
}
public void setTotalTimeProduce99Th(Double totalTimeProduce99Th) {
this.totalTimeProduce99Th = totalTimeProduce99Th;
}
public Double getTotalTimeFetchConsumerMean() {
return totalTimeFetchConsumerMean;
}
public void setTotalTimeFetchConsumerMean(Double totalTimeFetchConsumerMean) {
this.totalTimeFetchConsumerMean = totalTimeFetchConsumerMean;
}
public Double getTotalTimeFetchConsumer99Th() {
return totalTimeFetchConsumer99Th;
}
public void setTotalTimeFetchConsumer99Th(Double totalTimeFetchConsumer99Th) {
this.totalTimeFetchConsumer99Th = totalTimeFetchConsumer99Th;
}
public Long getGmtCreate() {
return gmtCreate;
}
public void setGmtCreate(Long gmtCreate) {
this.gmtCreate = gmtCreate;
}
@Override
public String toString() {
return "BrokerKeyMetricsVO{" +
"id=" + id +
", requestHandlerIdlPercent=" + requestHandlerIdlPercent +
", networkProcessorIdlPercent=" + networkProcessorIdlPercent +
", requestQueueSize=" + requestQueueSize +
", responseQueueSize=" + responseQueueSize +
", logFlushTime=" + logFlushTime +
", failFetchRequest=" + failFetchRequest +
", failProduceRequest=" + failProduceRequest +
", totalTimeProduceMean=" + totalTimeProduceMean +
", totalTimeProduce99Th=" + totalTimeProduce99Th +
", totalTimeFetchConsumerMean=" + totalTimeFetchConsumerMean +
", totalTimeFetchConsumer99Th=" + totalTimeFetchConsumer99Th +
", gmtCreate=" + gmtCreate +
'}';
}
}

View File

@@ -0,0 +1,44 @@
package com.xiaojukeji.kafka.manager.web.vo.broker;
/**
* @author zengqiao
* @date 19/7/12
*/
public class BrokerMetadataVO {
private Integer brokerId;
private String host;
public BrokerMetadataVO(int brokerId, String host) {
this.brokerId = brokerId;
this.host = host;
}
public Integer getBrokerId() {
return brokerId;
}
public void setBrokerId(Integer brokerId) {
this.brokerId = brokerId;
}
public String getHost() {
return host;
}
public void setHost(String host) {
this.host = host;
}
@Override
public String toString() {
return "BrokerMetadataVO{" +
"brokerId=" + brokerId +
", host='" + host + '\'' +
'}';
}
}
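For illustration, a minimal sketch of how BrokerMetadataVO might be populated and read; the sketch class, broker ids and host names are hypothetical:

import com.xiaojukeji.kafka.manager.web.vo.broker.BrokerMetadataVO;

import java.util.ArrayList;
import java.util.List;

public class BrokerMetadataVOSketch {
    public static void main(String[] args) {
        // build a small, hypothetical broker list
        List<BrokerMetadataVO> voList = new ArrayList<>();
        voList.add(new BrokerMetadataVO(1, "broker-1.example.com"));
        voList.add(new BrokerMetadataVO(2, "broker-2.example.com"));
        for (BrokerMetadataVO vo : voList) {
            // brokerId is stored as an Integer, so it can be null-checked before use
            System.out.println(vo.getBrokerId() + " -> " + vo.getHost());
        }
    }
}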

View File

@@ -0,0 +1,78 @@
package com.xiaojukeji.kafka.manager.web.vo.broker;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
/**
* BROKER metrics
* @author zengqiao
* @date 19/3/18
*/
@ApiModel(value = "BrokerMetricsVO", description = "Broker流量信息")
public class BrokerMetricsVO {
@ApiModelProperty(value = "消息数")
private Double messagesInPerSec;
@ApiModelProperty(value = "流入流量(B)")
private Double bytesInPerSec;
@ApiModelProperty(value = "流出流量(B)")
private Double bytesOutPerSec;
@ApiModelProperty(value = "被拒绝流量(B)")
private Double bytesRejectedPerSec;
@ApiModelProperty(value = "创建时间")
private Long gmtCreate;
public Double getMessagesInPerSec() {
return messagesInPerSec;
}
public void setMessagesInPerSec(Double messagesInPerSec) {
this.messagesInPerSec = messagesInPerSec;
}
public Double getBytesInPerSec() {
return bytesInPerSec;
}
public void setBytesInPerSec(Double bytesInPerSec) {
this.bytesInPerSec = bytesInPerSec;
}
public Double getBytesOutPerSec() {
return bytesOutPerSec;
}
public void setBytesOutPerSec(Double bytesOutPerSec) {
this.bytesOutPerSec = bytesOutPerSec;
}
public Double getBytesRejectedPerSec() {
return bytesRejectedPerSec;
}
public void setBytesRejectedPerSec(Double bytesRejectedPerSec) {
this.bytesRejectedPerSec = bytesRejectedPerSec;
}
public Long getGmtCreate() {
return gmtCreate;
}
public void setGmtCreate(Long gmtCreate) {
this.gmtCreate = gmtCreate;
}
@Override
public String toString() {
return "BrokerMetricsVO{" +
"messagesInPerSec=" + messagesInPerSec +
", bytesInPerSec=" + bytesInPerSec +
", bytesOutPerSec=" + bytesOutPerSec +
", bytesRejectedPerSec=" + bytesRejectedPerSec +
", gmtCreate=" + gmtCreate +
'}';
}
}

View File

@@ -0,0 +1,137 @@
package com.xiaojukeji.kafka.manager.web.vo.broker;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
/**
* @author zengqiao
* @date 19/4/21
*/
@ApiModel(value = "BrokerOverallVO", description = "Broker总揽")
public class BrokerOverallVO {
@ApiModelProperty(value = "brokerId")
private Integer brokerId;
@ApiModelProperty(value = "主机名")
private String host;
@ApiModelProperty(value = "端口")
private Integer port;
@ApiModelProperty(value = "jmx端口")
private Integer jmxPort;
@ApiModelProperty(value = "启动时间")
private Long startTime;
@ApiModelProperty(value = "流入流量(MB/s)")
private Double bytesInPerSec;
@ApiModelProperty(value = "分区数")
private Integer partitionCount;
@ApiModelProperty(value = "未同步分区数")
private Integer notUnderReplicatedPartitionCount;
@ApiModelProperty(value = "leader数")
private Integer leaderCount;
@ApiModelProperty(value = "region名称")
private String regionName;
public Integer getBrokerId() {
return brokerId;
}
public void setBrokerId(Integer brokerId) {
this.brokerId = brokerId;
}
public String getHost() {
return host;
}
public void setHost(String host) {
this.host = host;
}
public Integer getPort() {
return port;
}
public void setPort(Integer port) {
this.port = port;
}
public Integer getJmxPort() {
return jmxPort;
}
public void setJmxPort(Integer jmxPort) {
this.jmxPort = jmxPort;
}
public Long getStartTime() {
return startTime;
}
public void setStartTime(Long startTime) {
this.startTime = startTime;
}
public Double getBytesInPerSec() {
return bytesInPerSec;
}
public void setBytesInPerSec(Double bytesInPerSec) {
this.bytesInPerSec = bytesInPerSec;
}
public Integer getPartitionCount() {
return partitionCount;
}
public void setPartitionCount(Integer partitionCount) {
this.partitionCount = partitionCount;
}
public Integer getNotUnderReplicatedPartitionCount() {
return notUnderReplicatedPartitionCount;
}
public void setNotUnderReplicatedPartitionCount(Integer notUnderReplicatedPartitionCount) {
this.notUnderReplicatedPartitionCount = notUnderReplicatedPartitionCount;
}
public Integer getLeaderCount() {
return leaderCount;
}
public void setLeaderCount(Integer leaderCount) {
this.leaderCount = leaderCount;
}
public String getRegionName() {
return regionName;
}
public void setRegionName(String regionName) {
this.regionName = regionName;
}
@Override
public String toString() {
return "BrokerOverallVO{" +
"brokerId=" + brokerId +
", host='" + host + '\'' +
", port=" + port +
", jmxPort=" + jmxPort +
", startTime=" + startTime +
", bytesInPerSec=" + bytesInPerSec +
", partitionCount=" + partitionCount +
", notUnderReplicatedPartitionCount=" + notUnderReplicatedPartitionCount +
", leaderCount=" + leaderCount +
", regionName='" + regionName + '\'' +
'}';
}
}

View File

@@ -0,0 +1,151 @@
package com.xiaojukeji.kafka.manager.web.vo.broker;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.util.Objects;
/**
* @author zengqiao
* @date 19/4/3
*/
@ApiModel(value = "BrokerOverviewVO", description = "Broker概括信息")
public class BrokerOverviewVO implements Comparable<BrokerOverviewVO> {
@ApiModelProperty(value = "brokerId")
private Integer brokerId;
@ApiModelProperty(value = "主机名")
private String host;
@ApiModelProperty(value = "端口")
private Integer port;
@ApiModelProperty(value = "jmx端口")
private Integer jmxPort;
@ApiModelProperty(value = "启动时间")
private Long startTime;
@ApiModelProperty(value = "流入流量")
private Double byteIn;
@ApiModelProperty(value = "流出流量")
private Double byteOut;
@ApiModelProperty(value = "broker状态[0:在线, -1:不在线]")
private Integer status;
@ApiModelProperty(value = "region名称")
private String regionName;
public Integer getBrokerId() {
return brokerId;
}
public void setBrokerId(Integer brokerId) {
this.brokerId = brokerId;
}
public String getHost() {
return host;
}
public void setHost(String host) {
this.host = host;
}
public Integer getPort() {
return port;
}
public void setPort(Integer port) {
this.port = port;
}
public Integer getJmxPort() {
return jmxPort;
}
public void setJmxPort(Integer jmxPort) {
this.jmxPort = jmxPort;
}
public Long getStartTime() {
return startTime;
}
public void setStartTime(Long startTime) {
this.startTime = startTime;
}
public Double getByteIn() {
return byteIn;
}
public void setByteIn(Double byteIn) {
this.byteIn = byteIn;
}
public Double getByteOut() {
return byteOut;
}
public void setByteOut(Double byteOut) {
this.byteOut = byteOut;
}
public Integer getStatus() {
return status;
}
public void setStatus(Integer status) {
this.status = status;
}
public String getRegionName() {
return regionName;
}
public void setRegionName(String regionName) {
this.regionName = regionName;
}
@Override
public int compareTo(BrokerOverviewVO that) {
return Integer.compare(this.getBrokerId(), that.getBrokerId());
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
BrokerOverviewVO that = (BrokerOverviewVO) o;
return Objects.equals(brokerId, that.brokerId) &&
Objects.equals(host, that.host) &&
Objects.equals(port, that.port);
}
@Override
public int hashCode() {
return Objects.hash(brokerId, host, port);
}
@Override
public String toString() {
return "BrokerOverviewVO{" +
"brokerId=" + brokerId +
", host='" + host + '\'' +
", port=" + port +
", jmxPort=" + jmxPort +
", startTime=" + startTime +
", byteIn=" + byteIn +
", byteOut=" + byteOut +
", status=" + status +
", regionName='" + regionName + '\'' +
'}';
}
}
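Because BrokerOverviewVO implements Comparable, a broker list can be sorted by brokerId before it is rendered. A minimal sketch (the sketch class and broker data are hypothetical):

import com.xiaojukeji.kafka.manager.web.vo.broker.BrokerOverviewVO;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public class BrokerOverviewVOSortSketch {
    public static void main(String[] args) {
        List<BrokerOverviewVO> voList = new ArrayList<>();
        voList.add(newVO(3, "host-c"));
        voList.add(newVO(1, "host-a"));
        voList.add(newVO(2, "host-b"));
        // compareTo orders by brokerId, so the list prints 1, 2, 3
        Collections.sort(voList);
        voList.forEach(vo -> System.out.println(vo.getBrokerId() + " " + vo.getHost()));
    }

    private static BrokerOverviewVO newVO(Integer brokerId, String host) {
        BrokerOverviewVO vo = new BrokerOverviewVO();
        vo.setBrokerId(brokerId);
        vo.setHost(host);
        return vo;
    }
}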

View File

@@ -0,0 +1,80 @@
package com.xiaojukeji.kafka.manager.web.vo.broker;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.io.Serializable;
import java.util.List;
/**
* @author zengqiao
* @date 19/4/22
*/
@ApiModel(value = "BrokerPartitionsVO", description = "Broker分区信息")
public class BrokerPartitionsVO implements Serializable {
@ApiModelProperty(value = "Topic名称")
private String topicName;
@ApiModelProperty(value = "Leader分区")
private List<Integer> leaderPartitionList;
@ApiModelProperty(value = "Follower分区")
private List<Integer> followerPartitionIdList;
@ApiModelProperty(value = "是否未同步完成")
private Boolean underReplicated;
@ApiModelProperty(value = "未同步分区列表")
private List<Integer> notUnderReplicatedPartitionIdList;
public String getTopicName() {
return topicName;
}
public void setTopicName(String topicName) {
this.topicName = topicName;
}
public List<Integer> getLeaderPartitionList() {
return leaderPartitionList;
}
public void setLeaderPartitionList(List<Integer> leaderPartitionList) {
this.leaderPartitionList = leaderPartitionList;
}
public List<Integer> getFollowerPartitionIdList() {
return followerPartitionIdList;
}
public void setFollowerPartitionIdList(List<Integer> followerPartitionIdList) {
this.followerPartitionIdList = followerPartitionIdList;
}
public Boolean getUnderReplicated() {
return underReplicated;
}
public void setUnderReplicated(Boolean underReplicated) {
this.underReplicated = underReplicated;
}
public List<Integer> getNotUnderReplicatedPartitionIdList() {
return notUnderReplicatedPartitionIdList;
}
public void setNotUnderReplicatedPartitionIdList(List<Integer> notUnderReplicatedPartitionIdList) {
this.notUnderReplicatedPartitionIdList = notUnderReplicatedPartitionIdList;
}
@Override
public String toString() {
return "BrokerPartitionsVO{" +
"topicName='" + topicName + '\'' +
", leaderPartitionList=" + leaderPartitionList +
", followerPartitionIdList=" + followerPartitionIdList +
", underReplicated=" + underReplicated +
", notUnderReplicatedPartitionIdList=" + notUnderReplicatedPartitionIdList +
'}';
}
}

View File

@@ -0,0 +1,116 @@
package com.xiaojukeji.kafka.manager.web.vo.broker;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.io.Serializable;
import java.util.List;
/**
* @author zengqiao
* @date 19/4/22
*/
@ApiModel(value = "BrokerStatusVO", description = "Broker流量信息")
public class BrokerStatusVO implements Serializable {
@ApiModelProperty(value = "消息数")
private List<Double> messageIn;
@ApiModelProperty(value = "流入流量(B)")
private List<Double> byteIn;
@ApiModelProperty(value = "流出流量(B)")
private List<Double> byteOut;
@ApiModelProperty(value = "被拒绝流量(B)")
private List<Double> byteRejected;
@ApiModelProperty(value = "Fetch失败请求数")
private List<Double> failedFetchRequest;
@ApiModelProperty(value = "Produce失败请求数")
private List<Double> failedProduceRequest;
@ApiModelProperty(value = "Fetch请求数")
private List<Double> fetchConsumerRequest;
@ApiModelProperty(value = "Produce请求数")
private List<Double> produceRequest;
public List<Double> getMessageIn() {
return messageIn;
}
public void setMessageIn(List<Double> messageIn) {
this.messageIn = messageIn;
}
public List<Double> getByteIn() {
return byteIn;
}
public void setByteIn(List<Double> byteIn) {
this.byteIn = byteIn;
}
public List<Double> getByteOut() {
return byteOut;
}
public void setByteOut(List<Double> byteOut) {
this.byteOut = byteOut;
}
public List<Double> getByteRejected() {
return byteRejected;
}
public void setByteRejected(List<Double> byteRejected) {
this.byteRejected = byteRejected;
}
public List<Double> getFailedFetchRequest() {
return failedFetchRequest;
}
public void setFailedFetchRequest(List<Double> failedFetchRequest) {
this.failedFetchRequest = failedFetchRequest;
}
public List<Double> getFailedProduceRequest() {
return failedProduceRequest;
}
public void setFailedProduceRequest(List<Double> failedProduceRequest) {
this.failedProduceRequest = failedProduceRequest;
}
public List<Double> getFetchConsumerRequest() {
return fetchConsumerRequest;
}
public void setFetchConsumerRequest(List<Double> fetchConsumerRequest) {
this.fetchConsumerRequest = fetchConsumerRequest;
}
public List<Double> getProduceRequest() {
return produceRequest;
}
public void setProduceRequest(List<Double> produceRequest) {
this.produceRequest = produceRequest;
}
@Override
public String toString() {
return "BrokerStatusVO{" +
"messageIn=" + messageIn +
", byteIn=" + byteIn +
", byteOut=" + byteOut +
", byteRejected=" + byteRejected +
", failedFetchRequest=" + failedFetchRequest +
", failedProduceRequest=" + failedProduceRequest +
", fetchConsumerRequest=" + fetchConsumerRequest +
", produceRequest=" + produceRequest +
'}';
}
}

View File

@@ -0,0 +1,114 @@
package com.xiaojukeji.kafka.manager.web.vo.cluster;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
/**
* cluster basic info
* @author zengqiao
* @date 19/3/18
*/
@ApiModel(value="ClusterBasicVO", description="集群基本信息")
public class ClusterBasicVO {
@ApiModelProperty(value="集群Id")
private Long clusterId;
@ApiModelProperty(value="集群名称")
private String clusterName;
@ApiModelProperty(value="bootstrap地址")
private String bootstrapServers;
@ApiModelProperty(value="kafka版本")
private String kafkaVersion;
@ApiModelProperty(value="broker数量")
private Integer brokerNum;
@ApiModelProperty(value="topic数量")
private Integer topicNum;
@ApiModelProperty(value="集群创建时间")
private Long gmtCreate;
@ApiModelProperty(value="集群修改时间")
private Long gmtModify;
public Long getClusterId() {
return clusterId;
}
public void setClusterId(Long clusterId) {
this.clusterId = clusterId;
}
public String getClusterName() {
return clusterName;
}
public void setClusterName(String clusterName) {
this.clusterName = clusterName;
}
public String getBootstrapServers() {
return bootstrapServers;
}
public void setBootstrapServers(String bootstrapServers) {
this.bootstrapServers = bootstrapServers;
}
public String getKafkaVersion() {
return kafkaVersion;
}
public void setKafkaVersion(String kafkaVersion) {
this.kafkaVersion = kafkaVersion;
}
public Integer getBrokerNum() {
return brokerNum;
}
public void setBrokerNum(Integer brokerNum) {
this.brokerNum = brokerNum;
}
public Integer getTopicNum() {
return topicNum;
}
public void setTopicNum(Integer topicNum) {
this.topicNum = topicNum;
}
public Long getGmtCreate() {
return gmtCreate;
}
public void setGmtCreate(Long gmtCreate) {
this.gmtCreate = gmtCreate;
}
public Long getGmtModify() {
return gmtModify;
}
public void setGmtModify(Long gmtModify) {
this.gmtModify = gmtModify;
}
@Override
public String toString() {
return "ClusterBasicVO{" +
"clusterId=" + clusterId +
", clusterName='" + clusterName + '\'' +
", bootstrapServers='" + bootstrapServers + '\'' +
", kafkaVersion='" + kafkaVersion + '\'' +
", brokerNum=" + brokerNum +
", topicNum=" + topicNum +
", gmtCreate=" + gmtCreate +
", gmtModify=" + gmtModify +
'}';
}
}

View File

@@ -0,0 +1,222 @@
package com.xiaojukeji.kafka.manager.web.vo.cluster;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
/**
* ClusterDetailVO
* @author huangyiminghappy@163.com
* @date 2019/3/15
*/
@ApiModel(value="ClusterDetailVO", description="集群详细信息")
public class ClusterDetailVO {
@ApiModelProperty(value="集群Id")
private Long clusterId;
@ApiModelProperty(value="集群名称")
private String clusterName;
@ApiModelProperty(value="集群ZK地址")
private String zookeeper;
@ApiModelProperty(value="bootstrap地址")
private String bootstrapServers;
@ApiModelProperty(value="kafka版本")
private String kafkaVersion;
@ApiModelProperty(value="broker数量")
private Integer brokerNum;
@ApiModelProperty(value="topic数量")
private Integer topicNum;
@ApiModelProperty(value="consumerGroup数量")
private Integer consumerGroupNum;
@ApiModelProperty(value="controllerId")
private Integer controllerId;
@ApiModelProperty(value="安全协议")
private String securityProtocol;
@ApiModelProperty(value="SASL机制")
private String saslMechanism;
@ApiModelProperty(value="SASL的JSSA配置")
private String saslJaasConfig;
@ApiModelProperty(value="regionNum数")
private Integer regionNum;
@ApiModelProperty(value = "开启告警[0:不开启, 1:开启]")
private Integer alarmFlag;
@ApiModelProperty(value="是否已经删除0不删除1删除")
private Integer status;
@ApiModelProperty(value="集群创建时间")
private Long gmtCreate;
@ApiModelProperty(value="集群修改时间")
private Long gmtModify;
public Long getClusterId() {
return clusterId;
}
public void setClusterId(Long clusterId) {
this.clusterId = clusterId;
}
public String getClusterName() {
return clusterName;
}
public void setClusterName(String clusterName) {
this.clusterName = clusterName;
}
public String getZookeeper() {
return zookeeper;
}
public void setZookeeper(String zookeeper) {
this.zookeeper = zookeeper;
}
public String getBootstrapServers() {
return bootstrapServers;
}
public void setBootstrapServers(String bootstrapServers) {
this.bootstrapServers = bootstrapServers;
}
public String getKafkaVersion() {
return kafkaVersion;
}
public void setKafkaVersion(String kafkaVersion) {
this.kafkaVersion = kafkaVersion;
}
public Integer getBrokerNum() {
return brokerNum;
}
public void setBrokerNum(Integer brokerNum) {
this.brokerNum = brokerNum;
}
public Integer getTopicNum() {
return topicNum;
}
public void setTopicNum(Integer topicNum) {
this.topicNum = topicNum;
}
public Integer getConsumerGroupNum() {
return consumerGroupNum;
}
public void setConsumerGroupNum(Integer consumerGroupNum) {
this.consumerGroupNum = consumerGroupNum;
}
public Integer getControllerId() {
return controllerId;
}
public void setControllerId(Integer controllerId) {
this.controllerId = controllerId;
}
public String getSecurityProtocol() {
return securityProtocol;
}
public void setSecurityProtocol(String securityProtocol) {
this.securityProtocol = securityProtocol;
}
public String getSaslMechanism() {
return saslMechanism;
}
public void setSaslMechanism(String saslMechanism) {
this.saslMechanism = saslMechanism;
}
public String getSaslJaasConfig() {
return saslJaasConfig;
}
public void setSaslJaasConfig(String saslJaasConfig) {
this.saslJaasConfig = saslJaasConfig;
}
public Integer getRegionNum() {
return regionNum;
}
public void setRegionNum(Integer regionNum) {
this.regionNum = regionNum;
}
public Integer getAlarmFlag() {
return alarmFlag;
}
public void setAlarmFlag(Integer alarmFlag) {
this.alarmFlag = alarmFlag;
}
public Integer getStatus() {
return status;
}
public void setStatus(Integer status) {
this.status = status;
}
public Long getGmtCreate() {
return gmtCreate;
}
public void setGmtCreate(Long gmtCreate) {
this.gmtCreate = gmtCreate;
}
public Long getGmtModify() {
return gmtModify;
}
public void setGmtModify(Long gmtModify) {
this.gmtModify = gmtModify;
}
@Override
public String toString() {
return "ClusterDetailVO{" +
"clusterId=" + clusterId +
", clusterName='" + clusterName + '\'' +
", zookeeper='" + zookeeper + '\'' +
", bootstrapServers='" + bootstrapServers + '\'' +
", kafkaVersion='" + kafkaVersion + '\'' +
", brokerNum=" + brokerNum +
", topicNum=" + topicNum +
", consumerGroupNum=" + consumerGroupNum +
", controllerId=" + controllerId +
", securityProtocol='" + securityProtocol + '\'' +
", saslMechanism='" + saslMechanism + '\'' +
", saslJaasConfig='" + saslJaasConfig + '\'' +
", regionNum=" + regionNum +
", alarmFlag=" + alarmFlag +
", status=" + status +
", gmtCreate=" + gmtCreate +
", gmtModify=" + gmtModify +
'}';
}
}

View File

@@ -0,0 +1,126 @@
package com.xiaojukeji.kafka.manager.web.vo.cluster;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
/**
* ClusterMetricsVO
* @author zengqiao
* @date 19/4/3
*/
@ApiModel(value="ClusterMetricsVO", description="集群流量信息")
public class ClusterMetricsVO {
@ApiModelProperty(value="集群Id")
private Long clusterId;
@ApiModelProperty(value="Topic数量")
private Integer topicNum;
@ApiModelProperty(value="Partition数量")
private Integer partitionNum;
@ApiModelProperty(value="Broker数量")
private Integer brokerNum;
@ApiModelProperty(value="每秒流入的字节数")
private Double bytesInPerSec;
@ApiModelProperty(value="每秒流出的字节数")
private Double bytesOutPerSec;
@ApiModelProperty(value="每秒拒绝的字节数")
private Double bytesRejectedPerSec;
@ApiModelProperty(value="每秒流入的消息数")
private Double messagesInPerSec;
@ApiModelProperty(value="创建时间")
private Long gmtCreate;
public Long getClusterId() {
return clusterId;
}
public void setClusterId(Long clusterId) {
this.clusterId = clusterId;
}
public Integer getTopicNum() {
return topicNum;
}
public void setTopicNum(Integer topicNum) {
this.topicNum = topicNum;
}
public Integer getPartitionNum() {
return partitionNum;
}
public void setPartitionNum(Integer partitionNum) {
this.partitionNum = partitionNum;
}
public Integer getBrokerNum() {
return brokerNum;
}
public void setBrokerNum(Integer brokerNum) {
this.brokerNum = brokerNum;
}
public Double getBytesInPerSec() {
return bytesInPerSec;
}
public void setBytesInPerSec(Double bytesInPerSec) {
this.bytesInPerSec = bytesInPerSec;
}
public Double getBytesOutPerSec() {
return bytesOutPerSec;
}
public void setBytesOutPerSec(Double bytesOutPerSec) {
this.bytesOutPerSec = bytesOutPerSec;
}
public Double getBytesRejectedPerSec() {
return bytesRejectedPerSec;
}
public void setBytesRejectedPerSec(Double bytesRejectedPerSec) {
this.bytesRejectedPerSec = bytesRejectedPerSec;
}
public Double getMessagesInPerSec() {
return messagesInPerSec;
}
public void setMessagesInPerSec(Double messagesInPerSec) {
this.messagesInPerSec = messagesInPerSec;
}
public Long getGmtCreate() {
return gmtCreate;
}
public void setGmtCreate(Long gmtCreate) {
this.gmtCreate = gmtCreate;
}
@Override
public String toString() {
return "ClusterMetricsVO{" +
"clusterId=" + clusterId +
", topicNum=" + topicNum +
", partitionNum=" + partitionNum +
", brokerNum=" + brokerNum +
", bytesInPerSec=" + bytesInPerSec +
", bytesOutPerSec=" + bytesOutPerSec +
", bytesRejectedPerSec=" + bytesRejectedPerSec +
", messagesInPerSec=" + messagesInPerSec +
", gmtCreate=" + gmtCreate +
'}';
}
}

View File

@@ -0,0 +1,112 @@
package com.xiaojukeji.kafka.manager.web.vo.consumer;
/**
* @author zengqiao
* @date 19/4/3
*/
public class ConsumerGroupDetailVO {
private Long clusterId;
private String topicName;
private String consumerGroup;
private String location;
private Integer partitionId;
private String clientId;
private Long consumeOffset;
private Long partitionOffset;
private Long lag;
public Long getClusterId() {
return clusterId;
}
public void setClusterId(Long clusterId) {
this.clusterId = clusterId;
}
public String getTopicName() {
return topicName;
}
public void setTopicName(String topicName) {
this.topicName = topicName;
}
public String getConsumerGroup() {
return consumerGroup;
}
public void setConsumerGroup(String consumerGroup) {
this.consumerGroup = consumerGroup;
}
public String getLocation() {
return location;
}
public void setLocation(String location) {
this.location = location;
}
public Integer getPartitionId() {
return partitionId;
}
public void setPartitionId(Integer partitionId) {
this.partitionId = partitionId;
}
public String getClientId() {
return clientId;
}
public void setClientId(String clientId) {
this.clientId = clientId;
}
public Long getConsumeOffset() {
return consumeOffset;
}
public void setConsumeOffset(Long consumeOffset) {
this.consumeOffset = consumeOffset;
}
public Long getPartitionOffset() {
return partitionOffset;
}
public void setPartitionOffset(Long partitionOffset) {
this.partitionOffset = partitionOffset;
}
public Long getLag() {
return lag;
}
public void setLag(Long lag) {
this.lag = lag;
}
@Override
public String toString() {
return "ConsumerGroupDetailVO{" +
"clusterId=" + clusterId +
", topicName='" + topicName + '\'' +
", consumerGroup='" + consumerGroup + '\'' +
", location='" + location + '\'' +
", partitionId=" + partitionId +
", clientId='" + clientId + '\'' +
", consumeOffset=" + consumeOffset +
", partitionOffset=" + partitionOffset +
", lag=" + lag +
'}';
}
}
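ConsumerGroupDetailVO carries both the committed consumer offset and the partition end offset; lag is commonly derived as their difference. A sketch under that assumption (the offsets, topic and group names are made up, and the helper is hypothetical):

import com.xiaojukeji.kafka.manager.web.vo.consumer.ConsumerGroupDetailVO;

public class ConsumerLagSketch {
    public static void main(String[] args) {
        ConsumerGroupDetailVO vo = new ConsumerGroupDetailVO();
        vo.setClusterId(1L);
        vo.setTopicName("demo-topic");
        vo.setConsumerGroup("demo-group");
        vo.setPartitionId(0);
        vo.setPartitionOffset(1500L);  // hypothetical log end offset
        vo.setConsumeOffset(1200L);    // hypothetical committed offset
        vo.setLag(computeLag(vo));
        System.out.println(vo);
    }

    // assumed convention: lag = partition end offset - committed consumer offset
    private static Long computeLag(ConsumerGroupDetailVO vo) {
        if (vo.getPartitionOffset() == null || vo.getConsumeOffset() == null) {
            return null;
        }
        return Math.max(0L, vo.getPartitionOffset() - vo.getConsumeOffset());
    }
}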

View File

@@ -0,0 +1,49 @@
package com.xiaojukeji.kafka.manager.web.vo.consumer;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.io.Serializable;
/**
* 消费组信息
* @author zengqiao
* @date 19/4/3
*/
@ApiModel(value = "ConsumerGroupVO", description = "消费组信息")
public class ConsumerGroupVO implements Serializable {
@ApiModelProperty(value = "消费组名称")
private String consumerGroup;
@ApiModelProperty(value = "存储位置")
private String location;
public ConsumerGroupVO(String consumerGroup, String location) {
this.consumerGroup = consumerGroup;
this.location = location;
}
public String getConsumerGroup() {
return consumerGroup;
}
public void setConsumerGroup(String consumerGroup) {
this.consumerGroup = consumerGroup;
}
public String getLocation() {
return location;
}
public void setLocation(String location) {
this.location = location;
}
@Override
public String toString() {
return "ConsumerGroupVO{" +
"consumerGroup='" + consumerGroup + '\'' +
", location='" + location + '\'' +
'}';
}
}

View File

@@ -0,0 +1,223 @@
package com.xiaojukeji.kafka.manager.web.vo.order;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.util.List;
/**
* @author zengqiao
* @date 19/6/23
*/
@ApiModel(value = "OrderPartitionVO", description = "分区申请工单")
public class OrderPartitionVO {
@ApiModelProperty(value = "工单ID")
private Long orderId;
@ApiModelProperty(value = "集群Id")
private Long clusterId;
@ApiModelProperty(value = "集群名称")
private String clusterName;
@ApiModelProperty(value = "Topic名称")
private String topicName;
@ApiModelProperty(value = "申请人")
private String applicant;
@ApiModelProperty(value = "预计峰值流量(MB/s)")
private Long predictBytesIn;
@ApiModelProperty(value = "近24小时峰值流量(MB/s)")
private Long realBytesIn;
@ApiModelProperty(value = "当前分区数")
private Integer partitionNum;
@ApiModelProperty(value = "当前Topic所处的Region")
private List<String> regionNameList;
@ApiModelProperty(value = "Region的brokerId列表")
private List<Integer> regionBrokerIdList;
@ApiModelProperty(value = "Topic的brokerId列表")
private List<Integer> brokerIdList;
@ApiModelProperty(value = "备注信息")
private String description;
@ApiModelProperty(value = "工单状态, 0:待处理, 1:通过, 2:拒绝, 3:撤销")
private Integer orderStatus;
@ApiModelProperty(value = "审批人")
private String approver;
@ApiModelProperty(value = "审批意见")
private String approvalOpinions;
@ApiModelProperty(value = "创建时间")
private Long gmtCreate;
@ApiModelProperty(value = "修改时间")
private Long gmtModify;
public Long getOrderId() {
return orderId;
}
public void setOrderId(Long orderId) {
this.orderId = orderId;
}
public Long getClusterId() {
return clusterId;
}
public void setClusterId(Long clusterId) {
this.clusterId = clusterId;
}
public String getClusterName() {
return clusterName;
}
public void setClusterName(String clusterName) {
this.clusterName = clusterName;
}
public String getTopicName() {
return topicName;
}
public void setTopicName(String topicName) {
this.topicName = topicName;
}
public String getApplicant() {
return applicant;
}
public void setApplicant(String applicant) {
this.applicant = applicant;
}
public Long getPredictBytesIn() {
return predictBytesIn;
}
public void setPredictBytesIn(Long predictBytesIn) {
this.predictBytesIn = predictBytesIn;
}
public Long getRealBytesIn() {
return realBytesIn;
}
public void setRealBytesIn(Long realBytesIn) {
this.realBytesIn = realBytesIn;
}
public Integer getPartitionNum() {
return partitionNum;
}
public void setPartitionNum(Integer partitionNum) {
this.partitionNum = partitionNum;
}
public List<String> getRegionNameList() {
return regionNameList;
}
public void setRegionNameList(List<String> regionNameList) {
this.regionNameList = regionNameList;
}
public List<Integer> getRegionBrokerIdList() {
return regionBrokerIdList;
}
public void setRegionBrokerIdList(List<Integer> regionBrokerIdList) {
this.regionBrokerIdList = regionBrokerIdList;
}
public List<Integer> getBrokerIdList() {
return brokerIdList;
}
public void setBrokerIdList(List<Integer> brokerIdList) {
this.brokerIdList = brokerIdList;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public Integer getOrderStatus() {
return orderStatus;
}
public void setOrderStatus(Integer orderStatus) {
this.orderStatus = orderStatus;
}
public String getApprover() {
return approver;
}
public void setApprover(String approver) {
this.approver = approver;
}
public String getApprovalOpinions() {
return approvalOpinions;
}
public void setApprovalOpinions(String approvalOpinions) {
this.approvalOpinions = approvalOpinions;
}
public Long getGmtCreate() {
return gmtCreate;
}
public void setGmtCreate(Long gmtCreate) {
this.gmtCreate = gmtCreate;
}
public Long getGmtModify() {
return gmtModify;
}
public void setGmtModify(Long gmtModify) {
this.gmtModify = gmtModify;
}
@Override
public String toString() {
return "OrderPartitionVO{" +
"orderId=" + orderId +
", clusterId=" + clusterId +
", clusterName='" + clusterName + '\'' +
", topicName='" + topicName + '\'' +
", applicant='" + applicant + '\'' +
", predictBytesIn=" + predictBytesIn +
", realBytesIn=" + realBytesIn +
", partitionNum=" + partitionNum +
", regionNameList=" + regionNameList +
", regionBrokerIdList=" + regionBrokerIdList +
", brokerIdList=" + brokerIdList +
", description='" + description + '\'' +
", orderStatus=" + orderStatus +
", approver='" + approver + '\'' +
", approvalOpinions='" + approvalOpinions + '\'' +
", gmtCreate=" + gmtCreate +
", gmtModify=" + gmtModify +
'}';
}
}

View File

@@ -0,0 +1,229 @@
package com.xiaojukeji.kafka.manager.web.vo.order;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
/**
* @author zengqiao
* @date 19/6/18
*/
@ApiModel(value = "Topic工单")
public class OrderTopicVO {
@ApiModelProperty(value = "工单ID")
private Long orderId;
@ApiModelProperty(value = "集群Id")
private Long clusterId;
@ApiModelProperty(value = "集群名称")
private String clusterName;
@ApiModelProperty(value = "Topic名称")
private String topicName;
@ApiModelProperty(value = "流量上限(KB)")
private Long peakBytesIn;
@ApiModelProperty(value = "保留时间")
private Long retentionTime;
private Integer partitionNum;
private Integer replicaNum;
private String regions;
private String brokers;
@ApiModelProperty(value = "申请人")
private String applicant;
@ApiModelProperty(value = "负责人")
private String principals;
@ApiModelProperty(value = "备注信息")
private String description;
@ApiModelProperty(value = "工单状态, 0:待处理, 1:通过, 2:拒绝, 3:撤销")
private Integer orderStatus;
@ApiModelProperty(value = "审批人")
private String approver;
@ApiModelProperty(value = "审批意见")
private String opinion;
@ApiModelProperty(value = "创建时间")
private Long gmtCreate;
@ApiModelProperty(value = "修改时间")
private Long gmtModify;
public Long getOrderId() {
return orderId;
}
public void setOrderId(Long orderId) {
this.orderId = orderId;
}
public Long getClusterId() {
return clusterId;
}
public void setClusterId(Long clusterId) {
this.clusterId = clusterId;
}
public String getClusterName() {
return clusterName;
}
public void setClusterName(String clusterName) {
this.clusterName = clusterName;
}
public String getTopicName() {
return topicName;
}
public void setTopicName(String topicName) {
this.topicName = topicName;
}
public Long getPeakBytesIn() {
return peakBytesIn;
}
public void setPeakBytesIn(Long peakBytesIn) {
this.peakBytesIn = peakBytesIn;
}
public Long getRetentionTime() {
return retentionTime;
}
public void setRetentionTime(Long retentionTime) {
this.retentionTime = retentionTime;
}
public Integer getPartitionNum() {
return partitionNum;
}
public void setPartitionNum(Integer partitionNum) {
this.partitionNum = partitionNum;
}
public Integer getReplicaNum() {
return replicaNum;
}
public void setReplicaNum(Integer replicaNum) {
this.replicaNum = replicaNum;
}
public String getRegions() {
return regions;
}
public void setRegions(String regions) {
this.regions = regions;
}
public String getBrokers() {
return brokers;
}
public void setBrokers(String brokers) {
this.brokers = brokers;
}
public String getApplicant() {
return applicant;
}
public void setApplicant(String applicant) {
this.applicant = applicant;
}
public String getPrincipals() {
return principals;
}
public void setPrincipals(String principals) {
this.principals = principals;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public Integer getOrderStatus() {
return orderStatus;
}
public void setOrderStatus(Integer orderStatus) {
this.orderStatus = orderStatus;
}
public String getApprover() {
return approver;
}
public void setApprover(String approver) {
this.approver = approver;
}
public String getOpinion() {
return opinion;
}
public void setOpinion(String opinion) {
this.opinion = opinion;
}
public Long getGmtCreate() {
return gmtCreate;
}
public void setGmtCreate(Long gmtCreate) {
this.gmtCreate = gmtCreate;
}
public Long getGmtModify() {
return gmtModify;
}
public void setGmtModify(Long gmtModify) {
this.gmtModify = gmtModify;
}
@Override
public String toString() {
return "OrderTopicVO{" +
"orderId=" + orderId +
", clusterId=" + clusterId +
", clusterName='" + clusterName + '\'' +
", topicName='" + topicName + '\'' +
", peakBytesIn=" + peakBytesIn +
", retentionTime=" + retentionTime +
", partitionNum=" + partitionNum +
", replicaNum=" + replicaNum +
", regions='" + regions + '\'' +
", brokers='" + brokers + '\'' +
", applicant='" + applicant + '\'' +
", principals='" + principals + '\'' +
", description='" + description + '\'' +
", orderStatus=" + orderStatus +
", approver='" + approver + '\'' +
", opinion='" + opinion + '\'' +
", gmtCreate=" + gmtCreate +
", gmtModify=" + gmtModify +
'}';
}
}

View File

@@ -0,0 +1,138 @@
package com.xiaojukeji.kafka.manager.web.vo.topic;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
/**
* Topic的基本信息
* @author zengqiao
* @date 19/4/1
*/
@ApiModel(value = "Topic基本信息")
public class TopicBasicVO {
@ApiModelProperty(value = "Topic名称")
private String topicName;
@ApiModelProperty(value = "分区数")
private Integer partitionNum;
@ApiModelProperty(value = "副本数")
private Integer replicaNum;
@ApiModelProperty(value = "占用Broker数")
private Integer brokerNum;
@ApiModelProperty(value = "保留时间(ms)")
private Long retentionTime;
@ApiModelProperty(value = "修改时间")
private Long modifyTime;
@ApiModelProperty(value = "创建时间")
private Long createTime;
@ApiModelProperty(value = "负责人")
private String principals;
@ApiModelProperty(value = "备注")
private String description;
@ApiModelProperty(value = "regionNames")
private String regionNames;
public String getTopicName() {
return topicName;
}
public void setTopicName(String topicName) {
this.topicName = topicName;
}
public Integer getPartitionNum() {
return partitionNum;
}
public void setPartitionNum(Integer partitionNum) {
this.partitionNum = partitionNum;
}
public Integer getReplicaNum() {
return replicaNum;
}
public void setReplicaNum(Integer replicaNum) {
this.replicaNum = replicaNum;
}
public Integer getBrokerNum() {
return brokerNum;
}
public void setBrokerNum(Integer brokerNum) {
this.brokerNum = brokerNum;
}
public Long getRetentionTime() {
return retentionTime;
}
public void setRetentionTime(Long retentionTime) {
this.retentionTime = retentionTime;
}
public Long getModifyTime() {
return modifyTime;
}
public void setModifyTime(Long modifyTime) {
this.modifyTime = modifyTime;
}
public Long getCreateTime() {
return createTime;
}
public void setCreateTime(Long createTime) {
this.createTime = createTime;
}
public String getPrincipals() {
return principals;
}
public void setPrincipals(String principals) {
this.principals = principals;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public String getRegionNames() {
return regionNames;
}
public void setRegionNames(String regionNames) {
this.regionNames = regionNames;
}
@Override
public String toString() {
return "TopicBasicVO{" +
"topicName='" + topicName + '\'' +
", partitionNum=" + partitionNum +
", replicaNum=" + replicaNum +
", brokerNum=" + brokerNum +
", retentionTime=" + retentionTime +
", modifyTime=" + modifyTime +
", createTime=" + createTime +
", principals='" + principals + '\'' +
", description='" + description + '\'' +
", regionNames='" + regionNames + '\'' +
'}';
}
}

View File

@@ -0,0 +1,80 @@
package com.xiaojukeji.kafka.manager.web.vo.topic;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.util.List;
/**
* Topic所在Broker的信息
* @author zengqiao
* @date 19/4/3
*/
@ApiModel(value = "TopicBroker信息")
public class TopicBrokerVO {
@ApiModelProperty(value = "brokerId")
private Integer brokerId;
@ApiModelProperty(value = "broker主机名")
private String host;
@ApiModelProperty(value = "分区数")
private Integer partitionNum;
@ApiModelProperty(value = "分区的Id")
private List<Integer> partitionIdList;
@ApiModelProperty(value = "leader分区的Id")
private List<Integer> leaderPartitionIdList;
public Integer getBrokerId() {
return brokerId;
}
public void setBrokerId(Integer brokerId) {
this.brokerId = brokerId;
}
public String getHost() {
return host;
}
public void setHost(String host) {
this.host = host;
}
public Integer getPartitionNum() {
return partitionNum;
}
public void setPartitionNum(Integer partitionNum) {
this.partitionNum = partitionNum;
}
public List<Integer> getPartitionIdList() {
return partitionIdList;
}
public void setPartitionIdList(List<Integer> partitionIdList) {
this.partitionIdList = partitionIdList;
}
public List<Integer> getLeaderPartitionIdList() {
return leaderPartitionIdList;
}
public void setLeaderPartitionIdList(List<Integer> leaderPartitionIdList) {
this.leaderPartitionIdList = leaderPartitionIdList;
}
@Override
public String toString() {
return "TopicBrokerVO{" +
"brokerId=" + brokerId +
", host='" + host + '\'' +
", partitionNum=" + partitionNum +
", partitionIdList=" + partitionIdList +
", leaderPartitionIdList=" + leaderPartitionIdList +
'}';
}
}

View File

@@ -0,0 +1,29 @@
package com.xiaojukeji.kafka.manager.web.vo.topic;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
/**
* @author zengqiao
* @date 19/4/3
*/
@ApiModel(value = "Topic采样数据")
public class TopicDataSampleVO {
@ApiModelProperty(value = "Topic数据")
private String value;
public String getValue() {
return value;
}
public void setValue(String value) {
this.value = value;
}
@Override
public String toString() {
return "TopicSampleVO{" +
"value='" + value + '\'' +
'}';
}
}

View File

@@ -0,0 +1,74 @@
package com.xiaojukeji.kafka.manager.web.vo.topic;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.io.Serializable;
/**
* @author zengqiao
* @date 19/7/8
*/
@ApiModel(value = "Topic删除结果")
public class TopicDeleteVO implements Serializable {
@ApiModelProperty(value = "集群Id")
private Long clusterId;
@ApiModelProperty(value = "Topic名称")
private String topicName;
@ApiModelProperty(value = "删除信息")
private String message;
@ApiModelProperty(value = "删除code")
private Integer code;
public TopicDeleteVO(Long clusterId, String topicName, String message, Integer code) {
this.clusterId = clusterId;
this.topicName = topicName;
this.message = message;
this.code = code;
}
public Long getClusterId() {
return clusterId;
}
public void setClusterId(Long clusterId) {
this.clusterId = clusterId;
}
public String getTopicName() {
return topicName;
}
public void setTopicName(String topicName) {
this.topicName = topicName;
}
public String getMessage() {
return message;
}
public void setMessage(String message) {
this.message = message;
}
public Integer getCode() {
return code;
}
public void setCode(Integer code) {
this.code = code;
}
@Override
public String toString() {
return "TopicDeleteInfoVO{" +
"clusterId=" + clusterId +
", topicName='" + topicName + '\'' +
", message='" + message + '\'' +
", code='" + code + '\'' +
'}';
}
}
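TopicDeleteVO wraps the per-topic outcome of a delete request via its all-args constructor. A small sketch of how results might be assembled (the topic names, messages and status codes are assumed, not taken from this commit):

import com.xiaojukeji.kafka.manager.web.vo.topic.TopicDeleteVO;

public class TopicDeleteVOSketch {
    public static void main(String[] args) {
        // hypothetical outcome: deletion of "demo-topic" on cluster 1 succeeded
        TopicDeleteVO ok = new TopicDeleteVO(1L, "demo-topic", "success", 0);
        // hypothetical outcome: topic missing, reported with a non-zero code
        TopicDeleteVO missing = new TopicDeleteVO(1L, "unknown-topic", "topic not exist", -1);
        System.out.println(ok);
        System.out.println(missing);
    }
}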

View File

@@ -0,0 +1,121 @@
package com.xiaojukeji.kafka.manager.web.vo.topic;
import io.swagger.annotations.ApiModel;
import java.util.List;
/**
* @author zengqiao
* @date 19/7/12
*/
@ApiModel(value = "TopicDetailVO", description = "Topic详情")
public class TopicDetailVO {
private Long clusterId;
private String topicName;
private List<String> principalList;
private String description;
private Long retentionTime;
private String properties;
private Integer replicaNum;
private Integer partitionNum;
private Long gmtCreate;
private Long gmtModify;
public Long getClusterId() {
return clusterId;
}
public void setClusterId(Long clusterId) {
this.clusterId = clusterId;
}
public String getTopicName() {
return topicName;
}
public void setTopicName(String topicName) {
this.topicName = topicName;
}
public List<String> getPrincipalList() {
return principalList;
}
public void setPrincipalList(List<String> principalList) {
this.principalList = principalList;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public Long getRetentionTime() {
return retentionTime;
}
public void setRetentionTime(Long retentionTime) {
this.retentionTime = retentionTime;
}
public String getProperties() {
return properties;
}
public void setProperties(String properties) {
this.properties = properties;
}
public Integer getReplicaNum() {
return replicaNum;
}
public void setReplicaNum(Integer replicaNum) {
this.replicaNum = replicaNum;
}
public Integer getPartitionNum() {
return partitionNum;
}
public void setPartitionNum(Integer partitionNum) {
this.partitionNum = partitionNum;
}
public Long getGmtCreate() {
return gmtCreate;
}
public void setGmtCreate(Long gmtCreate) {
this.gmtCreate = gmtCreate;
}
public Long getGmtModify() {
return gmtModify;
}
public void setGmtModify(Long gmtModify) {
this.gmtModify = gmtModify;
}
@Override
public String toString() {
return "TopicDetailVO{" +
"clusterId=" + clusterId +
", topicName='" + topicName + '\'' +
", principalList=" + principalList +
", description='" + description + '\'' +
", retentionTime=" + retentionTime +
", properties='" + properties + '\'' +
", replicaNum=" + replicaNum +
", partitionNum=" + partitionNum +
", gmtCreate=" + gmtCreate +
", gmtModify=" + gmtModify +
'}';
}
}

View File

@@ -0,0 +1,103 @@
package com.xiaojukeji.kafka.manager.web.vo.topic;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.util.List;
/**
* @author zengqiao
* @date 20/1/7
*/
@ApiModel(value = "Topic元信息")
public class TopicMetadataVO {
@ApiModelProperty(value = "集群Id")
private Long clusterId;
@ApiModelProperty(value = "Topic名称")
private String topicName;
@ApiModelProperty(value = "BrokerId列表")
private List<Integer> brokerIdList;
@ApiModelProperty(value = "副本数")
private Integer replicaNum;
@ApiModelProperty(value = "分区数")
private Integer partitionNum;
@ApiModelProperty(value = "修改时间")
private Long modifyTime;
@ApiModelProperty(value = "创建时间")
private Long createTime;
public Long getClusterId() {
return clusterId;
}
public void setClusterId(Long clusterId) {
this.clusterId = clusterId;
}
public String getTopicName() {
return topicName;
}
public void setTopicName(String topicName) {
this.topicName = topicName;
}
public List<Integer> getBrokerIdList() {
return brokerIdList;
}
public void setBrokerIdList(List<Integer> brokerIdList) {
this.brokerIdList = brokerIdList;
}
public Integer getReplicaNum() {
return replicaNum;
}
public void setReplicaNum(Integer replicaNum) {
this.replicaNum = replicaNum;
}
public Integer getPartitionNum() {
return partitionNum;
}
public void setPartitionNum(Integer partitionNum) {
this.partitionNum = partitionNum;
}
public Long getModifyTime() {
return modifyTime;
}
public void setModifyTime(Long modifyTime) {
this.modifyTime = modifyTime;
}
public Long getCreateTime() {
return createTime;
}
public void setCreateTime(Long createTime) {
this.createTime = createTime;
}
@Override
public String toString() {
return "TopicMetadataVO{" +
"clusterId=" + clusterId +
", topicName='" + topicName + '\'' +
", brokerIdList=" + brokerIdList +
", replicaNum=" + replicaNum +
", partitionNum=" + partitionNum +
", modifyTime=" + modifyTime +
", createTime=" + createTime +
'}';
}
}

View File

@@ -0,0 +1,89 @@
package com.xiaojukeji.kafka.manager.web.vo.topic;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
/**
* @author huangyiminghappy@163.com
* @date 2019-03-20
*/
@ApiModel(value = "Topic流量信息")
public class TopicMetricsVO {
@ApiModelProperty(value = "每秒流入消息数")
private Double messagesInPerSec = 0.0;
@ApiModelProperty(value = "每秒流入字节数")
private Double bytesInPerSec = 0.0;
@ApiModelProperty(value = "每秒流出字节数")
private Double bytesOutPerSec = 0.0;
@ApiModelProperty(value = "每秒拒绝字节数")
private Double bytesRejectedPerSec = 0.0;
@ApiModelProperty(value = "每秒请求数")
private Double totalProduceRequestsPerSec = 0.0;
@ApiModelProperty(value = "创建时间")
private Long gmtCreate;
public Double getMessagesInPerSec() {
return messagesInPerSec;
}
public void setMessagesInPerSec(Double messagesInPerSec) {
this.messagesInPerSec = messagesInPerSec;
}
public Double getBytesInPerSec() {
return bytesInPerSec;
}
public void setBytesInPerSec(Double bytesInPerSec) {
this.bytesInPerSec = bytesInPerSec;
}
public Double getBytesOutPerSec() {
return bytesOutPerSec;
}
public void setBytesOutPerSec(Double bytesOutPerSec) {
this.bytesOutPerSec = bytesOutPerSec;
}
public Double getBytesRejectedPerSec() {
return bytesRejectedPerSec;
}
public void setBytesRejectedPerSec(Double bytesRejectedPerSec) {
this.bytesRejectedPerSec = bytesRejectedPerSec;
}
public Double getTotalProduceRequestsPerSec() {
return totalProduceRequestsPerSec;
}
public void setTotalProduceRequestsPerSec(Double totalProduceRequestsPerSec) {
this.totalProduceRequestsPerSec = totalProduceRequestsPerSec;
}
public Long getGmtCreate() {
return gmtCreate;
}
public void setGmtCreate(Long gmtCreate) {
this.gmtCreate = gmtCreate;
}
@Override
public String toString() {
return "TopicMetricsVO{" +
"messagesInPerSec=" + messagesInPerSec +
", bytesInPerSec=" + bytesInPerSec +
", bytesOutPerSec=" + bytesOutPerSec +
", bytesRejectedPerSec=" + bytesRejectedPerSec +
", totalProduceRequestsPerSec=" + totalProduceRequestsPerSec +
", gmtCreate=" + gmtCreate +
'}';
}
}

View File

@@ -0,0 +1,85 @@
package com.xiaojukeji.kafka.manager.web.vo.topic;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
/**
* @author huangyiminghappy@163.com
* @date 2019-03-26
*/
@ApiModel(value = "TopicOffset信息")
public class TopicOffsetVO {
@ApiModelProperty(value = "集群id")
private Long clusterId;
@ApiModelProperty(value = "topic名字")
private String topicName;
@ApiModelProperty(value = "分区编号")
private Integer partitionId;
@ApiModelProperty(value = "分区offset")
private Long offset;
@ApiModelProperty(value = "该offset对应的时间")
private Long timestamp;
public TopicOffsetVO(Long clusterId, String topicName, Integer partitionId, Long offset, Long timestamp) {
this.clusterId = clusterId;
this.topicName = topicName;
this.partitionId = partitionId;
this.offset = offset;
this.timestamp = timestamp;
}
public Long getClusterId() {
return clusterId;
}
public void setClusterId(Long clusterId) {
this.clusterId = clusterId;
}
public String getTopicName() {
return topicName;
}
public void setTopicName(String topicName) {
this.topicName = topicName;
}
public Integer getPartitionId() {
return partitionId;
}
public void setPartitionId(Integer partitionId) {
this.partitionId = partitionId;
}
public Long getOffset() {
return offset;
}
public void setOffset(Long offset) {
this.offset = offset;
}
public Long getTimestamp() {
return timestamp;
}
public void setTimestamp(Long timestamp) {
this.timestamp = timestamp;
}
@Override
public String toString() {
return "TopicOffsetVO{" +
"clusterId=" + clusterId +
", topicName='" + topicName + '\'' +
", partitionId=" + partitionId +
", offset=" + offset +
", timestamp=" + timestamp +
'}';
}
}
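A VO like this is typically filled from the consumer offsetsForTimes lookup, which resolves the earliest offset whose timestamp is greater than or equal to the requested time. The service layer that performs the lookup is not shown here, so the snippet below is only an illustrative sketch; the bootstrap address, topic, partition, cluster ID, and timestamp are placeholders.

import com.xiaojukeji.kafka.manager.web.vo.topic.TopicOffsetVO;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.consumer.OffsetAndTimestamp;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.util.Collections;
import java.util.Map;
import java.util.Properties;

public class TopicOffsetLookupSample {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092");           // placeholder broker address
        props.put("key.deserializer", StringDeserializer.class.getName());
        props.put("value.deserializer", StringDeserializer.class.getName());

        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            TopicPartition tp = new TopicPartition("test_topic", 0); // placeholder topic/partition
            long timestamp = System.currentTimeMillis() - 3600_000L; // one hour ago
            Map<TopicPartition, OffsetAndTimestamp> result =
                    consumer.offsetsForTimes(Collections.singletonMap(tp, timestamp));
            OffsetAndTimestamp oat = result.get(tp);
            if (oat != null) {
                // clusterId 1L is a placeholder
                TopicOffsetVO vo = new TopicOffsetVO(1L, tp.topic(), tp.partition(), oat.offset(), oat.timestamp());
                System.out.println(vo);
            }
        }
    }
}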

View File

@@ -0,0 +1,138 @@
package com.xiaojukeji.kafka.manager.web.vo.topic;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
/**
* Topic information
* @author zengqiao
* @date 19/4/1
*/
@ApiModel(value = "TopicOverviewVO", description = "Topic overview information")
public class TopicOverviewVO {
@ApiModelProperty(value = "cluster ID")
private Long clusterId;
@ApiModelProperty(value = "cluster name")
private String clusterName;
@ApiModelProperty(value = "Topic name")
private String topicName;
@ApiModelProperty(value = "replica count")
private Integer replicaNum;
@ApiModelProperty(value = "partition count")
private Integer partitionNum;
@ApiModelProperty(value = "bytes-in rate (B/s)")
private Double byteIn;
@ApiModelProperty(value = "produce requests per second")
private Double produceRequest;
@ApiModelProperty(value = "Topic update time")
private Long updateTime;
@ApiModelProperty(value = "owners")
private String principals;
@ApiModelProperty(value = "TRUE: favorited, FALSE: not favorited")
private Boolean favorite;
public Long getClusterId() {
return clusterId;
}
public void setClusterId(Long clusterId) {
this.clusterId = clusterId;
}
public String getClusterName() {
return clusterName;
}
public void setClusterName(String clusterName) {
this.clusterName = clusterName;
}
public String getTopicName() {
return topicName;
}
public void setTopicName(String topicName) {
this.topicName = topicName;
}
public Integer getReplicaNum() {
return replicaNum;
}
public void setReplicaNum(Integer replicaNum) {
this.replicaNum = replicaNum;
}
public Integer getPartitionNum() {
return partitionNum;
}
public void setPartitionNum(Integer partitionNum) {
this.partitionNum = partitionNum;
}
public Double getByteIn() {
return byteIn;
}
public void setByteIn(Double byteIn) {
this.byteIn = byteIn;
}
public Double getProduceRequest() {
return produceRequest;
}
public void setProduceRequest(Double produceRequest) {
this.produceRequest = produceRequest;
}
public Long getUpdateTime() {
return updateTime;
}
public void setUpdateTime(Long updateTime) {
this.updateTime = updateTime;
}
public String getPrincipals() {
return principals;
}
public void setPrincipals(String principals) {
this.principals = principals;
}
public Boolean getFavorite() {
return favorite;
}
public void setFavorite(Boolean favorite) {
this.favorite = favorite;
}
@Override
public String toString() {
return "TopicOverviewVO{" +
"clusterId=" + clusterId +
", clusterName='" + clusterName + '\'' +
", topicName='" + topicName + '\'' +
", replicaNum=" + replicaNum +
", partitionNum=" + partitionNum +
", byteIn=" + byteIn +
", produceRequest=" + produceRequest +
", updateTime=" + updateTime +
", principals='" + principals + '\'' +
", favorite=" + favorite +
'}';
}
}

View File

@@ -0,0 +1,115 @@
package com.xiaojukeji.kafka.manager.web.vo.topic;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.util.List;
/**
* @author arthur
* @date 2017/6/6.
*/
@ApiModel(value = "分区信息")
public class TopicPartitionVO {
@ApiModelProperty(value = "分区Id")
private Integer partitionId;
@ApiModelProperty(value = "offset偏移")
private Long offset;
@ApiModelProperty(value = "分区leader所在Broker")
private Integer leaderBrokerId;
@ApiModelProperty(value = "首选leader的Broker")
private Integer preferredBrokerId;
@ApiModelProperty(value = "leaderEpoch")
private Integer leaderEpoch;
@ApiModelProperty(value = "replica")
private List<Integer> replicaBrokerIdList;
@ApiModelProperty(value = "ISR")
private List<Integer> isrBrokerIdList;
@ApiModelProperty(value = "True:未同步, False:已同步")
private Boolean underReplicated;
public Integer getPartitionId() {
return partitionId;
}
public void setPartitionId(Integer partitionId) {
this.partitionId = partitionId;
}
public Long getOffset() {
return offset;
}
public void setOffset(Long offset) {
this.offset = offset;
}
public Integer getLeaderBrokerId() {
return leaderBrokerId;
}
public void setLeaderBrokerId(Integer leaderBrokerId) {
this.leaderBrokerId = leaderBrokerId;
}
public Integer getPreferredBrokerId() {
return preferredBrokerId;
}
public void setPreferredBrokerId(Integer preferredBrokerId) {
this.preferredBrokerId = preferredBrokerId;
}
public Integer getLeaderEpoch() {
return leaderEpoch;
}
public void setLeaderEpoch(Integer leaderEpoch) {
this.leaderEpoch = leaderEpoch;
}
public List<Integer> getReplicaBrokerIdList() {
return replicaBrokerIdList;
}
public void setReplicaBrokerIdList(List<Integer> replicaBrokerIdList) {
this.replicaBrokerIdList = replicaBrokerIdList;
}
public List<Integer> getIsrBrokerIdList() {
return isrBrokerIdList;
}
public void setIsrBrokerIdList(List<Integer> isrBrokerIdList) {
this.isrBrokerIdList = isrBrokerIdList;
}
public boolean isUnderReplicated() {
// guard against unboxing a null Boolean when the field has not been set
return underReplicated != null && underReplicated;
}
public void setUnderReplicated(boolean underReplicated) {
this.underReplicated = underReplicated;
}
@Override
public String toString() {
return "TopicPartitionVO{" +
"partitionId=" + partitionId +
", offset=" + offset +
", leaderBrokerId=" + leaderBrokerId +
", preferredBrokerId=" + preferredBrokerId +
", leaderEpoch=" + leaderEpoch +
", replicaBrokerIdList=" + replicaBrokerIdList +
", isrBrokerIdList=" + isrBrokerIdList +
", underReplicated=" + underReplicated +
'}';
}
}
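The underReplicated flag is derived from the two broker-ID lists above: a partition counts as under-replicated when its ISR is smaller than its assigned replica set. The converter that sets the flag is elsewhere in this commit, so the helper below is only an illustrative sketch under an assumed class name.

import com.xiaojukeji.kafka.manager.web.vo.topic.TopicPartitionVO;

// hypothetical helper class, not part of this commit
public class TopicPartitionVOUtils {
    // a partition is under-replicated when its ISR is smaller than its replica assignment
    public static boolean computeUnderReplicated(TopicPartitionVO vo) {
        if (vo.getReplicaBrokerIdList() == null || vo.getIsrBrokerIdList() == null) {
            return false;
        }
        return vo.getIsrBrokerIdList().size() < vo.getReplicaBrokerIdList().size();
    }
}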

View File

@@ -0,0 +1,107 @@
package com.xiaojukeji.kafka.manager.web.vo.topic;
import io.swagger.annotations.ApiModel;
import java.util.List;
/**
* Topic real-time traffic metrics
* @author zengqiao
* @date 19/4/1
*/
@ApiModel(value = "Topic real-time traffic metrics")
public class TopicRealTimeMetricsVO {
private List<Double> messageIn;
private List<Double> byteIn;
private List<Double> byteOut;
private List<Double> byteRejected;
private List<Double> failedFetchRequest;
private List<Double> failedProduceRequest;
private List<Double> totalProduceRequest;
private List<Double> totalFetchRequest;
public List<Double> getMessageIn() {
return messageIn;
}
public void setMessageIn(List<Double> messageIn) {
this.messageIn = messageIn;
}
public List<Double> getByteIn() {
return byteIn;
}
public void setByteIn(List<Double> byteIn) {
this.byteIn = byteIn;
}
public List<Double> getByteOut() {
return byteOut;
}
public void setByteOut(List<Double> byteOut) {
this.byteOut = byteOut;
}
public List<Double> getByteRejected() {
return byteRejected;
}
public void setByteRejected(List<Double> byteRejected) {
this.byteRejected = byteRejected;
}
public List<Double> getFailedFetchRequest() {
return failedFetchRequest;
}
public void setFailedFetchRequest(List<Double> failedFetchRequest) {
this.failedFetchRequest = failedFetchRequest;
}
public List<Double> getFailedProduceRequest() {
return failedProduceRequest;
}
public void setFailedProduceRequest(List<Double> failedProduceRequest) {
this.failedProduceRequest = failedProduceRequest;
}
public List<Double> getTotalProduceRequest() {
return totalProduceRequest;
}
public void setTotalProduceRequest(List<Double> totalProduceRequest) {
this.totalProduceRequest = totalProduceRequest;
}
public List<Double> getTotalFetchRequest() {
return totalFetchRequest;
}
public void setTotalFetchRequest(List<Double> totalFetchRequest) {
this.totalFetchRequest = totalFetchRequest;
}
@Override
public String toString() {
return "TopicRealTimeMetricsVO{" +
"messageIn=" + messageIn +
", byteIn=" + byteIn +
", byteOut=" + byteOut +
", byteRejected=" + byteRejected +
", failedFetchRequest=" + failedFetchRequest +
", failedProduceRequest=" + failedProduceRequest +
", totalProduceRequest=" + totalProduceRequest +
", totalFetchRequest=" + totalFetchRequest +
'}';
}
}

View File

@@ -0,0 +1,32 @@
server:
  port: 8080
  tomcat:
    accept-count: 100
    max-connections: 1000
    max-threads: 20
    min-spare-threads: 20

spring:
  application:
    name: kafkamanager
  datasource:
    kafka-manager:
      jdbc-url: jdbc:mysql://localhost:3306/kafka_manager?characterEncoding=UTF-8&serverTimezone=GMT%2B8
      username: admin
      password: admin
      driver-class-name: org.mariadb.jdbc.Driver
  main:
    allow-bean-definition-overriding: true
  profiles:
    active: dev

logging:
  config: classpath:logback-spring.xml

# Kafka monitoring
kafka-monitor:
  enabled: true
  notify-kafka:
    cluster-id: 95
    topic-name: kmo_monitor
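Because the datasource lives under the custom prefix spring.datasource.kafka-manager, Spring Boot does not auto-configure it; a configuration class has to bind jdbc-url, username, password and driver-class-name itself. That class is elsewhere in this commit, so the following is only a minimal sketch of the usual DataSourceBuilder binding under an assumed class name (the DataSourceBuilder package shown is the Spring Boot 2.x location).

import javax.sql.DataSource;

import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.jdbc.DataSourceBuilder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

// assumed class name; the real configuration class in this commit may differ
@Configuration
public class KafkaManagerDataSourceConfig {

    @Bean
    @ConfigurationProperties(prefix = "spring.datasource.kafka-manager")
    public DataSource kafkaManagerDataSource() {
        // binds jdbc-url, username, password and driver-class-name from application.yml
        return DataSourceBuilder.create().build();
    }
}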

View File

@@ -0,0 +1,63 @@
<assembly>
<id>assembly</id>
<formats>
<format>tar</format>
<format>zip</format>
</formats>
<fileSets>
<fileSet>
<directory>src/main/resources/bin</directory>
<outputDirectory>bin</outputDirectory>
<includes>
<include>control.sh</include>
<include>start.bat</include>
</includes>
<fileMode>0755</fileMode>
</fileSet>
<fileSet>
<directory>src/main/resources</directory>
<outputDirectory>config</outputDirectory>
<includes>
<include>*.properties</include>
<include>*.xml</include>
<include>*.yml</include>
<include>env/dev/*</include>
<include>env/qa/*</include>
<include>env/uat/*</include>
<include>env/prod/*</include>
</includes>
</fileSet>
<fileSet>
<directory>target</directory>
<outputDirectory>lib</outputDirectory>
<includes>
<!--
<include>*release*.jar</include>
-->
<include>kafka-manager-web*.jar</include>
</includes>
<excludes>
<exclude>*sources.jar</exclude>
</excludes>
</fileSet>
<fileSet>
<directory>src/main/resources</directory>
<outputDirectory>logs</outputDirectory>
<fileMode>0755</fileMode>
<excludes>
<exclude>**/*</exclude>
</excludes>
</fileSet>
<!-- <fileSet>
<directory>${project.build.directory}/asciidoc</directory>
<outputDirectory>docs</outputDirectory>
<includes>
<include>md/*</include>
<include>html/*</include>
<include>pdf/*</include>
</includes>
<fileMode>0755</fileMode>
</fileSet>-->
</fileSets>
</assembly>

View File

@@ -0,0 +1,196 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="10 seconds">
<contextName>logback</contextName>
<property name="log.path" value="./logs" />
<!-- 彩色日志 -->
<!-- 彩色日志依赖的渲染类 -->
<conversionRule conversionWord="clr" converterClass="org.springframework.boot.logging.logback.ColorConverter" />
<conversionRule conversionWord="wex" converterClass="org.springframework.boot.logging.logback.WhitespaceThrowableProxyConverter" />
<conversionRule conversionWord="wEx" converterClass="org.springframework.boot.logging.logback.ExtendedWhitespaceThrowableProxyConverter" />
<!-- 彩色日志格式 -->
<property name="CONSOLE_LOG_PATTERN" value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(---){faint} %clr([%15.15t]){faint} %clr(%-40.40logger{39}){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>
<!--输出到控制台-->
<appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender">
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<level>info</level>
</filter>
<encoder>
<Pattern>${CONSOLE_LOG_PATTERN}</Pattern>
<charset>UTF-8</charset>
</encoder>
</appender>
<!--输出到文件-->
<!-- 时间滚动输出 level为 DEBUG 日志 -->
<appender name="DEBUG_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/log_debug.log</file>
<!--日志文件输出格式-->
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
<charset>UTF-8</charset> <!-- 设置字符集 -->
</encoder>
<!-- 日志记录器的滚动策略,按日期,按大小记录 -->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!-- 日志归档 -->
<fileNamePattern>${log.path}/log_debug_%d{yyyy-MM-dd}.%i.log</fileNamePattern>
<timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
<maxFileSize>100MB</maxFileSize>
</timeBasedFileNamingAndTriggeringPolicy>
<!--日志文件保留天数-->
<maxHistory>15</maxHistory>
</rollingPolicy>
<!-- 此日志文件只记录debug级别的 -->
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>debug</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<!-- 时间滚动输出 level为 INFO 日志 -->
<appender name="INFO_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<!-- 正在记录的日志文件的路径及文件名 -->
<file>${log.path}/log_info.log</file>
<!--日志文件输出格式-->
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
<charset>UTF-8</charset>
</encoder>
<!-- 日志记录器的滚动策略,按日期,按大小记录 -->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!-- 每天日志归档路径以及格式 -->
<fileNamePattern>${log.path}/log_info_%d{yyyy-MM-dd}.%i.log</fileNamePattern>
<timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
<maxFileSize>100MB</maxFileSize>
</timeBasedFileNamingAndTriggeringPolicy>
<!--日志文件保留天数-->
<maxHistory>15</maxHistory>
</rollingPolicy>
<!-- 此日志文件只记录info级别的 -->
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>info</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<!-- 时间滚动输出 level为 WARN 日志 -->
<appender name="WARN_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<!-- 正在记录的日志文件的路径及文件名 -->
<file>${log.path}/log_warn.log</file>
<!--日志文件输出格式-->
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
<charset>UTF-8</charset> <!-- 此处设置字符集 -->
</encoder>
<!-- 日志记录器的滚动策略,按日期,按大小记录 -->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${log.path}/log_warn_%d{yyyy-MM-dd}.%i.log</fileNamePattern>
<timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
<maxFileSize>100MB</maxFileSize>
</timeBasedFileNamingAndTriggeringPolicy>
<!--日志文件保留天数-->
<maxHistory>15</maxHistory>
</rollingPolicy>
<!-- 此日志文件只记录warn级别的 -->
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>warn</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<!-- 时间滚动输出 level为 ERROR 日志 -->
<appender name="ERROR_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<!-- 正在记录的日志文件的路径及文件名 -->
<file>${log.path}/log_error.log</file>
<!--日志文件输出格式-->
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
<charset>UTF-8</charset> <!-- 此处设置字符集 -->
</encoder>
<!-- 日志记录器的滚动策略,按日期,按大小记录 -->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${log.path}/log_error_%d{yyyy-MM-dd}.%i.log</fileNamePattern>
<timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
<maxFileSize>100MB</maxFileSize>
</timeBasedFileNamingAndTriggeringPolicy>
<!--日志文件保留天数-->
<maxHistory>15</maxHistory>
</rollingPolicy>
<!-- 此日志文件只记录ERROR级别的 -->
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>ERROR</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<!-- Metrics信息收集日志 -->
<appender name="COLLECTOR_METRICS_LOGGER" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/metrics/collector_metrics.log</file>
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
<charset>UTF-8</charset>
</encoder>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${log.path}/metrics/collector_metrics_%d{yyyy-MM-dd}.%i.log</fileNamePattern>
<timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
<maxFileSize>100MB</maxFileSize>
</timeBasedFileNamingAndTriggeringPolicy>
<maxHistory>15</maxHistory>
</rollingPolicy>
</appender>
<!-- Metrics信息收集日志 -->
<appender name="API_METRICS_LOGGER" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/metrics/api_metrics.log</file>
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
<charset>UTF-8</charset>
</encoder>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${log.path}/metrics/api_metrics_%d{yyyy-MM-dd}.%i.log</fileNamePattern>
<timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
<maxFileSize>100MB</maxFileSize>
</timeBasedFileNamingAndTriggeringPolicy>
<maxHistory>15</maxHistory>
</rollingPolicy>
</appender>
<logger name="COLLECTOR_METRICS_LOGGER" level="DEBUG" additivity="false">
<appender-ref ref="COLLECTOR_METRICS_LOGGER"/>
</logger>
<logger name="API_METRICS_LOGGER" level="DEBUG" additivity="false">
<appender-ref ref="API_METRICS_LOGGER"/>
</logger>
<logger name="org.apache.ibatis" level="INFO" additivity="false" />
<logger name="org.mybatis.spring" level="INFO" additivity="false" />
<logger name="com.github.miemiedev.mybatis.paginator" level="INFO" additivity="false" />
<root level="info">
<appender-ref ref="CONSOLE" />
<appender-ref ref="DEBUG_FILE" />
<appender-ref ref="INFO_FILE" />
<appender-ref ref="WARN_FILE" />
<appender-ref ref="ERROR_FILE" />
<!--<appender-ref ref="METRICS_LOG" />-->
</root>
<!--生产环境:输出到文件-->
<!--<springProfile name="pro">-->
<!--<root level="info">-->
<!--<appender-ref ref="CONSOLE" />-->
<!--<appender-ref ref="DEBUG_FILE" />-->
<!--<appender-ref ref="INFO_FILE" />-->
<!--<appender-ref ref="ERROR_FILE" />-->
<!--<appender-ref ref="WARN_FILE" />-->
<!--</root>-->
<!--</springProfile>-->
</configuration>