kafka-manager 2.0

zengqiao committed 2020-09-28 15:46:34 +08:00
parent 28d985aaf1
commit c6e4b60424
1253 changed files with 82183 additions and 37179 deletions


@@ -0,0 +1,29 @@
package com.xiaojukeji.kafka.manager.web;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.web.servlet.ServletComponentScan;
import org.springframework.scheduling.annotation.EnableAsync;
import org.springframework.scheduling.annotation.EnableScheduling;
/**
* Starts the Spring Boot application
* @author huangyiminghappy@163.com
* @date 2019-04-24
*/
@EnableAsync
@EnableScheduling
@ServletComponentScan
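// Note: @SpringBootApplication already implies @EnableAutoConfiguration, so the explicit annotation below is redundant (but harmless).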
@EnableAutoConfiguration
@SpringBootApplication(scanBasePackages = {"com.xiaojukeji.kafka.manager"})
public class MainApplication {
public static void main(String[] args) {
try {
SpringApplication sa = new SpringApplication(MainApplication.class);
sa.run(args);
} catch (Exception e) {
e.printStackTrace();
}
}
}


@@ -0,0 +1,29 @@
package com.xiaojukeji.kafka.manager.web.api;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.RestController;
import springfox.documentation.annotations.ApiIgnore;
/**
* @author zengqiao
* @date 20/6/18
*/
@ApiIgnore
@Api(description = "web应用探活接口(REST)")
@RestController
@RequestMapping("api/")
public class HealthController {
@ApiIgnore
@RequestMapping(path = "health", method = RequestMethod.GET)
@ResponseBody
@ApiOperation(value = "探活接口", notes = "")
public Result<String> health() {
return Result.buildSuc();
}
}
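
For reference, the probe above is exposed at GET /api/health (the class-level "api/" prefix plus the method path). Below is a minimal caller sketch using the JDK 11+ java.net.http client; the host, port, and empty context path are assumptions, not values taken from this commit:

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class HealthProbeSketch {
    public static void main(String[] args) throws Exception {
        HttpClient client = HttpClient.newHttpClient();
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8080/api/health"))  // host/port are assumptions
                .GET()
                .build();
        HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
        // Expect HTTP 200 with the Result envelope produced by Result.buildSuc()
        System.out.println(response.statusCode() + " " + response.body());
    }
}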


@@ -0,0 +1,85 @@
package com.xiaojukeji.kafka.manager.web.api.versionone;
import com.xiaojukeji.kafka.manager.common.constant.Constant;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.ResultStatus;
import com.xiaojukeji.kafka.manager.common.entity.ao.account.Account;
import com.xiaojukeji.kafka.manager.common.entity.dto.normal.LoginDTO;
import com.xiaojukeji.kafka.manager.common.entity.vo.common.AccountVO;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.account.LoginService;
import com.xiaojukeji.kafka.manager.common.constant.ApiPrefix;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
* Login
* @author huangyiminghappy@163.com
* @date 19/4/29
*/
@Api(tags = "SSO-Login相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V1_SSO_PREFIX)
public class LoginController {
private static final Logger LOGGER = LoggerFactory.getLogger(LoginController.class);
@Autowired
private LoginService loginService;
@ApiOperation(value = "登陆", notes = "")
@RequestMapping(value = "login", method = RequestMethod.POST)
@ResponseBody
public Result<AccountVO> login(HttpServletRequest request,
HttpServletResponse response,
@RequestBody LoginDTO dto){
Account account = loginService.login(request, response, dto);
if (ValidateUtils.isNull(account)) {
return Result.buildFrom(ResultStatus.LOGIN_FAILED);
}
AccountVO vo = new AccountVO();
vo.setUsername(account.getUsername());
vo.setChineseName(account.getChineseName());
vo.setDepartment(account.getDepartment());
vo.setRole(account.getAccountRoleEnum().getRole());
return new Result<>(vo);
}
@ApiOperation(value = "登出", notes = "")
@RequestMapping(value = "logout", method = RequestMethod.DELETE)
@ResponseBody
public Result logoff(HttpServletRequest request, HttpServletResponse response) {
loginService.logout(request, response, true);
return new Result();
}
@Deprecated
@ApiOperation(value = "登录检查", notes = "检查SSO返回的Code")
@RequestMapping(value = "xiaojukeji/login-check", method = RequestMethod.POST)
@ResponseBody
public Result<AccountVO> checkCodeAndGetStaffInfo(HttpServletRequest request,
HttpServletResponse response,
@RequestBody LoginDTO dto) {
Result<AccountVO> ra = login(request, response, dto);
if (!Constant.SUCCESS.equals(ra.getCode())) {
LOGGER.info("user login failed, req:{} result:{}.", dto, ra);
} else {
LOGGER.info("user login success, req:{} result:{}.", dto, ra);
}
return ra;
}
@Deprecated
@ApiOperation(value = "登出", notes = "")
@RequestMapping(value = "xiaojukeji/logout", method = RequestMethod.DELETE)
@ResponseBody
public Result logout(HttpServletRequest request, HttpServletResponse response) {
return logoff(request, response);
}
}


@@ -0,0 +1,52 @@
package com.xiaojukeji.kafka.manager.web.api.versionone.gateway;
import com.alibaba.fastjson.JSON;
import com.xiaojukeji.kafka.manager.common.annotations.ApiLevel;
import com.xiaojukeji.kafka.manager.common.constant.ApiLevelContent;
import com.xiaojukeji.kafka.manager.common.entity.DeprecatedResponseResult;
import com.xiaojukeji.kafka.manager.common.entity.dto.gateway.TopicConnectionDTO;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.service.service.gateway.TopicConnectionService;
import com.xiaojukeji.kafka.manager.common.constant.ApiPrefix;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.List;
/**
* @author zengqiao
* @date 20/7/6
*/
@Api(tags = "GATEWAY-WEB相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.GATEWAY_API_V1_PREFIX)
public class GatewayHeartbeatController {
private final static Logger LOGGER = LoggerFactory.getLogger(GatewayHeartbeatController.class);
@Autowired
private TopicConnectionService topicConnectionService;
@ApiLevel(level = ApiLevelContent.LEVEL_NORMAL_3)
@ApiOperation(value = "连接信息上报入口", notes = "Broker主动上报信息")
@RequestMapping(value = "heartbeat/survive-user", method = RequestMethod.POST)
@ResponseBody
public DeprecatedResponseResult receiveTopicConnections(@RequestParam("clusterId") String clusterId,
@RequestParam("brokerId") String brokerId,
@RequestBody List<TopicConnectionDTO> dtoList) {
try {
if (ValidateUtils.isEmptyList(dtoList)) {
return DeprecatedResponseResult.success("success");
}
topicConnectionService.batchAdd(dtoList);
return DeprecatedResponseResult.success("success");
} catch (Exception e) {
LOGGER.error("receive topic connections failed, clusterId:{} brokerId:{} req:{}",
clusterId, brokerId, JSON.toJSONString(dtoList), e);
}
return DeprecatedResponseResult.failure("fail");
}
}
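
A hedged sketch of how a broker-side reporter might call the endpoint above. The gateway URL prefix comes from ApiPrefix.GATEWAY_API_V1_PREFIX, whose value is not shown in this diff, so the "/gateway/api/v1/" path below is a placeholder, as are the host, port, and IDs; posting an empty JSON array exercises the early-return success branch of receiveTopicConnections:

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class HeartbeatReportSketch {
    public static void main(String[] args) throws Exception {
        // "/gateway/api/v1/" stands in for ApiPrefix.GATEWAY_API_V1_PREFIX
        String url = "http://localhost:8080/gateway/api/v1/heartbeat/survive-user?clusterId=1&brokerId=1";
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create(url))
                .header("Content-Type", "application/json")
                .POST(HttpRequest.BodyPublishers.ofString("[]"))  // empty connection list -> success branch
                .build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.body());
    }
}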


@@ -0,0 +1,49 @@
package com.xiaojukeji.kafka.manager.web.api.versionone.gateway;
import com.xiaojukeji.kafka.manager.common.annotations.ApiLevel;
import com.xiaojukeji.kafka.manager.common.constant.ApiLevelContent;
import com.xiaojukeji.kafka.manager.common.entity.DeprecatedResponseResult;
import com.xiaojukeji.kafka.manager.common.utils.ListUtils;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.common.entity.pojo.gateway.TopicReportDO;
import com.xiaojukeji.kafka.manager.service.service.gateway.TopicReportService;
import com.xiaojukeji.kafka.manager.common.constant.ApiPrefix;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.ArrayList;
import java.util.List;
/**
* @author zengqiao
* @date 20/7/7
*/
@Api(tags = "GATEWAY-开启JMX上报相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.GATEWAY_API_V1_PREFIX)
public class GatewayReportController {
private final static Logger LOGGER = LoggerFactory.getLogger(GatewayReportController.class);
@Autowired
private TopicReportService topicReportService;
@ApiLevel(level = ApiLevelContent.LEVEL_IMPORTANT_2)
@ApiOperation(value = "查询开启JMX采集的Topic", notes = "")
@RequestMapping(value = "report/jmx/topics", method = RequestMethod.GET)
@ResponseBody
public DeprecatedResponseResult getJmxReportTopics(@RequestParam("clusterId") Long clusterId) {
List<TopicReportDO> doList = topicReportService.getNeedReportTopic(clusterId);
if (ValidateUtils.isEmptyList(doList)) {
return DeprecatedResponseResult.success();
}
List<String> topicNameList = new ArrayList<>();
for (TopicReportDO elem: doList) {
topicNameList.add(elem.getTopicName());
}
return DeprecatedResponseResult.success(ListUtils.strList2String(topicNameList));
}
}


@@ -0,0 +1,93 @@
package com.xiaojukeji.kafka.manager.web.api.versionone.gateway;
import com.alibaba.fastjson.JSON;
import com.xiaojukeji.kafka.manager.common.annotations.ApiLevel;
import com.xiaojukeji.kafka.manager.common.constant.ApiLevelContent;
import com.xiaojukeji.kafka.manager.common.entity.DeprecatedResponseResult;
import com.xiaojukeji.kafka.manager.common.entity.dto.gateway.KafkaAclSearchDTO;
import com.xiaojukeji.kafka.manager.common.entity.dto.gateway.KafkaUserSearchDTO;
import com.xiaojukeji.kafka.manager.common.entity.vo.gateway.KafkaSecurityVO;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.common.entity.pojo.gateway.KafkaAclDO;
import com.xiaojukeji.kafka.manager.common.entity.pojo.gateway.KafkaUserDO;
import com.xiaojukeji.kafka.manager.service.service.gateway.SecurityService;
import com.xiaojukeji.kafka.manager.common.constant.ApiPrefix;
import com.xiaojukeji.kafka.manager.web.converters.GatewayModelConverter;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.ArrayList;
import java.util.List;
/**
* @author zengqiao
* @date 20/7/7
*/
@Api(tags = "GATEWAY-WEB-权限相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.GATEWAY_API_V1_PREFIX)
public class GatewaySecurityController {
private final static Logger LOGGER = LoggerFactory.getLogger(GatewaySecurityController.class);
@Autowired
private SecurityService securityService;
@ApiLevel(level = ApiLevelContent.LEVEL_VIP_1)
@ApiOperation(value = "Kafka用户查询", notes = "")
@RequestMapping(value = "security/users", method = RequestMethod.POST)
@ResponseBody
public DeprecatedResponseResult<String> getKafkaUsers(@RequestBody KafkaUserSearchDTO dto) {
if (ValidateUtils.isNull(dto) || !dto.paramLegal()) {
return DeprecatedResponseResult.failure("invalid request");
}
try {
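// An end value of 0 means "up to now": the current timestamp is substituted below.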
List<KafkaUserDO> doList = securityService.getKafkaUsers(
dto.getStart(),
dto.getEnd().equals(0L)? System.currentTimeMillis(): dto.getEnd()
);
if (ValidateUtils.isEmptyList(doList)) {
return DeprecatedResponseResult.success();
}
KafkaSecurityVO vo = new KafkaSecurityVO();
vo.setRows(new ArrayList<>(GatewayModelConverter.convert2KafkaUserVOList(doList)));
return DeprecatedResponseResult.success(JSON.toJSONString(vo));
} catch (Exception e) {
LOGGER.error("get kafka users failed, req:{}.", dto, e);
return DeprecatedResponseResult.failure("get kafka users exception");
}
}
@ApiLevel(level = ApiLevelContent.LEVEL_IMPORTANT_2)
@ApiOperation(value = "Kafka用户权限查询", notes = "")
@RequestMapping(value = "security/acls", method = RequestMethod.POST)
@ResponseBody
public DeprecatedResponseResult<String> getKafkaAcls(@RequestBody KafkaAclSearchDTO dto) {
if (ValidateUtils.isNull(dto) || !dto.paramLegal()) {
return DeprecatedResponseResult.failure("invalid request");
}
try {
List<KafkaAclDO> doList = securityService.getKafkaAcls(
dto.getClusterId(),
dto.getStart(),
dto.getEnd().equals(0L)? System.currentTimeMillis(): dto.getEnd()
);
if (ValidateUtils.isEmptyList(doList)) {
return DeprecatedResponseResult.success();
}
KafkaSecurityVO vo = new KafkaSecurityVO();
vo.setRows(new ArrayList<>(GatewayModelConverter.convert2KafkaAclVOList(doList)));
return DeprecatedResponseResult.success(JSON.toJSONString(vo));
} catch (Exception e) {
LOGGER.error("get kafka acls failed, req:{}.", dto, e);
return DeprecatedResponseResult.failure("get kafka acls exception");
}
}
}


@@ -0,0 +1,174 @@
package com.xiaojukeji.kafka.manager.web.api.versionone.gateway;
import com.alibaba.fastjson.JSON;
import com.xiaojukeji.kafka.manager.common.annotations.ApiLevel;
import com.xiaojukeji.kafka.manager.common.bizenum.gateway.GatewayConfigKeyEnum;
import com.xiaojukeji.kafka.manager.common.constant.ApiLevelContent;
import com.xiaojukeji.kafka.manager.common.entity.DeprecatedResponseResult;
import com.xiaojukeji.kafka.manager.common.entity.ao.gateway.*;
import com.xiaojukeji.kafka.manager.common.entity.pojo.gateway.GatewayConfigDO;
import com.xiaojukeji.kafka.manager.common.entity.vo.gateway.GatewayConfigVO;
import com.xiaojukeji.kafka.manager.common.utils.ListUtils;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.service.service.gateway.GatewayConfigService;
import com.xiaojukeji.kafka.manager.common.constant.ApiPrefix;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
* @author zengqiao
* @date 20/7/27
*/
@Api(tags = "GATEWAY-服务发现相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.GATEWAY_API_V1_PREFIX)
public class GatewayServiceDiscoveryController {
@Autowired
private GatewayConfigService gatewayConfigService;
@ApiLevel(level = ApiLevelContent.LEVEL_VIP_1)
@ApiOperation(value = "获取指定集群服务地址", notes = "")
@RequestMapping(value = "discovery/address", method = RequestMethod.GET)
@ResponseBody
public String getKafkaBootstrapServer(@RequestParam("clusterId") Long clusterId) {
if (ValidateUtils.isNull(clusterId)) {
return "";
}
GatewayConfigDO configDO = gatewayConfigService.getByTypeAndName(
GatewayConfigKeyEnum.SD_CLUSTER_ID.getConfigType(),
String.valueOf(clusterId)
);
if (ValidateUtils.isNull(configDO)) {
return "";
}
return configDO.getValue();
}
@ApiLevel(level = ApiLevelContent.LEVEL_VIP_1)
@ApiOperation(value = "获取集群服务地址", notes = "")
@RequestMapping(value = "discovery/init", method = RequestMethod.GET)
@ResponseBody
public DeprecatedResponseResult<String> getAllKafkaBootstrapServers() {
KafkaBootstrapServerConfig config =
gatewayConfigService.getKafkaBootstrapServersConfig(Long.MIN_VALUE);
if (ValidateUtils.isNull(config) || ValidateUtils.isNull(config.getClusterIdBootstrapServersMap())) {
return DeprecatedResponseResult.failure("call init kafka bootstrap servers failed");
}
if (config.getClusterIdBootstrapServersMap().isEmpty()) {
return DeprecatedResponseResult.success();
}
return DeprecatedResponseResult.success(JSON.toJSONString(config.getClusterIdBootstrapServersMap()));
}
@ApiLevel(level = ApiLevelContent.LEVEL_IMPORTANT_2)
@ApiOperation(value = "获取集群服务地址", notes = "")
@RequestMapping(value = "discovery/update", method = RequestMethod.GET)
@ResponseBody
public DeprecatedResponseResult getBootstrapServersIfNeeded(@RequestParam("versionNumber") long versionNumber) {
KafkaBootstrapServerConfig config =
gatewayConfigService.getKafkaBootstrapServersConfig(versionNumber);
if (ValidateUtils.isNull(config) || ValidateUtils.isNull(config.getClusterIdBootstrapServersMap())) {
return DeprecatedResponseResult.failure("call update kafka bootstrap servers failed");
}
if (config.getClusterIdBootstrapServersMap().isEmpty()) {
return DeprecatedResponseResult.success();
}
return DeprecatedResponseResult.success(JSON.toJSONString(new GatewayConfigVO(
String.valueOf(config.getVersion()),
JSON.toJSONString(config.getClusterIdBootstrapServersMap())
)));
}
@ApiLevel(level = ApiLevelContent.LEVEL_IMPORTANT_2)
@ApiOperation(value = "最大并发请求数", notes = "")
@RequestMapping(value = "discovery/max-request-num", method = RequestMethod.GET)
@ResponseBody
public DeprecatedResponseResult getMaxRequestNum(@RequestParam("versionNumber") long versionNumber) {
RequestQueueConfig config = gatewayConfigService.getRequestQueueConfig(versionNumber);
if (ValidateUtils.isNull(config)) {
return DeprecatedResponseResult.failure("call get request queue size config failed");
}
if (ValidateUtils.isNull(config.getMaxRequestQueueSize())) {
return DeprecatedResponseResult.success();
}
return DeprecatedResponseResult.success(JSON.toJSONString(
new GatewayConfigVO(
String.valueOf(config.getVersion()),
String.valueOf(config.getMaxRequestQueueSize())
)
));
}
@ApiLevel(level = ApiLevelContent.LEVEL_IMPORTANT_2)
@ApiOperation(value = "最大APP请求速率", notes = "")
@RequestMapping(value = "discovery/appId-rate", method = RequestMethod.GET)
@ResponseBody
public DeprecatedResponseResult getAppIdRate(@RequestParam("versionNumber") long versionNumber) {
AppRateConfig config = gatewayConfigService.getAppRateConfig(versionNumber);
if (ValidateUtils.isNull(config)) {
return DeprecatedResponseResult.failure("call get app rate config failed");
}
if (ValidateUtils.isNull(config.getAppRateLimit())) {
return DeprecatedResponseResult.success();
}
return DeprecatedResponseResult.success(JSON.toJSONString(
new GatewayConfigVO(
String.valueOf(config.getVersion()),
String.valueOf(config.getAppRateLimit())
)
));
}
@ApiLevel(level = ApiLevelContent.LEVEL_IMPORTANT_2)
@ApiOperation(value = "最大IP请求速率", notes = "")
@RequestMapping(value = "discovery/ip-rate", method = RequestMethod.GET)
@ResponseBody
public DeprecatedResponseResult getIpRate(@RequestParam("versionNumber") long versionNumber) {
IpRateConfig config = gatewayConfigService.getIpRateConfig(versionNumber);
if (ValidateUtils.isNull(config)) {
return DeprecatedResponseResult.failure("call get ip rate config failed");
}
if (ValidateUtils.isNull(config.getIpRateLimit())) {
return DeprecatedResponseResult.success();
}
return DeprecatedResponseResult.success(JSON.toJSONString(
new GatewayConfigVO(
String.valueOf(config.getVersion()),
String.valueOf(config.getIpRateLimit())
)
));
}
@ApiLevel(level = ApiLevelContent.LEVEL_IMPORTANT_2)
@ApiOperation(value = "最大SP请求速率", notes = "")
@RequestMapping(value = "discovery/sp-limit", method = RequestMethod.GET)
@ResponseBody
public DeprecatedResponseResult getSpLimit(@RequestParam("versionNumber") long versionNumber) {
SpRateConfig config =
gatewayConfigService.getSpRateConfig(versionNumber);
if (ValidateUtils.isNull(config) || ValidateUtils.isNull(config.getSpRateMap())) {
return DeprecatedResponseResult.failure("call update kafka bootstrap servers failed");
}
if (config.getSpRateMap().isEmpty()) {
return DeprecatedResponseResult.success();
}
List<String> strList = new ArrayList<>();
for (Map.Entry<String, Long> entry: config.getSpRateMap().entrySet()) {
strList.add(entry.getKey() + "#" + String.valueOf(entry.getValue()));
}
return DeprecatedResponseResult.success(JSON.toJSONString(new GatewayConfigVO(
String.valueOf(config.getVersion()),
ListUtils.strList2String(strList)
)));
}
}
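
The discovery endpoints above follow a version-gated polling contract: the caller sends the version it last applied, and the server answers with a GatewayConfigVO (new version plus serialized payload) only when newer configuration exists, or an empty success otherwise. Below is a minimal polling sketch; the URL prefix, host, and port are placeholders since ApiPrefix.GATEWAY_API_V1_PREFIX is not shown in this diff:

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class DiscoveryPollSketch {
    public static void main(String[] args) throws Exception {
        // Start from Long.MIN_VALUE to request the full payload (mirrors what discovery/init does internally).
        long knownVersion = Long.MIN_VALUE;
        // "/gateway/api/v1/" stands in for ApiPrefix.GATEWAY_API_V1_PREFIX
        String url = "http://localhost:8080/gateway/api/v1/discovery/update?versionNumber=" + knownVersion;
        HttpResponse<String> response = HttpClient.newHttpClient().send(
                HttpRequest.newBuilder().uri(URI.create(url)).GET().build(),
                HttpResponse.BodyHandlers.ofString());
        // When an update exists, the body wraps a GatewayConfigVO: the new version plus the clusterId -> bootstrap servers map.
        System.out.println(response.body());
    }
}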


@@ -0,0 +1,58 @@
package com.xiaojukeji.kafka.manager.web.api.versionone.normal;
import com.xiaojukeji.kafka.manager.account.AccountService;
import com.xiaojukeji.kafka.manager.account.common.EnterpriseStaff;
import com.xiaojukeji.kafka.manager.common.bizenum.AccountRoleEnum;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.vo.common.AccountRoleVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.common.AccountSummaryVO;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.common.utils.SpringTool;
import com.xiaojukeji.kafka.manager.common.constant.ApiPrefix;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.ArrayList;
import java.util.List;
/**
* @author zengqiao
* @date 20/4/27
*/
@Api(tags = "Normal-Account相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V1_NORMAL_PREFIX)
public class NormalAccountController {
@Autowired
private AccountService accountService;
@ApiOperation(value = "账号搜索", notes = "仅支持搜索, 不支持全部展示")
@RequestMapping(value = "accounts", method = RequestMethod.GET)
@ResponseBody
public Result<List<AccountSummaryVO>> searchOnJobStaffByKeyWord(@RequestParam("keyWord") String keyWord) {
List<EnterpriseStaff> staffList = accountService.searchAccountByPrefix(keyWord);
if (ValidateUtils.isEmptyList(staffList)) {
return new Result<>();
}
List<AccountSummaryVO> voList = new ArrayList<>();
for (EnterpriseStaff staff: staffList) {
AccountSummaryVO vo = new AccountSummaryVO();
vo.setUsername(staff.getUsername());
vo.setChineseName(staff.getChineseName());
vo.setDepartment(staff.getDepartment());
voList.add(vo);
}
return new Result<>(voList);
}
@ApiOperation(value = "查询角色", notes = "")
@RequestMapping(value = "accounts/account", method = RequestMethod.GET)
@ResponseBody
public Result<AccountRoleVO> searchAccount() {
String username = SpringTool.getUserName();
AccountRoleEnum accountRoleEnum = accountService.getAccountRoleFromCache(username);
return new Result<>(new AccountRoleVO(username, accountRoleEnum.getRole()));
}
}


@@ -0,0 +1,160 @@
package com.xiaojukeji.kafka.manager.web.api.versionone.normal;
import com.xiaojukeji.kafka.manager.common.annotations.ApiLevel;
import com.xiaojukeji.kafka.manager.common.bizenum.TopicAuthorityEnum;
import com.xiaojukeji.kafka.manager.common.constant.ApiLevelContent;
import com.xiaojukeji.kafka.manager.common.constant.Constant;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.ResultStatus;
import com.xiaojukeji.kafka.manager.common.entity.ao.AppTopicDTO;
import com.xiaojukeji.kafka.manager.common.entity.dto.normal.AppDTO;
import com.xiaojukeji.kafka.manager.common.entity.vo.normal.QuotaVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.normal.app.AppTopicAuthorityVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.normal.app.AppTopicVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.normal.topic.TopicConnectionVO;
import com.xiaojukeji.kafka.manager.common.utils.CopyUtils;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.service.cache.LogicalClusterMetadataManager;
import com.xiaojukeji.kafka.manager.service.cache.PhysicalClusterMetadataManager;
import com.xiaojukeji.kafka.manager.service.service.gateway.AppService;
import com.xiaojukeji.kafka.manager.service.service.gateway.AuthorityService;
import com.xiaojukeji.kafka.manager.service.service.gateway.QuotaService;
import com.xiaojukeji.kafka.manager.service.service.gateway.TopicConnectionService;
import com.xiaojukeji.kafka.manager.common.utils.SpringTool;
import com.xiaojukeji.kafka.manager.common.constant.ApiPrefix;
import com.xiaojukeji.kafka.manager.common.entity.vo.normal.app.AppVO;
import com.xiaojukeji.kafka.manager.web.converters.AppConverter;
import com.xiaojukeji.kafka.manager.web.converters.TopicModelConverter;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
/**
* @author zengqiao
* @date 20/4/7
*/
@Api(tags = "Normal-APP相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V1_NORMAL_PREFIX)
public class NormalAppController {
@Autowired
private AppService appService;
@Autowired
private QuotaService quotaService;
@Autowired
private LogicalClusterMetadataManager logicalClusterMetadataManager;
@Autowired
private TopicConnectionService connectionService;
@Autowired
private AuthorityService authorityService;
@ApiLevel(level = ApiLevelContent.LEVEL_NORMAL_3, rateLimit = 1)
@ApiOperation(value = "App列表", notes = "")
@RequestMapping(value = "apps", method = RequestMethod.GET)
@ResponseBody
public Result<List<AppVO>> getApps() {
return new Result<>(AppConverter.convert2AppVOList(
appService.getByPrincipal(SpringTool.getUserName()))
);
}
@ApiOperation(value = "App基本信息", notes = "")
@RequestMapping(value = "apps/{appId}/basic-info", method = RequestMethod.GET)
@ResponseBody
public Result<AppVO> getAppBasicInfo(@PathVariable String appId) {
return new Result<>(AppConverter.convert2AppVO(
appService.getByAppId(appId))
);
}
@ApiOperation(value = "App修改", notes = "")
@RequestMapping(value = "apps", method = RequestMethod.PUT)
@ResponseBody
public Result modifyApp(@RequestBody AppDTO dto) {
return Result.buildFrom(
appService.updateByAppId(dto, SpringTool.getUserName(), false)
);
}
@ApiOperation(value = "有权限的Topic信息", notes = "null: 全部, true:我的Topic, false:非我的有权限的Topic")
@RequestMapping(value = "apps/{appId}/topics", method = RequestMethod.GET)
@ResponseBody
public Result<List<AppTopicVO>> getAppTopics(@PathVariable String appId,
@RequestParam(value = "mine") Boolean mine) {
List<AppTopicDTO> dtoList = appService.getAppTopicDTOList(appId, mine);
List<AppTopicVO> voList = new ArrayList<>();
for (AppTopicDTO dto : dtoList) {
if (TopicAuthorityEnum.DENY.getCode().equals(dto.getAccess())) {
continue;
}
AppTopicVO vo = new AppTopicVO();
CopyUtils.copyProperties(vo, dto);
vo.setClusterId(dto.getLogicalClusterId());
vo.setClusterName(dto.getLogicalClusterName());
voList.add(vo);
}
return new Result<>(voList);
}
@ApiOperation(value = "Quota查询", notes = "")
@RequestMapping(value = "apps/{appId}/quotas", method = RequestMethod.GET)
@ResponseBody
public Result<List<QuotaVO>> getAppIdQuota(
@PathVariable String appId,
@RequestParam(value = "clusterId") Long clusterId,
@RequestParam(value = "topicName") String topicName,
@RequestParam(value = "isPhysicalClusterId", required = false) Boolean isPhysicalClusterId) {
Long physicalClusterId = logicalClusterMetadataManager.getPhysicalClusterId(clusterId, isPhysicalClusterId);
if (ValidateUtils.isNull(physicalClusterId)) {
return Result.buildFrom(ResultStatus.CLUSTER_NOT_EXIST);
}
if (!PhysicalClusterMetadataManager.isTopicExist(physicalClusterId, topicName)) {
return Result.buildFrom(ResultStatus.TOPIC_NOT_EXIST);
}
return new Result<>(AppConverter.convert2QuotaVOList(
clusterId, quotaService.getQuotaFromZk(physicalClusterId, topicName, appId))
);
}
@ApiOperation(value = "应用连接信息", notes = "")
@RequestMapping(value = "apps/{appId}/connections", method = RequestMethod.GET)
@ResponseBody
public Result<List<TopicConnectionVO>> getAppIdQuota(@PathVariable String appId) {
return new Result<>(TopicModelConverter.convert2TopicConnectionVOList(
connectionService.getByAppId(
appId,
new Date(System.currentTimeMillis() - Constant.TOPIC_CONNECTION_LATEST_TIME_MS),
new Date()))
);
}
@ApiOperation(value = "app对Topic权限信息", notes = "")
@RequestMapping(value = "apps/{appId}/authorities", method = RequestMethod.GET)
@ResponseBody
public Result<AppTopicAuthorityVO> getAppIdQuota(@PathVariable String appId,
@RequestParam(value = "clusterId") Long clusterId,
@RequestParam(value = "topicName") String topicName) {
Long physicalClusterId = logicalClusterMetadataManager.getPhysicalClusterId(clusterId);
if (ValidateUtils.isNull(physicalClusterId)) {
return Result.buildFrom(ResultStatus.CLUSTER_NOT_EXIST);
}
if (!PhysicalClusterMetadataManager.isTopicExist(physicalClusterId, topicName)) {
return Result.buildFrom(ResultStatus.TOPIC_NOT_EXIST);
}
return new Result<>(AppConverter.convert2AppTopicAuthorityVO(
appId,
topicName,
authorityService.getAuthority(physicalClusterId, topicName, appId))
);
}
}


@@ -0,0 +1,106 @@
package com.xiaojukeji.kafka.manager.web.api.versionone.normal;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.vo.normal.BillStaffDetailVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.normal.BillStaffSummaryVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.normal.BillTopicVO;
import com.xiaojukeji.kafka.manager.common.utils.DateUtils;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.common.entity.pojo.KafkaBillDO;
import com.xiaojukeji.kafka.manager.common.entity.pojo.LogicalClusterDO;
import com.xiaojukeji.kafka.manager.service.cache.LogicalClusterMetadataManager;
import com.xiaojukeji.kafka.manager.service.service.KafkaBillService;
import com.xiaojukeji.kafka.manager.common.utils.SpringTool;
import com.xiaojukeji.kafka.manager.common.constant.ApiPrefix;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.*;
/**
* @author zengqiao
* @date 20/4/26
*/
@Api(tags = "Normal-Bill相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V1_NORMAL_PREFIX)
public class NormalBillController {
@Autowired
private KafkaBillService kafkaBillService;
@Autowired
private LogicalClusterMetadataManager logicalClusterMetadataManager;
@ApiOperation(value = "用户账单概览", notes = "")
@RequestMapping(value = "bills/staff-summary", method = RequestMethod.GET)
@ResponseBody
public Result<List<BillStaffSummaryVO>> getBillStaffSummary(@RequestParam("startTime") Long startTime,
@RequestParam("endTime") Long endTime) {
List<KafkaBillDO> kafkaBillDOList =
kafkaBillService.getByPrincipal(SpringTool.getUserName(), new Date(startTime), new Date(endTime));
if (ValidateUtils.isEmptyList(kafkaBillDOList)) {
return new Result<>();
}
Map<String, BillStaffSummaryVO> billMap = new TreeMap<>();
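// Group bills by gmtDay, accumulating topic count, quota, and cost per day.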
for (KafkaBillDO kafkaBillDO: kafkaBillDOList) {
BillStaffSummaryVO vo = billMap.get(kafkaBillDO.getGmtDay());
if (ValidateUtils.isNull(vo)) {
vo = new BillStaffSummaryVO();
vo.setUsername(SpringTool.getUserName());
vo.setTopicNum(0);
vo.setQuota(0.0);
vo.setCost(0.0);
vo.setGmtMonth(kafkaBillDO.getGmtDay());
vo.setTimestamp(kafkaBillDO.getGmtCreate().getTime());
billMap.put(kafkaBillDO.getGmtDay(), vo);
}
vo.setTopicNum(vo.getTopicNum() + 1);
vo.setQuota(vo.getQuota() + kafkaBillDO.getQuota());
vo.setCost(vo.getCost() + kafkaBillDO.getCost());
}
return new Result<>(new ArrayList<>(billMap.values()));
}
@ApiOperation(value = "用户账单详情", notes = "")
@RequestMapping(value = "bills/staff-detail", method = RequestMethod.GET)
@ResponseBody
public Result<BillStaffDetailVO> getBillStaffDetail(@RequestParam("timestamp") Long timestamp) {
List<KafkaBillDO> kafkaBillDOList =
kafkaBillService.getByGmtDay(DateUtils.getFormattedDate(timestamp).substring(0, 7));
if (ValidateUtils.isEmptyList(kafkaBillDOList)) {
return new Result<>();
}
String username = SpringTool.getUserName();
BillStaffDetailVO billStaffDetailVO = new BillStaffDetailVO();
billStaffDetailVO.setUsername(username);
billStaffDetailVO.setBillList(new ArrayList<>());
Double costSum = 0.0;
for (KafkaBillDO kafkaBillDO: kafkaBillDOList) {
if (!kafkaBillDO.getPrincipal().equals(username)) {
continue;
}
BillTopicVO vo = new BillTopicVO();
vo.setClusterName("unknown");
LogicalClusterDO logicalClusterDO = logicalClusterMetadataManager.getTopicLogicalCluster(
kafkaBillDO.getClusterId(),
kafkaBillDO.getTopicName()
);
if (!ValidateUtils.isNull(logicalClusterDO)) {
vo.setClusterId(logicalClusterDO.getId());
vo.setClusterName(logicalClusterDO.getName());
}
vo.setTopicName(kafkaBillDO.getTopicName());
vo.setQuota(kafkaBillDO.getQuota());
vo.setCost(kafkaBillDO.getCost());
costSum += kafkaBillDO.getCost();
billStaffDetailVO.getBillList().add(vo);
}
billStaffDetailVO.setCostSum(costSum);
return new Result<>(billStaffDetailVO);
}
}


@@ -0,0 +1,204 @@
package com.xiaojukeji.kafka.manager.web.api.versionone.normal;
import com.xiaojukeji.kafka.manager.common.bizenum.KafkaClientEnum;
import com.xiaojukeji.kafka.manager.common.constant.KafkaMetricsCollections;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.ResultStatus;
import com.xiaojukeji.kafka.manager.common.entity.ao.cluster.LogicalCluster;
import com.xiaojukeji.kafka.manager.common.entity.vo.common.BrokerOverviewVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.common.RealTimeMetricsVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.common.TopicOverviewVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.normal.cluster.LogicClusterVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.normal.cluster.NormalClusterMetricsVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.normal.cluster.TopicMetadataVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.common.TopicThrottleVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.rd.broker.BrokerMetadataVO;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.common.zookeeper.znode.brokers.BrokerMetadata;
import com.xiaojukeji.kafka.manager.common.entity.ao.BrokerOverviewDTO;
import com.xiaojukeji.kafka.manager.common.entity.pojo.LogicalClusterDO;
import com.xiaojukeji.kafka.manager.service.cache.LogicalClusterMetadataManager;
import com.xiaojukeji.kafka.manager.service.service.BrokerService;
import com.xiaojukeji.kafka.manager.service.service.LogicalClusterService;
import com.xiaojukeji.kafka.manager.service.service.ThrottleService;
import com.xiaojukeji.kafka.manager.service.service.TopicService;
import com.xiaojukeji.kafka.manager.common.utils.SpringTool;
import com.xiaojukeji.kafka.manager.common.constant.ApiPrefix;
import com.xiaojukeji.kafka.manager.web.converters.ClusterModelConverter;
import com.xiaojukeji.kafka.manager.web.converters.CommonModelConverter;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.*;
/**
* @author zengqiao
* @date 20/3/31
*/
@Api(tags = "Normal-Cluster相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V1_NORMAL_PREFIX)
public class NormalClusterController {
@Autowired
private ThrottleService throttleService;
@Autowired
private BrokerService brokerService;
@Autowired
private TopicService topicService;
@Autowired
private LogicalClusterService logicalClusterService;
@Autowired
private LogicalClusterMetadataManager logicalClusterMetadataManager;
@ApiOperation(value = "集群列表", notes = "")
@RequestMapping(value = "clusters/basic-info", method = RequestMethod.GET)
@ResponseBody
public Result<List<LogicClusterVO>> getLogicClusterVOList(
@RequestParam(value = "all", required = false) Boolean all) {
if (!ValidateUtils.isNull(all) && all) {
return new Result<>(ClusterModelConverter.convert2LogicClusterVOList(
logicalClusterService.getAllLogicalCluster()
));
}
return new Result<>(ClusterModelConverter.convert2LogicClusterVOList(
logicalClusterService.getLogicalClusterListByPrincipal(SpringTool.getUserName())
));
}
@ApiOperation(value = "集群基本信息", notes = "")
@RequestMapping(value = "clusters/{logicalClusterId}/basic-info", method = RequestMethod.GET)
@ResponseBody
public Result<LogicClusterVO> getLogicClusterVO(@PathVariable Long logicalClusterId) {
LogicalCluster logicalCluster = logicalClusterService.getLogicalCluster(logicalClusterId);
if (ValidateUtils.isNull(logicalCluster)) {
return Result.buildFrom(ResultStatus.CLUSTER_NOT_EXIST);
}
return new Result<>(ClusterModelConverter.convert2LogicClusterVO(logicalCluster));
}
@ApiOperation(value = "集群实时流量")
@RequestMapping(value = "clusters/{logicalClusterId}/metrics", method = RequestMethod.GET)
@ResponseBody
public Result<RealTimeMetricsVO> getClusterRealTimeMetrics(@PathVariable Long logicalClusterId) {
LogicalClusterDO logicalClusterDO = logicalClusterMetadataManager.getLogicalCluster(logicalClusterId);
if (ValidateUtils.isNull(logicalClusterDO)) {
return Result.buildFrom(ResultStatus.CLUSTER_NOT_EXIST);
}
return new Result<>(CommonModelConverter.convert2RealTimeMetricsVO(
brokerService.getBrokerMetricsFromJmx(
logicalClusterDO.getClusterId(),
logicalClusterMetadataManager.getBrokerIdSet(logicalClusterId),
KafkaMetricsCollections.COMMON_DETAIL_METRICS
)
));
}
@ApiOperation(value = "集群历史流量")
@RequestMapping(value = "clusters/{logicalClusterId}/metrics-history", method = RequestMethod.GET)
@ResponseBody
public Result<List<NormalClusterMetricsVO>> getClusterMetricsHistory(@PathVariable Long logicalClusterId,
@RequestParam("startTime") Long startTime,
@RequestParam("endTime") Long endTime) {
LogicalClusterDO logicalClusterDO = logicalClusterMetadataManager.getLogicalCluster(logicalClusterId);
if (ValidateUtils.isNull(logicalClusterDO)) {
return Result.buildFrom(ResultStatus.CLUSTER_NOT_EXIST);
}
return new Result<>(ClusterModelConverter.convert2NormalClusterMetricsVOList(
logicalClusterService.getLogicalClusterMetricsFromDB(
logicalClusterDO,
new Date(startTime),
new Date(endTime)
)
));
}
@ApiOperation(value = "集群Broker列表", notes = "")
@RequestMapping(value = "clusters/{logicalClusterId}/brokers", method = RequestMethod.GET)
@ResponseBody
public Result<List<BrokerOverviewVO>> getBrokerOverview(@PathVariable Long logicalClusterId) {
LogicalClusterDO logicalClusterDO = logicalClusterMetadataManager.getLogicalCluster(logicalClusterId);
if (ValidateUtils.isNull(logicalClusterDO)) {
return Result.buildFrom(ResultStatus.CLUSTER_NOT_EXIST);
}
List<BrokerOverviewDTO> brokerOverviewDTOList = brokerService.getBrokerOverviewList(
logicalClusterDO.getClusterId(),
logicalClusterMetadataManager.getBrokerIdSet(logicalClusterId)
);
return new Result<>(ClusterModelConverter.convert2BrokerOverviewList(brokerOverviewDTOList, null));
}
@ApiOperation(value = "集群Topic列表")
@RequestMapping(value = "clusters/{logicalClusterId}/topics", method = RequestMethod.GET)
@ResponseBody
public Result<List<TopicOverviewVO>> getTopicOverview(@PathVariable Long logicalClusterId) {
LogicalClusterDO logicalClusterDO = logicalClusterMetadataManager.getLogicalCluster(logicalClusterId);
if (ValidateUtils.isNull(logicalClusterDO)) {
return Result.buildFrom(ResultStatus.CLUSTER_NOT_EXIST);
}
return new Result<>(CommonModelConverter.convert2TopicOverviewVOList(
logicalClusterId,
topicService.getTopicOverviewList(
logicalClusterDO.getClusterId(),
new ArrayList<>(logicalClusterMetadataManager.getTopicNameSet(logicalClusterId))
)
));
}
@ApiOperation(value = "集群限流信息")
@RequestMapping(value = "clusters/{logicalClusterId}/throttles", method = RequestMethod.GET)
@ResponseBody
public Result<List<TopicThrottleVO>> getThrottles(@PathVariable Long logicalClusterId) {
LogicalClusterDO logicalClusterDO = logicalClusterMetadataManager.getLogicalCluster(logicalClusterId);
if (ValidateUtils.isNull(logicalClusterDO)) {
return Result.buildFrom(ResultStatus.CLUSTER_NOT_EXIST);
}
return new Result<>(ClusterModelConverter.convert2TopicThrottleVOList(
throttleService.getThrottledTopicsFromJmx(
logicalClusterDO.getClusterId(),
logicalClusterMetadataManager.getBrokerIdSet(logicalClusterId),
Arrays.asList(KafkaClientEnum.values())
)
));
}
@ApiOperation(value = "集群Topic元信息列表", notes = "")
@RequestMapping(value = "clusters/{logicalClusterId}/topic-metadata", method = RequestMethod.GET)
@ResponseBody
public Result<List<TopicMetadataVO>> getTopicMetadatas(@PathVariable("logicalClusterId") Long logicalClusterId) {
LogicalClusterDO logicalClusterDO = logicalClusterMetadataManager.getLogicalCluster(logicalClusterId);
if (ValidateUtils.isNull(logicalClusterDO)) {
return Result.buildFrom(ResultStatus.CLUSTER_NOT_EXIST);
}
return new Result<>(ClusterModelConverter.convert2TopicMetadataVOList(
logicalClusterService.getTopicMetadatas(logicalClusterDO)
));
}
@ApiOperation(value = "集群Broker元信息列表", notes = "")
@RequestMapping(value = "clusters/{logicalClusterId}/broker-metadata", method = RequestMethod.GET)
@ResponseBody
public Result<List<BrokerMetadataVO>> getBrokerMetadatas(@PathVariable("logicalClusterId") Long logicalClusterId) {
LogicalClusterDO logicalClusterDO = logicalClusterMetadataManager.getLogicalCluster(logicalClusterId);
if (ValidateUtils.isNull(logicalClusterDO)) {
return Result.buildFrom(ResultStatus.CLUSTER_NOT_EXIST);
}
List<BrokerMetadata> metadataList = logicalClusterService.getBrokerMetadatas(logicalClusterDO);
List<BrokerMetadataVO> voList = new ArrayList<>();
for (BrokerMetadata brokerMetadata: metadataList) {
voList.add(new BrokerMetadataVO(brokerMetadata.getBrokerId(), brokerMetadata.getHost()));
}
return new Result<>(voList);
}
}


@@ -0,0 +1,72 @@
package com.xiaojukeji.kafka.manager.web.api.versionone.normal;
import com.xiaojukeji.kafka.manager.common.bizenum.*;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.utils.ListUtils;
import com.xiaojukeji.kafka.manager.service.utils.ConfigUtils;
import com.xiaojukeji.kafka.manager.common.utils.JsonUtils;
import com.xiaojukeji.kafka.manager.common.constant.ApiPrefix;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.RestController;
import springfox.documentation.annotations.ApiIgnore;
/**
* @author zengqiao
* @date 20/4/20
*/
@Api(tags = "Normal-Config相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V1_NORMAL_PREFIX)
public class NormalConfigController {
@Autowired
private ConfigUtils configUtils;
@ApiOperation(value = "集群类型", notes = "")
@RequestMapping(value = "configs/cluster-modes", method = RequestMethod.GET)
@ResponseBody
public Result getClusterModesEnum() {
return new Result<>(JsonUtils.toJson(ClusterModeEnum.class));
}
@ApiOperation(value = "集群套餐", notes = "")
@RequestMapping(value = "configs/cluster-combos", method = RequestMethod.GET)
@ResponseBody
public Result getClusterCombosEnum() {
return new Result<>(JsonUtils.toJson(ClusterComboEnum.class));
}
@ApiOperation(value = "数据中心", notes = "")
@RequestMapping(value = "configs/idc", method = RequestMethod.GET)
@ResponseBody
public Result getIDC() {
return new Result<>(ListUtils.string2StrList(configUtils.getIdc()));
}
@ApiIgnore
@ApiOperation(value = "数据中心集合", notes = "")
@RequestMapping(value = "configs/idcs", method = RequestMethod.GET)
@ResponseBody
public Result getInternetDataCenters() {
return new Result<>(JsonUtils.toJson(IDCEnum.class));
}
@ApiOperation(value = "峰值状态", notes = "")
@RequestMapping(value = "configs/peak-flow-status", method = RequestMethod.GET)
@ResponseBody
public Result getPeakFlowStatusEnum() {
return new Result<>(JsonUtils.toJson(PeakFlowStatusEnum.class));
}
@ApiOperation(value = "任务状态", notes = "")
@RequestMapping(value = "configs/task-status", method = RequestMethod.GET)
@ResponseBody
public Result getTaskStatusEnum() {
return new Result<>(JsonUtils.toJson(TaskStatusEnum.class));
}
}


@@ -0,0 +1,184 @@
package com.xiaojukeji.kafka.manager.web.api.versionone.normal;
import com.xiaojukeji.kafka.manager.common.bizenum.OffsetLocationEnum;
import com.xiaojukeji.kafka.manager.common.bizenum.OffsetPosEnum;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.ResultStatus;
import com.xiaojukeji.kafka.manager.common.entity.ao.PartitionOffsetDTO;
import com.xiaojukeji.kafka.manager.common.entity.ao.consumer.ConsumeDetailDTO;
import com.xiaojukeji.kafka.manager.common.entity.ao.consumer.ConsumerGroupDTO;
import com.xiaojukeji.kafka.manager.common.entity.dto.normal.TopicOffsetResetDTO;
import com.xiaojukeji.kafka.manager.common.entity.vo.normal.consumer.ConsumerGroupDetailVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.normal.consumer.ConsumerGroupVO;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.common.entity.pojo.ClusterDO;
import com.xiaojukeji.kafka.manager.service.cache.LogicalClusterMetadataManager;
import com.xiaojukeji.kafka.manager.service.service.ClusterService;
import com.xiaojukeji.kafka.manager.service.service.ConsumerService;
import com.xiaojukeji.kafka.manager.service.service.TopicService;
import com.xiaojukeji.kafka.manager.common.constant.ApiPrefix;
import com.xiaojukeji.kafka.manager.web.converters.ConsumerModelConverter;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.apache.kafka.common.TopicPartition;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
* @author zengqiao
* @date 20/4/8
*/
@Api(tags = "Normal-Consumer相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V1_NORMAL_PREFIX)
public class NormalConsumerController {
private final static Logger LOGGER = LoggerFactory.getLogger(NormalConsumerController.class);
@Autowired
private ClusterService clusterService;
@Autowired
private TopicService topicService;
@Autowired
private ConsumerService consumerService;
@Autowired
private LogicalClusterMetadataManager logicalClusterMetadataManager;
@ApiOperation(value = "查询消费Topic的消费组", notes = "")
@RequestMapping(value = "{clusterId}/consumers/{topicName}/consumer-groups", method = RequestMethod.GET)
@ResponseBody
public Result<List<ConsumerGroupVO>> getConsumeGroups(
@PathVariable Long clusterId,
@PathVariable String topicName,
@RequestParam(value = "isPhysicalClusterId", required = false) Boolean isPhysicalClusterId) {
Long physicalClusterId = logicalClusterMetadataManager.getPhysicalClusterId(clusterId, isPhysicalClusterId);
if (ValidateUtils.isNull(physicalClusterId)) {
return Result.buildFrom(ResultStatus.CLUSTER_NOT_EXIST);
}
return new Result<>(ConsumerModelConverter.convert2ConsumerGroupVOList(
consumerService.getConsumerGroupList(physicalClusterId, topicName))
);
}
@ApiOperation(value = "查询消费组的消费详情", notes = "")
@RequestMapping(value = "{clusterId}/consumers/{consumerGroup}/topics/{topicName}/consume-details",
method = RequestMethod.GET)
@ResponseBody
public Result<List<ConsumerGroupDetailVO>> getConsumeDetail(
@PathVariable Long clusterId,
@PathVariable String consumerGroup,
@PathVariable String topicName,
@RequestParam("location") String location,
@RequestParam(value = "isPhysicalClusterId", required = false) Boolean isPhysicalClusterId) {
if (ValidateUtils.isNull(location)) {
return Result.buildFrom(ResultStatus.PARAM_ILLEGAL);
}
Long physicalClusterId = logicalClusterMetadataManager.getPhysicalClusterId(clusterId, isPhysicalClusterId);
ClusterDO clusterDO = clusterService.getById(physicalClusterId);
if (ValidateUtils.isNull(clusterDO)) {
return Result.buildFrom(ResultStatus.CLUSTER_NOT_EXIST);
}
location = location.toLowerCase();
OffsetLocationEnum offsetStoreLocation = OffsetLocationEnum.getOffsetStoreLocation(location);
if (ValidateUtils.isNull(offsetStoreLocation)) {
return Result.buildFrom(ResultStatus.CG_LOCATION_ILLEGAL);
}
ConsumerGroupDTO consumeGroupDTO = new ConsumerGroupDTO(
clusterDO.getId(),
consumerGroup,
new ArrayList<>(),
offsetStoreLocation
);
try {
List<ConsumeDetailDTO> consumeDetailDTOList =
consumerService.getConsumeDetail(clusterDO, topicName, consumeGroupDTO);
return new Result<>(
ConsumerModelConverter.convert2ConsumerGroupDetailVO(
topicName,
consumerGroup,
location,
consumeDetailDTOList
)
);
} catch (Exception e) {
LOGGER.error("get consume detail failed, consumerGroup:{}.", consumeGroupDTO, e);
}
return Result.buildFrom(ResultStatus.OPERATION_FAILED);
}
@ApiOperation(value = "重置Topic消费偏移", notes = "")
@RequestMapping(value = "consumers/offsets", method = RequestMethod.PUT)
@ResponseBody
public Result<List<Result>> resetConsumeOffsets(@RequestBody TopicOffsetResetDTO dto) {
if (!dto.legal()) {
return Result.buildFrom(ResultStatus.PARAM_ILLEGAL);
}
Long physicalClusterId = logicalClusterMetadataManager.getPhysicalClusterId(
dto.getClusterId(),
dto.getIsPhysicalClusterId()
);
ClusterDO clusterDO = clusterService.getById(physicalClusterId);
if (ValidateUtils.isNull(clusterDO)) {
return Result.buildFrom(ResultStatus.CLUSTER_NOT_EXIST);
}
List<PartitionOffsetDTO> offsetDTOList = getPartitionOffset(clusterDO, dto);
if (ValidateUtils.isEmptyList(offsetDTOList)) {
return Result.buildFrom(ResultStatus.PARAM_ILLEGAL);
}
ConsumerGroupDTO consumerGroupDTO = new ConsumerGroupDTO(
physicalClusterId,
dto.getConsumerGroup(),
new ArrayList<>(),
OffsetLocationEnum.getOffsetStoreLocation(dto.getLocation())
);
List<Result> resultList = consumerService.resetConsumerOffset(
clusterDO,
dto.getTopicName(),
consumerGroupDTO,
offsetDTOList
);
for (Result result: resultList) {
if (ResultStatus.SUCCESS.getCode() != result.getCode()) {
return new Result<>(ResultStatus.OPERATION_FAILED.getCode(), resultList, "operator failed");
}
}
return new Result<>(resultList);
}
private List<PartitionOffsetDTO> getPartitionOffset(ClusterDO clusterDO, TopicOffsetResetDTO dto) {
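// Resolve reset targets in priority order: by OffsetPosEnum position, then by an explicit offset list, then by timestamp; otherwise return an empty list.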
OffsetPosEnum offsetPosEnum = OffsetPosEnum.getOffsetPosEnum(dto.getOffsetPos());
if (!ValidateUtils.isNull(dto.getOffsetPos()) && !OffsetPosEnum.NONE.equals(offsetPosEnum)) {
// Reset by the specified position: fetch the offsets at that position.
List<PartitionOffsetDTO> offsetDTOList = new ArrayList<>();
Map<TopicPartition, Long> offsetMap = topicService.getPartitionOffset(clusterDO, dto.getTopicName(), offsetPosEnum);
for (Map.Entry<TopicPartition, Long> entry : offsetMap.entrySet()) {
offsetDTOList.add(new PartitionOffsetDTO(entry.getKey().partition(), entry.getValue()));
}
return offsetDTOList;
}
// Offsets were specified explicitly; use them directly.
if (!ValidateUtils.isEmptyList(dto.getOffsetList())) {
return dto.getOffsetList();
}
// Fetch the offsets at the specified timestamp.
if (!ValidateUtils.isNullOrLessThanZero(dto.getTimestamp())) {
return topicService.getPartitionOffsetList(clusterDO, dto.getTopicName(), dto.getTimestamp());
}
return new ArrayList<>();
}
}


@@ -0,0 +1,51 @@
package com.xiaojukeji.kafka.manager.web.api.versionone.normal;
import com.xiaojukeji.kafka.manager.common.constant.ApiPrefix;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.ResultStatus;
import com.xiaojukeji.kafka.manager.common.entity.dto.normal.JmxSwitchDTO;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.common.zookeeper.znode.didi.TopicJmxSwitch;
import com.xiaojukeji.kafka.manager.service.cache.LogicalClusterMetadataManager;
import com.xiaojukeji.kafka.manager.service.service.ZookeeperService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
/**
* @author zengqiao
* @date 20/8/21
*/
@Api(tags = "RD-Jmx维度相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V1_NORMAL_PREFIX)
public class NormalJmxController {
@Autowired
private ZookeeperService zookeeperService;
@Autowired
private LogicalClusterMetadataManager logicalClusterMetadataManager;
@ApiOperation(value = "开启TopicJMX", notes="")
@RequestMapping(value = "jmx-switch", method = RequestMethod.POST)
@ResponseBody
public Result openTopicJmxSwitch(@RequestBody JmxSwitchDTO dto) {
if (ValidateUtils.isNull(dto) || !dto.paramLegal()) {
return Result.buildFrom(ResultStatus.PARAM_ILLEGAL);
}
Long physicalClusterId = logicalClusterMetadataManager.getPhysicalClusterId(
dto.getClusterId(),
dto.getPhysicalClusterId()
);
if (ValidateUtils.isNull(physicalClusterId)) {
return Result.buildFrom(ResultStatus.CLUSTER_NOT_EXIST);
}
return zookeeperService.openTopicJmx(physicalClusterId, dto.getTopicName(), new TopicJmxSwitch(
dto.getOpenTopicRequestMetrics(),
dto.getOpenAppIdTopicMetrics(),
dto.getOpenClientRequestMetrics()
));
}
}


@@ -0,0 +1,222 @@
package com.xiaojukeji.kafka.manager.web.api.versionone.normal;
import com.xiaojukeji.kafka.manager.common.entity.pojo.gateway.AppDO;
import com.xiaojukeji.kafka.manager.monitor.common.entry.bizenum.MonitorMetricNameEnum;
import com.xiaojukeji.kafka.manager.common.constant.Constant;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.ResultStatus;
import com.xiaojukeji.kafka.manager.monitor.common.entry.dto.MonitorRuleDTO;
import com.xiaojukeji.kafka.manager.monitor.common.entry.dto.MonitorSilenceDTO;
import com.xiaojukeji.kafka.manager.monitor.common.entry.vo.*;
import com.xiaojukeji.kafka.manager.monitor.common.monitor.*;
import com.xiaojukeji.kafka.manager.common.utils.CopyUtils;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.monitor.common.entry.Alert;
import com.xiaojukeji.kafka.manager.monitor.common.entry.NotifyGroup;
import com.xiaojukeji.kafka.manager.monitor.common.entry.Silence;
import com.xiaojukeji.kafka.manager.common.entity.pojo.MonitorRuleDO;
import com.xiaojukeji.kafka.manager.service.service.gateway.AppService;
import com.xiaojukeji.kafka.manager.monitor.MonitorService;
import com.xiaojukeji.kafka.manager.common.utils.JsonUtils;
import com.xiaojukeji.kafka.manager.common.utils.SpringTool;
import com.xiaojukeji.kafka.manager.common.constant.ApiPrefix;
import com.xiaojukeji.kafka.manager.web.converters.MonitorRuleConverter;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* @author zengqiao
* @date 20/5/4
*/
@Api(tags = "Normal-Monitor相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V1_NORMAL_PREFIX)
public class NormalMonitorController {
@Autowired
private AppService appService;
@Autowired
private MonitorService monitorService;
@ApiOperation(value = "监控枚举类", notes = "")
@RequestMapping(value = "monitor-enums", method = RequestMethod.GET)
@ResponseBody
public Result getMonitorEnums() {
Map<String, Object> data = new HashMap<>(1);
data.put("metricNames", JsonUtils.toJson(MonitorMetricNameEnum.class));
return new Result<>(data);
}
@ApiOperation(value = "添加监控策略", notes = "")
@RequestMapping(value = "monitor-strategies", method = RequestMethod.POST)
@ResponseBody
public Result createMonitor(@RequestBody MonitorRuleDTO dto) {
if (!dto.paramLegal()) {
return Result.buildFrom(ResultStatus.PARAM_ILLEGAL);
}
return Result.buildFrom(monitorService.createMonitorRule(dto, SpringTool.getUserName()));
}
@ApiOperation(value = "删除监控策略", notes = "")
@RequestMapping(value = "monitor-strategies", method = RequestMethod.DELETE)
@ResponseBody
public Result deleteMonitor(@RequestParam("monitorId") Long monitorId) {
return Result.buildFrom(monitorService.deleteMonitorRule(monitorId, SpringTool.getUserName()));
}
@ApiOperation(value = "修改监控策略", notes = "")
@RequestMapping(value = "monitor-strategies", method = RequestMethod.PUT)
@ResponseBody
public Result modifyMonitors(@RequestBody MonitorRuleDTO dto) {
if (!dto.paramLegal() || ValidateUtils.isNull(dto.getId())) {
return Result.buildFrom(ResultStatus.PARAM_ILLEGAL);
}
return Result.buildFrom(monitorService.modifyMonitorRule(dto, SpringTool.getUserName()));
}
@ApiOperation(value = "监控策略列表", notes = "")
@RequestMapping(value = "monitor-strategies", method = RequestMethod.GET)
@ResponseBody
public Result<List<MonitorRuleSummaryVO>> getMonitorRules() {
List<MonitorRuleSummary> monitorRuleSummaryList =
monitorService.getMonitorRules(SpringTool.getUserName());
if (ValidateUtils.isEmptyList(monitorRuleSummaryList)) {
return new Result<>(new ArrayList<>());
}
List<MonitorRuleSummaryVO> voList = new ArrayList<>();
for (MonitorRuleSummary summary: monitorRuleSummaryList) {
MonitorRuleSummaryVO vo = new MonitorRuleSummaryVO();
CopyUtils.copyProperties(vo, summary);
voList.add(vo);
}
return new Result<>(voList);
}
@ApiOperation(value = "监控策略详情", notes = "")
@RequestMapping(value = "monitor-strategies/{monitorId}/detail", method = RequestMethod.GET)
@ResponseBody
public Result<MonitorRuleDetailVO> getMonitorDetail(@PathVariable("monitorId") Long monitorId) {
MonitorRuleDO monitorRuleDO = monitorService.getById(monitorId);
if (ValidateUtils.isNull(monitorRuleDO)) {
return Result.buildFrom(ResultStatus.MONITOR_NOT_EXIST);
}
Result<MonitorRuleDTO> monitorRuleDTOResult = monitorService.getMonitorRuleDetail(monitorRuleDO);
if (!Constant.SUCCESS.equals(monitorRuleDTOResult.getCode())) {
return new Result<>(monitorRuleDTOResult.getCode(), monitorRuleDTOResult.getMessage());
}
MonitorRuleDTO monitorRuleDTO = monitorRuleDTOResult.getData();
AppDO appDO = appService.getByAppId(monitorRuleDTO.getAppId());
return new Result<>(MonitorRuleConverter.convert2MonitorRuleDetailVO(monitorRuleDO, monitorRuleDTO, appDO));
}
@ApiOperation(value = "告警列表", notes = "")
@RequestMapping(value = "monitor-alerts", method = RequestMethod.GET)
@ResponseBody
public Result<List<MonitorAlertVO>> getMonitorAlertHistory(@RequestParam("monitorId") Long monitorId,
@RequestParam("startTime") Long startTime,
@RequestParam("endTime") Long endTime) {
Result<List<Alert>> alertResult = monitorService.getMonitorAlertHistory(monitorId, startTime, endTime);
if (!Constant.SUCCESS.equals(alertResult.getCode())) {
return new Result<>(alertResult.getCode(), alertResult.getMessage());
}
return new Result<>(MonitorRuleConverter.convert2MonitorAlertVOList(alertResult.getData()));
}
@ApiOperation(value = "告警详情", notes = "")
@RequestMapping(value = "monitor-alerts/{alertId}/detail", method = RequestMethod.GET)
@ResponseBody
public Result<MonitorAlertDetailVO> getMonitorAlertDetail(@PathVariable("alertId") Long alertId) {
Result<MonitorAlertDetail> alertResult = monitorService.getMonitorAlertDetail(alertId);
if (!Constant.SUCCESS.equals(alertResult.getCode())) {
return new Result<>(alertResult.getCode(), alertResult.getMessage());
}
return new Result<>(MonitorRuleConverter.convert2MonitorAlertDetailVO(alertResult.getData()));
}
@ApiOperation(value = "告警屏蔽创建", notes = "")
@RequestMapping(value = "monitor-silences", method = RequestMethod.POST)
@ResponseBody
public Result createMonitorSilences(@RequestBody MonitorSilenceDTO dto) {
if (!dto.paramLegal()) {
return Result.buildFrom(ResultStatus.PARAM_ILLEGAL);
}
return monitorService.createSilence(dto, SpringTool.getUserName());
}
@ApiOperation(value = "告警屏蔽修改", notes = "")
@RequestMapping(value = "monitor-silences", method = RequestMethod.PUT)
@ResponseBody
public Result modifyMonitorSilences(@RequestBody MonitorSilenceDTO dto) {
if (!dto.paramLegal() || ValidateUtils.isNull(dto.getId())) {
return Result.buildFrom(ResultStatus.PARAM_ILLEGAL);
}
return monitorService.modifySilence(dto, SpringTool.getUserName());
}
@ApiOperation(value = "告警屏蔽删除", notes = "")
@RequestMapping(value = "monitor-silences", method = RequestMethod.DELETE)
@ResponseBody
public Result releaseMonitorSilences(@RequestParam("monitorId") Long monitorId,
@RequestParam("silenceId") Long silenceId) {
Boolean status = monitorService.releaseSilence(silenceId);
if (ValidateUtils.isNull(status) || !status) {
return Result.buildFrom(ResultStatus.CALL_MONITOR_SYSTEM_ERROR);
}
return new Result();
}
@ApiOperation(value = "告警屏蔽列表", notes = "")
@RequestMapping(value = "monitor-silences", method = RequestMethod.GET)
@ResponseBody
public Result<List<MonitorSilenceVO>> getMonitorSilences(@RequestParam("monitorId") Long monitorId) {
MonitorRuleDO monitorRuleDO = monitorService.getById(monitorId);
if (ValidateUtils.isNull(monitorRuleDO)) {
return Result.buildFrom(ResultStatus.MONITOR_NOT_EXIST);
}
Result<List<Silence>> listResult = monitorService.getSilences(monitorRuleDO.getStrategyId());
if (!Constant.SUCCESS.equals(listResult.getCode())) {
return new Result<>(listResult.getCode(), listResult.getMessage());
}
return new Result<>(MonitorRuleConverter.convert2MonitorSilenceVOList(monitorRuleDO, listResult.getData()));
}
@ApiOperation(value = "告警屏蔽详情", notes = "")
@RequestMapping(value = "monitor-silences/{silenceId}/detail", method = RequestMethod.GET)
@ResponseBody
public Result<MonitorSilenceVO> getMonitorSilence(@PathVariable("silenceId") Long silenceId) {
Silence silence = monitorService.getSilenceById(silenceId);
if (ValidateUtils.isNull(silence)) {
return Result.buildFrom(ResultStatus.CALL_MONITOR_SYSTEM_ERROR);
}
MonitorRuleDO monitorRuleDO = monitorService.getByStrategyId(silence.getStrategyId());
if (ValidateUtils.isNull(monitorRuleDO)) {
return Result.buildFrom(ResultStatus.MONITOR_NOT_EXIST);
}
return new Result<>(MonitorRuleConverter.convert2MonitorSilenceVO(monitorRuleDO, silence));
}
@ApiOperation(value = "告警组列表", notes = "")
@RequestMapping(value = "monitor-notify-groups", method = RequestMethod.GET)
@ResponseBody
public Result<List<MonitorNotifyGroupVO>> getNotifyGroups() {
List<NotifyGroup> notifyGroupList = monitorService.getNotifyGroups();
if (ValidateUtils.isNull(notifyGroupList)) {
return Result.buildFrom(ResultStatus.CALL_MONITOR_SYSTEM_ERROR);
}
return new Result<>(MonitorRuleConverter.convert2MonitorNotifyGroupVOList(notifyGroupList));
}
}
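Note: the endpoints above return the project's Result envelope and live under ApiPrefix.API_V1_NORMAL_PREFIX. A minimal client-side sketch follows for orientation only; the base URL and the concrete prefix value ("/api/v1/normal/") are assumptions, not values taken from this commit.

import org.springframework.web.client.RestTemplate;

public class MonitorStrategyClientSketch {
    // Assumed base path; the real prefix is whatever ApiPrefix.API_V1_NORMAL_PREFIX resolves to.
    private static final String BASE = "http://localhost:8080/api/v1/normal/";

    public static void main(String[] args) {
        RestTemplate rest = new RestTemplate();

        // List the current user's monitor strategies; the payload arrives wrapped in a Result envelope.
        String strategies = rest.getForObject(BASE + "monitor-strategies", String.class);
        System.out.println(strategies);

        // Delete a strategy; note that monitorId is a query parameter, not a path variable.
        rest.delete(BASE + "monitor-strategies?monitorId={id}", 123L);
    }
}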


@@ -0,0 +1,125 @@
package com.xiaojukeji.kafka.manager.web.api.versionone.normal;
import com.xiaojukeji.kafka.manager.bpm.OrderService;
import com.xiaojukeji.kafka.manager.bpm.common.OrderStatusEnum;
import com.xiaojukeji.kafka.manager.bpm.common.OrderTypeEnum;
import com.xiaojukeji.kafka.manager.bpm.common.entry.BaseOrderDetailData;
import com.xiaojukeji.kafka.manager.common.constant.Constant;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.ResultStatus;
import com.xiaojukeji.kafka.manager.bpm.common.entry.apply.OrderDTO;
import com.xiaojukeji.kafka.manager.bpm.common.handle.OrderHandleBaseDTO;
import com.xiaojukeji.kafka.manager.bpm.common.handle.OrderHandleBatchDTO;
import com.xiaojukeji.kafka.manager.common.entity.vo.normal.order.OrderResultVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.normal.order.OrderTypeVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.normal.order.OrderVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.normal.order.detail.OrderDetailBaseVO;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.common.entity.pojo.OrderDO;
import com.xiaojukeji.kafka.manager.common.utils.SpringTool;
import com.xiaojukeji.kafka.manager.common.constant.ApiPrefix;
import com.xiaojukeji.kafka.manager.web.converters.OrderConverter;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.ArrayList;
import java.util.List;
/**
* @author zengqiao
* @date 20/4/15
*/
@Api(tags = "Normal-工单相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V1_NORMAL_PREFIX)
public class NormalOrderController {
@Autowired
private OrderService orderService;
@ApiOperation(value = "工单类型", notes = "")
@RequestMapping(value = "orders/type-enums", method = RequestMethod.GET)
@ResponseBody
public Result<List<OrderTypeVO>> getOrderTypes() {
List<OrderTypeVO> voList = new ArrayList<>();
for (OrderTypeEnum elem : OrderTypeEnum.values()) {
voList.add(new OrderTypeVO(elem.getCode(), elem.getMessage()));
}
return new Result<>(voList);
}
@ApiOperation(value = "工单申请", notes = "")
@RequestMapping(value = "orders", method = RequestMethod.POST)
@ResponseBody
public Result<OrderVO> createOrder(@RequestBody OrderDTO dto) {
dto.setApplicant(SpringTool.getUserName());
Result result = orderService.createOrder(dto);
if (!Constant.SUCCESS.equals(result.getCode())) {
return result;
}
return new Result<>(OrderConverter.convert2OrderVO((OrderDO) result.getData()));
}
@ApiOperation(value = "工单撤销", notes = "")
@RequestMapping(value = "orders", method = RequestMethod.DELETE)
@ResponseBody
public Result cancelOrder(@RequestParam(value = "id") Long id) {
return Result.buildFrom(orderService.cancelOrder(id, SpringTool.getUserName()));
}
@ApiOperation(value = "工单申请列表", notes = "")
@RequestMapping(value = "orders", method = RequestMethod.GET)
@ResponseBody
public Result<List<OrderVO>> getOrderApplyList(@RequestParam(value = "status") Integer status) {
return new Result<>(OrderConverter.convert2OrderVOList(
orderService.getOrderApplyList(SpringTool.getUserName(), status))
);
}
@ApiOperation(value = "工单审核列表", notes = "")
@RequestMapping(value = "approvals", method = RequestMethod.GET)
@ResponseBody
    public Result<List<OrderVO>> getOrderApprovalList(@RequestParam(value = "status", required = false) Integer status) {
List<OrderDO> orderDOList = new ArrayList<>();
String userName = SpringTool.getUserName();
if (ValidateUtils.isNull(status)) {
orderDOList = orderService.getApprovalList(userName);
} else if (OrderStatusEnum.WAIT_DEAL.getCode().equals(status)) {
orderDOList = orderService.getWaitApprovalList(userName);
} else if (OrderStatusEnum.PASSED.getCode().equals(status)) {
orderDOList = orderService.getPassApprovalList(userName);
}
return new Result<>(OrderConverter.convert2OrderVOList(orderDOList));
}
@ApiOperation(value = "工单详情", notes = "")
@RequestMapping(value = "orders/{orderId}/detail", method = RequestMethod.GET)
@ResponseBody
public Result<OrderDetailBaseVO> getOrderDetail(@PathVariable Long orderId) {
Result result = orderService.getOrderDetailData(orderId);
if (!Constant.SUCCESS.equals(result.getCode())) {
return result;
}
return new Result<>(OrderConverter.convert2DetailBaseVO((BaseOrderDetailData) result.getData()));
}
@ApiOperation(value = "工单审批", notes = "")
@RequestMapping(value = "orders", method = RequestMethod.PUT)
@ResponseBody
public Result handleOrder(@RequestBody OrderHandleBaseDTO dto) {
return Result.buildFrom(orderService.handleOrder(dto));
}
@ApiOperation(value = "批量审批", notes = "")
@RequestMapping(value = "orders/batch", method = RequestMethod.PUT)
@ResponseBody
public Result<List<OrderResultVO>> handleOrderBatch(@RequestBody OrderHandleBatchDTO dto) {
if (ValidateUtils.isNull(dto) || !dto.paramLegal()) {
return Result.buildFrom(ResultStatus.PARAM_ILLEGAL);
}
return new Result<>(OrderConverter.convert2OrderResultVOList(
orderService.handleOrderBatch(dto, SpringTool.getUserName())
));
}
}
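Note: getOrderTypes() only walks OrderTypeEnum and never touches OrderService, so its routing can be exercised without a Spring context. The test below is an illustrative sketch only; it assumes ApiPrefix.API_V1_NORMAL_PREFIX resolves to "/api/v1/normal/" and that JUnit 5, Mockito and spring-test are on the classpath.

import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

import com.xiaojukeji.kafka.manager.bpm.OrderService;
import com.xiaojukeji.kafka.manager.web.api.versionone.normal.NormalOrderController;
import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
import org.springframework.test.util.ReflectionTestUtils;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;

public class NormalOrderControllerRoutingSketch {
    @Test
    public void orderTypeEnumsRouteResponds() throws Exception {
        NormalOrderController controller = new NormalOrderController();
        // getOrderTypes() does not call the service, but a mock keeps the sketch safe for other routes.
        ReflectionTestUtils.setField(controller, "orderService", Mockito.mock(OrderService.class));

        MockMvc mockMvc = MockMvcBuilders.standaloneSetup(controller).build();
        // "/api/v1/normal/" is an assumed value of ApiPrefix.API_V1_NORMAL_PREFIX.
        mockMvc.perform(get("/api/v1/normal/orders/type-enums"))
               .andExpect(status().isOk());
    }
}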


@@ -0,0 +1,322 @@
package com.xiaojukeji.kafka.manager.web.api.versionone.normal;
import com.xiaojukeji.kafka.manager.common.constant.Constant;
import com.xiaojukeji.kafka.manager.common.constant.KafkaMetricsCollections;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.ResultStatus;
import com.xiaojukeji.kafka.manager.common.entity.ao.topic.TopicPartitionDTO;
import com.xiaojukeji.kafka.manager.common.entity.dto.normal.TopicDataSampleDTO;
import com.xiaojukeji.kafka.manager.common.entity.metrics.BaseMetrics;
import com.xiaojukeji.kafka.manager.common.entity.vo.common.RealTimeMetricsVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.normal.TopicBusinessInfoVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.normal.topic.*;
import com.xiaojukeji.kafka.manager.common.utils.SpringTool;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.common.entity.pojo.ClusterDO;
import com.xiaojukeji.kafka.manager.common.entity.pojo.KafkaBillDO;
import com.xiaojukeji.kafka.manager.service.cache.LogicalClusterMetadataManager;
import com.xiaojukeji.kafka.manager.service.cache.PhysicalClusterMetadataManager;
import com.xiaojukeji.kafka.manager.service.service.*;
import com.xiaojukeji.kafka.manager.service.service.gateway.TopicConnectionService;
import com.xiaojukeji.kafka.manager.service.utils.ConfigUtils;
import com.xiaojukeji.kafka.manager.common.constant.ApiPrefix;
import com.xiaojukeji.kafka.manager.web.converters.CommonModelConverter;
import com.xiaojukeji.kafka.manager.web.converters.TopicModelConverter;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.*;
/**
* @author zengqiao
* @date 20/3/31
*/
@Api(tags = "Normal-Topic详情相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V1_NORMAL_PREFIX)
public class NormalTopicController {
@Autowired
private ClusterService clusterService;
@Autowired
private TopicService topicService;
@Autowired
private TopicManagerService topicManagerService;
@Autowired
private TopicConnectionService connectionService;
@Autowired
private LogicalClusterMetadataManager logicalClusterMetadataManager;
@Autowired
private ConfigUtils configUtils;
@Autowired
private KafkaBillService kafkaBillService;
@ApiOperation(value = "Topic基本信息", notes = "")
@RequestMapping(value = "{clusterId}/topics/{topicName}/basic-info", method = RequestMethod.GET)
@ResponseBody
public Result<TopicBasicVO> getTopicBasic(
@PathVariable Long clusterId,
@PathVariable String topicName,
@RequestParam(value = "isPhysicalClusterId", required = false) Boolean isPhysicalClusterId) {
Long physicalClusterId = logicalClusterMetadataManager.getPhysicalClusterId(clusterId, isPhysicalClusterId);
if (ValidateUtils.isNull(physicalClusterId)) {
return Result.buildFrom(ResultStatus.CLUSTER_NOT_EXIST);
}
return new Result<>(TopicModelConverter.convert2TopicBasicVO(
topicService.getTopicBasicDTO(physicalClusterId, topicName),
""
));
}
@ApiOperation(value = "Topic实时流量信息", notes = "")
@RequestMapping(value = "{clusterId}/topics/{topicName}/metrics", method = RequestMethod.GET)
@ResponseBody
public Result<RealTimeMetricsVO> getTopicMetrics(
@PathVariable Long clusterId,
@PathVariable String topicName,
@RequestParam(value = "isPhysicalClusterId", required = false) Boolean isPhysicalClusterId) {
Long physicalClusterId = logicalClusterMetadataManager.getPhysicalClusterId(clusterId, isPhysicalClusterId);
if (ValidateUtils.isNull(physicalClusterId)) {
return Result.buildFrom(ResultStatus.CLUSTER_NOT_EXIST);
}
return new Result<>(CommonModelConverter.convert2RealTimeMetricsVO(
topicService.getTopicMetricsFromJMX(
physicalClusterId,
topicName,
KafkaMetricsCollections.COMMON_DETAIL_METRICS,
true
)
));
}
@ApiOperation(value = "Topic历史流量信息", notes = "")
@RequestMapping(value = "{clusterId}/topics/{topicName}/metrics-history", method = RequestMethod.GET)
@ResponseBody
public Result<List<TopicMetricVO>> getTopicMetrics(
@PathVariable Long clusterId,
@PathVariable String topicName,
@RequestParam("startTime") Long startTime,
@RequestParam("endTime") Long endTime,
@RequestParam(value = "appId", required = false) String appId,
@RequestParam(value = "isPhysicalClusterId", required = false) Boolean isPhysicalClusterId) {
Long physicalClusterId = logicalClusterMetadataManager.getPhysicalClusterId(clusterId, isPhysicalClusterId);
if (ValidateUtils.isNull(physicalClusterId)) {
return Result.buildFrom(ResultStatus.CLUSTER_NOT_EXIST);
}
if (ValidateUtils.isBlank(appId)) {
return new Result<>(TopicModelConverter.convert2TopicMetricsVOList(
topicService.getTopicMetricsFromDB(
physicalClusterId,
topicName,
new Date(startTime),
new Date(endTime)
)
));
}
return new Result<>(TopicModelConverter.convert2TopicMetricVOList(
topicService.getTopicMetricsFromDB(
appId,
physicalClusterId,
topicName,
new Date(startTime),
new Date(endTime)
)
));
}
@ApiOperation(value = "Topic实时请求耗时信息", notes = "")
@RequestMapping(value = "{clusterId}/topics/{topicName}/request-time", method = RequestMethod.GET)
@ResponseBody
public Result<List<TopicRequestTimeDetailVO>> getTopicRequestMetrics(
@PathVariable Long clusterId,
@PathVariable String topicName,
@RequestParam(value = "isPhysicalClusterId", required = false) Boolean isPhysicalClusterId) {
Long physicalClusterId = logicalClusterMetadataManager.getPhysicalClusterId(clusterId, isPhysicalClusterId);
if (ValidateUtils.isNull(physicalClusterId)) {
return Result.buildFrom(ResultStatus.CLUSTER_NOT_EXIST);
}
BaseMetrics metrics = topicService.getTopicMetricsFromJMX(
physicalClusterId,
topicName,
KafkaMetricsCollections.TOPIC_REQUEST_TIME_DETAIL_PAGE_METRICS,
false
);
return new Result<>(TopicModelConverter.convert2TopicRequestTimeDetailVOList(metrics));
}
@ApiOperation(value = "Topic历史请求耗时信息", notes = "")
@RequestMapping(value = "{clusterId}/topics/{topicName}/request-time-history", method = RequestMethod.GET)
@ResponseBody
public Result<List<TopicRequestTimeVO>> getTopicRequestMetrics(
@PathVariable Long clusterId,
@PathVariable String topicName,
@RequestParam("startTime") Long startTime,
@RequestParam("endTime") Long endTime,
@RequestParam(value = "isPhysicalClusterId", required = false) Boolean isPhysicalClusterId) {
Long physicalClusterId = logicalClusterMetadataManager.getPhysicalClusterId(clusterId, isPhysicalClusterId);
if (ValidateUtils.isNull(physicalClusterId)) {
return Result.buildFrom(ResultStatus.CLUSTER_NOT_EXIST);
}
return new Result<>(TopicModelConverter.convert2TopicRequestTimeMetricsVOList(
topicService.getTopicRequestMetricsFromDB(
physicalClusterId,
topicName,
new Date(startTime),
new Date(endTime)
))
);
}
@ApiOperation(value = "Topic连接信息", notes = "")
@RequestMapping(value = "{clusterId}/topics/{topicName}/connections", method = RequestMethod.GET)
@ResponseBody
public Result<List<TopicConnectionVO>> getTopicConnections(
@PathVariable Long clusterId,
@PathVariable String topicName,
@RequestParam(value = "appId", required = false) String appId,
@RequestParam(value = "isPhysicalClusterId", required = false) Boolean isPhysicalClusterId) {
Long physicalClusterId = logicalClusterMetadataManager.getPhysicalClusterId(clusterId, isPhysicalClusterId);
if (ValidateUtils.isNull(physicalClusterId)) {
return Result.buildFrom(ResultStatus.CLUSTER_NOT_EXIST);
}
return new Result<>(TopicModelConverter.convert2TopicConnectionVOList(
connectionService.getByTopicName(
physicalClusterId,
topicName,
new Date(System.currentTimeMillis() - Constant.TOPIC_CONNECTION_LATEST_TIME_MS),
new Date()
)
));
}
@ApiOperation(value = "Topic分区信息", notes = "")
@RequestMapping(value = "{clusterId}/topics/{topicName}/partitions", method = RequestMethod.GET)
@ResponseBody
public Result<List<TopicPartitionVO>> getTopicPartitions(
@PathVariable Long clusterId,
@PathVariable String topicName,
@RequestParam(value = "isPhysicalClusterId", required = false) Boolean isPhysicalClusterId) {
Long physicalClusterId = logicalClusterMetadataManager.getPhysicalClusterId(clusterId, isPhysicalClusterId);
if (ValidateUtils.isNull(physicalClusterId)) {
return Result.buildFrom(ResultStatus.CLUSTER_NOT_EXIST);
}
ClusterDO clusterDO = clusterService.getById(physicalClusterId);
if (clusterDO == null || !PhysicalClusterMetadataManager.isTopicExist(physicalClusterId, topicName)) {
return Result.buildFrom(ResultStatus.TOPIC_NOT_EXIST);
}
List<TopicPartitionDTO> dtoList = topicService.getTopicPartitionDTO(clusterDO, topicName, true);
return new Result<>(TopicModelConverter.convert2TopicPartitionVOList(dtoList));
}
@ApiOperation(value = "Topic采样信息", notes = "")
@RequestMapping(value = "{clusterId}/topics/{topicName}/sample", method = RequestMethod.POST)
@ResponseBody
public Result<List<TopicDataSampleVO>> previewTopic(
@PathVariable Long clusterId,
@PathVariable String topicName,
@RequestBody TopicDataSampleDTO reqObj) {
reqObj.adjustConfig();
Long physicalClusterId = logicalClusterMetadataManager.getPhysicalClusterId(clusterId, reqObj.getIsPhysicalClusterId());
if (ValidateUtils.isNull(physicalClusterId)) {
return Result.buildFrom(ResultStatus.CLUSTER_NOT_EXIST);
}
ClusterDO clusterDO = clusterService.getById(physicalClusterId);
if (ValidateUtils.isNull(clusterDO)
|| !PhysicalClusterMetadataManager.isTopicExist(physicalClusterId, topicName)) {
return Result.buildFrom(ResultStatus.TOPIC_NOT_EXIST);
}
List<String> dataList = topicService.fetchTopicData(clusterDO, topicName, reqObj);
if (ValidateUtils.isNull(dataList)) {
return Result.buildFrom(ResultStatus.OPERATION_FAILED);
}
return new Result<>(TopicModelConverter.convert2TopicDataSampleVOList(dataList));
}
@ApiOperation(value = "Topic账单信息", notes = "")
@RequestMapping(value = "{clusterId}/topics/{topicName}/bills", method = RequestMethod.GET)
@ResponseBody
public Result<List<TopicBillVO>> getTopicBills(
@PathVariable Long clusterId,
@PathVariable String topicName,
@RequestParam("startTime") Long startTime,
@RequestParam("endTime") Long endTime,
@RequestParam(value = "isPhysicalClusterId", required = false) Boolean isPhysicalClusterId) {
Long physicalClusterId = logicalClusterMetadataManager.getPhysicalClusterId(clusterId, isPhysicalClusterId);
if (ValidateUtils.isNull(physicalClusterId)) {
return Result.buildFrom(ResultStatus.CLUSTER_NOT_EXIST);
}
List<KafkaBillDO> kafkaBillDOList =
kafkaBillService.getByTopicName(physicalClusterId, topicName, new Date(startTime), new Date(endTime));
List<TopicBillVO> voList = new ArrayList<>();
for (KafkaBillDO kafkaBillDO: kafkaBillDOList) {
TopicBillVO vo = new TopicBillVO();
vo.setQuota(kafkaBillDO.getQuota().longValue());
vo.setCost(kafkaBillDO.getCost());
vo.setGmtMonth(kafkaBillDO.getGmtDay());
voList.add(vo);
}
return new Result<>(voList);
}
@ApiOperation(value = "获取Topic业务信息", notes = "")
@RequestMapping(value = "{clusterId}/topics/{topicName}/business", method = RequestMethod.GET)
@ResponseBody
public Result<TopicBusinessInfoVO> getTopicBusinessInfo(
@PathVariable Long clusterId,
@PathVariable String topicName,
@RequestParam(value = "isPhysicalClusterId", required = false) Boolean isPhysicalClusterId) {
Long physicalClusterId = logicalClusterMetadataManager.getPhysicalClusterId(clusterId, isPhysicalClusterId);
if (ValidateUtils.isNull(physicalClusterId)) {
return Result.buildFrom(ResultStatus.CLUSTER_NOT_EXIST);
}
return new Result<>(TopicModelConverter.convert2TopicBusinessInfoVO(
topicManagerService.getTopicBusinessInfo(physicalClusterId, topicName)
));
}
@ApiOperation(value = "Topic有权限的应用信息", notes = "")
@RequestMapping(value = {"{clusterId}/topics/{topicName}/apps"}, method = RequestMethod.GET)
@ResponseBody
public Result<List<TopicAuthorizedAppVO>> getTopicAuthorizedApps(@PathVariable Long clusterId,
@PathVariable String topicName,
@RequestParam(value = "isPhysicalClusterId", required = false) Boolean isPhysicalClusterId) {
Long physicalClusterId = logicalClusterMetadataManager.getPhysicalClusterId(clusterId, isPhysicalClusterId);
if (ValidateUtils.isNull(physicalClusterId)) {
return Result.buildFrom(ResultStatus.CLUSTER_NOT_EXIST);
}
return new Result<>(TopicModelConverter.convert2TopicAuthorizedAppVOList(
topicManagerService.getTopicAuthorizedApps(physicalClusterId, topicName))
);
}
@ApiOperation(value = "Topic我的应用信息", notes = "")
@RequestMapping(value = {"{clusterId}/topics/{topicName}/my-apps"}, method = RequestMethod.GET)
@ResponseBody
public Result<List<TopicMyAppVO>> getTopicMyApps(@PathVariable Long clusterId,
@PathVariable String topicName,
@RequestParam(value = "isPhysicalClusterId", required = false) Boolean isPhysicalClusterId) {
Long physicalClusterId = logicalClusterMetadataManager.getPhysicalClusterId(clusterId, isPhysicalClusterId);
if (ValidateUtils.isNull(physicalClusterId)) {
return Result.buildFrom(ResultStatus.CLUSTER_NOT_EXIST);
}
return new Result<>(TopicModelConverter.convert2TopicMineAppVOList(
topicManagerService.getTopicMineApps(physicalClusterId, topicName, SpringTool.getUserName()))
);
}
}
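Note: the *-history and bills endpoints above take startTime/endTime as epoch-millisecond longs which the controller converts to java.util.Date. A minimal sketch of building such a window on the caller side (the six-hour span and the query shape are illustrative assumptions):

import java.time.Duration;
import java.time.Instant;

public class MetricsWindowSketch {
    public static void main(String[] args) {
        // A six-hour window ending now, expressed as the epoch-millisecond longs the endpoints expect.
        Instant end = Instant.now();
        Instant start = end.minus(Duration.ofHours(6));

        String query = String.format("startTime=%d&endTime=%d&isPhysicalClusterId=false",
                start.toEpochMilli(), end.toEpochMilli());

        // e.g. GET {prefix}/{clusterId}/topics/{topicName}/metrics-history?<query>
        System.out.println(query);
    }
}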


@@ -0,0 +1,109 @@
package com.xiaojukeji.kafka.manager.web.api.versionone.normal;
import com.xiaojukeji.kafka.manager.common.constant.Constant;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.ResultStatus;
import com.xiaojukeji.kafka.manager.common.entity.dto.normal.TopicModifyDTO;
import com.xiaojukeji.kafka.manager.common.entity.dto.normal.TopicRetainDTO;
import com.xiaojukeji.kafka.manager.common.entity.vo.normal.topic.TopicExpiredVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.normal.topic.TopicMineVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.normal.topic.TopicVO;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.service.cache.LogicalClusterMetadataManager;
import com.xiaojukeji.kafka.manager.service.service.TopicExpiredService;
import com.xiaojukeji.kafka.manager.service.service.TopicManagerService;
import com.xiaojukeji.kafka.manager.common.utils.SpringTool;
import com.xiaojukeji.kafka.manager.common.constant.ApiPrefix;
import com.xiaojukeji.kafka.manager.web.utils.ResultCache;
import com.xiaojukeji.kafka.manager.web.converters.TopicMineConverter;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.List;
/**
* @author zengqiao
* @date 20/3/31
*/
@Api(tags = "Normal-Topic操作相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V1_NORMAL_PREFIX)
public class NormalTopicMineController {
@Autowired
private TopicExpiredService topicExpiredService;
@Autowired
private TopicManagerService topicManagerService;
@Autowired
private LogicalClusterMetadataManager logicalClusterMetadataManager;
@ApiOperation(value = "我的Topic", notes = "")
@RequestMapping(value = "topics/mine", method = RequestMethod.GET)
@ResponseBody
public Result<List<TopicMineVO>> getMineTopics() {
return new Result<>(TopicMineConverter.convert2TopicMineVOList(
topicManagerService.getMyTopics(SpringTool.getUserName())
));
}
@ApiOperation(value = "全部Topic", notes = "")
@RequestMapping(value = "topics", method = RequestMethod.GET)
@ResponseBody
public Result<List<TopicVO>> getTopics() {
Result r = ResultCache.get(ApiPrefix.API_V1_NORMAL_PREFIX + "topics");
if (ValidateUtils.isNull(r) || !Constant.SUCCESS.equals(r.getCode())) {
r = new Result<>(TopicMineConverter.convert2TopicVOList(
topicManagerService.getTopics(SpringTool.getUserName())
));
ResultCache.put(ApiPrefix.API_V1_NORMAL_PREFIX + "topics", r);
}
return r;
}
@ApiOperation(value = "修改Topic信息", notes = "延长保留, 修改基本信息")
@RequestMapping(value = "topics", method = RequestMethod.PUT)
@ResponseBody
public Result modifyTopic(@RequestBody TopicModifyDTO dto) {
if (!dto.paramLegal()) {
return Result.buildFrom(ResultStatus.PARAM_ILLEGAL);
}
Long physicalClusterId = logicalClusterMetadataManager.getPhysicalClusterId(dto.getClusterId());
if (ValidateUtils.isNull(physicalClusterId)) {
return Result.buildFrom(ResultStatus.CLUSTER_NOT_EXIST);
}
return Result.buildFrom(
topicManagerService.modifyTopic(
physicalClusterId,
dto.getTopicName(),
dto.getDescription(),
SpringTool.getUserName()
)
);
}
@ApiOperation(value = "过期Topic信息", notes = "")
@RequestMapping(value = "topics/expired", method = RequestMethod.GET)
@ResponseBody
public Result<List<TopicExpiredVO>> getExpiredTopics() {
return new Result<>(TopicMineConverter.convert2TopicExpiredVOList(
topicExpiredService.getExpiredTopicDataList(SpringTool.getUserName())
));
}
@ApiOperation(value = "过期Topic保留", notes = "")
@RequestMapping(value = "topics/expired", method = RequestMethod.PUT)
@ResponseBody
public Result retainExpiredTopic(@RequestBody TopicRetainDTO dto) {
Long physicalClusterId = logicalClusterMetadataManager.getPhysicalClusterId(dto.getClusterId());
if (ValidateUtils.isNull(physicalClusterId)) {
return Result.buildFrom(ResultStatus.CLUSTER_NOT_EXIST);
}
return Result.buildFrom(
topicExpiredService.retainExpiredTopic(physicalClusterId, dto.getTopicName(), dto.getRetainDays())
);
}
}
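Note: getTopics() consults ResultCache before querying TopicManagerService and refreshes the cache when the entry is missing or unsuccessful. The ResultCache implementation is not part of this section; the class below is only an assumption-labelled sketch of a TTL-style cache with the same get/put shape (the 60-second TTL is invented for illustration).

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

import com.xiaojukeji.kafka.manager.common.entity.Result;

public class TtlResultCacheSketch {
    // Hypothetical TTL; the real ResultCache may use a different eviction policy entirely.
    private static final long TTL_MS = 60_000L;
    private static final Map<String, CacheEntry> CACHE = new ConcurrentHashMap<>();

    private static class CacheEntry {
        final Result<?> value;
        final long expiresAtMs;
        CacheEntry(Result<?> value, long expiresAtMs) {
            this.value = value;
            this.expiresAtMs = expiresAtMs;
        }
    }

    public static Result<?> get(String key) {
        CacheEntry entry = CACHE.get(key);
        if (entry == null || System.currentTimeMillis() > entry.expiresAtMs) {
            CACHE.remove(key);
            return null;   // the caller recomputes and calls put(), as getTopics() does
        }
        return entry.value;
    }

    public static void put(String key, Result<?> value) {
        CACHE.put(key, new CacheEntry(value, System.currentTimeMillis() + TTL_MS));
    }
}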


@@ -0,0 +1,72 @@
package com.xiaojukeji.kafka.manager.web.api.versionone.op;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.ResultStatus;
import com.xiaojukeji.kafka.manager.common.entity.dto.rd.ClusterDTO;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.service.service.ClusterService;
import com.xiaojukeji.kafka.manager.common.utils.SpringTool;
import com.xiaojukeji.kafka.manager.common.constant.ApiPrefix;
import com.xiaojukeji.kafka.manager.web.converters.ClusterModelConverter;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
/**
* @author zengqiao
* @date 20/4/23
*/
@Api(tags = "OP-Cluster维度相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V1_OP_PREFIX)
public class OpClusterController {
@Autowired
private ClusterService clusterService;
@ApiOperation(value = "接入集群")
@RequestMapping(value = "clusters", method = RequestMethod.POST)
@ResponseBody
public Result addNew(@RequestBody ClusterDTO reqObj) {
if (ValidateUtils.isNull(reqObj) || !reqObj.legal()) {
return Result.buildFrom(ResultStatus.PARAM_ILLEGAL);
}
return Result.buildFrom(
clusterService.addNew(
ClusterModelConverter.convert2ClusterDO(reqObj),
SpringTool.getUserName()
)
);
}
@ApiOperation(value = "删除集群")
@RequestMapping(value = "clusters", method = RequestMethod.DELETE)
@ResponseBody
public Result delete(@RequestParam(value = "clusterId") Long clusterId) {
return Result.buildFrom(clusterService.deleteById(clusterId));
}
@ApiOperation(value = "修改集群信息")
@RequestMapping(value = "clusters", method = RequestMethod.PUT)
@ResponseBody
public Result modify(@RequestBody ClusterDTO reqObj) {
if (ValidateUtils.isNull(reqObj) || !reqObj.legal() || ValidateUtils.isNull(reqObj.getClusterId())) {
return Result.buildFrom(ResultStatus.PARAM_ILLEGAL);
}
ResultStatus rs = clusterService.updateById(
ClusterModelConverter.convert2ClusterDO(reqObj),
SpringTool.getUserName()
);
return Result.buildFrom(rs);
}
@ApiOperation(value = "开启|关闭集群监控")
@RequestMapping(value = "clusters/{clusterId}/monitor", method = RequestMethod.PUT)
@ResponseBody
public Result modifyStatus(@PathVariable Long clusterId,
@RequestParam("status") Integer status) {
return Result.buildFrom(
clusterService.modifyStatus(clusterId, status, SpringTool.getUserName())
);
}
}


@@ -0,0 +1,148 @@
package com.xiaojukeji.kafka.manager.web.api.versionone.op;
import com.xiaojukeji.kafka.manager.common.constant.ApiPrefix;
import com.xiaojukeji.kafka.manager.common.constant.Constant;
import com.xiaojukeji.kafka.manager.common.entity.pojo.ClusterDO;
import com.xiaojukeji.kafka.manager.common.entity.pojo.ClusterTaskDO;
import com.xiaojukeji.kafka.manager.common.entity.vo.op.task.*;
import com.xiaojukeji.kafka.manager.kcm.common.entry.ao.ClusterTaskStatus;
import com.xiaojukeji.kafka.manager.kcm.common.bizenum.ClusterTaskTypeEnum;
import com.xiaojukeji.kafka.manager.common.bizenum.KafkaFileEnum;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.ResultStatus;
import com.xiaojukeji.kafka.manager.kcm.common.entry.dto.ClusterTaskActionDTO;
import com.xiaojukeji.kafka.manager.kcm.common.entry.dto.AbstractClusterTaskDTO;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.common.entity.pojo.KafkaFileDO;
import com.xiaojukeji.kafka.manager.kcm.ClusterTaskService;
import com.xiaojukeji.kafka.manager.kcm.KafkaFileService;
import com.xiaojukeji.kafka.manager.common.utils.JsonUtils;
import com.xiaojukeji.kafka.manager.common.utils.SpringTool;
import com.xiaojukeji.kafka.manager.service.cache.PhysicalClusterMetadataManager;
import com.xiaojukeji.kafka.manager.service.service.ClusterService;
import com.xiaojukeji.kafka.manager.web.converters.ClusterTaskModelConverter;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.*;
import java.util.stream.Collectors;
/**
* @author zengqiao
* @date 20/4/26
*/
@Api(tags = "OP-Cluster升级部署相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V1_OP_PREFIX)
public class OpClusterTaskController {
@Autowired
private ClusterService clusterService;
@Autowired
private KafkaFileService kafkaFileService;
@Autowired
private ClusterTaskService clusterTaskService;
@ApiOperation(value = "集群任务类型", notes = "")
@RequestMapping(value = "cluster-tasks/enums", method = RequestMethod.GET)
@ResponseBody
public Result getClusterTaskEnums() {
Map<String, Object> enumMap = new HashMap<>(1);
enumMap.put(AbstractClusterTaskDTO.TASK_TYPE_PROPERTY_FIELD_NAME, JsonUtils.toJson(ClusterTaskTypeEnum.class));
return new Result<>(enumMap);
}
@ApiOperation(value = "创建集群任务", notes = "")
@RequestMapping(value = "cluster-tasks", method = RequestMethod.POST)
@ResponseBody
public Result createTask(@RequestBody AbstractClusterTaskDTO dto) {
dto.setKafkaRoleBrokerHostMap(PhysicalClusterMetadataManager.getKafkaRoleBrokerHostMap(dto.getClusterId()));
dto.setKafkaFileBaseUrl(kafkaFileService.getDownloadBaseUrl());
return clusterTaskService.createTask(dto, SpringTool.getUserName());
}
@ApiOperation(value = "集群任务列表", notes = "")
@RequestMapping(value = "cluster-tasks", method = RequestMethod.GET)
@ResponseBody
public Result<List<ClusterTaskVO>> getTaskList() {
List<ClusterTaskDO> doList = clusterTaskService.listAll();
if (ValidateUtils.isNull(doList)) {
return Result.buildFrom(ResultStatus.MYSQL_ERROR);
}
Map<Long, String> clusterNameMap = clusterService.listAll().stream().collect(
                Collectors.toMap(ClusterDO::getId, ClusterDO::getClusterName, (name1, name2) -> name2)
);
return new Result<>(ClusterTaskModelConverter.convert2ClusterTaskVOList(doList, clusterNameMap));
}
@ApiOperation(value = "触发集群任务", notes = "")
@RequestMapping(value = "cluster-tasks", method = RequestMethod.PUT)
@ResponseBody
public Result executeTask(@RequestBody ClusterTaskActionDTO dto) {
return Result.buildFrom(clusterTaskService.executeTask(dto.getTaskId(), dto.getAction(), dto.getHostname()));
}
@ApiOperation(value = "集群任务元信息")
@RequestMapping(value = "cluster-tasks/{taskId}/metadata", method = RequestMethod.GET)
@ResponseBody
public Result<ClusterTaskMetadataVO> getTaskMetadata(@PathVariable Long taskId) {
ClusterTaskDO clusterTaskDO = clusterTaskService.getById(taskId);
if (ValidateUtils.isNull(clusterTaskDO)) {
return Result.buildFrom(ResultStatus.RESOURCE_NOT_EXIST);
}
return new Result<>(ClusterTaskModelConverter.convert2ClusterTaskMetadataVO(
clusterTaskDO,
clusterService.getById(clusterTaskDO.getClusterId()),
kafkaFileService.getFileByFileName(clusterTaskDO.getServerProperties())
));
}
@ApiOperation(value = "集群任务状态", notes = "整个任务的状态")
@RequestMapping(value = "cluster-tasks/{taskId}/status", method = RequestMethod.GET)
@ResponseBody
public Result<ClusterTaskStatusVO> getTaskStatus(@PathVariable Long taskId) {
Result<ClusterTaskStatus> dtoResult = clusterTaskService.getTaskStatus(taskId);
if (!Constant.SUCCESS.equals(dtoResult.getCode())) {
return new Result<>(dtoResult.getCode(), dtoResult.getMessage());
}
return new Result<>(ClusterTaskModelConverter.convert2ClusterTaskStatusVO(
dtoResult.getData(),
PhysicalClusterMetadataManager.getBrokerHostKafkaRoleMap(dtoResult.getData().getClusterId())
));
}
@ApiOperation(value = "集群任务日志", notes = "具体机器的日志")
@RequestMapping(value = "cluster-tasks/{taskId}/log", method = RequestMethod.GET)
@ResponseBody
public Result<String> getTaskLog(@PathVariable Long taskId,
@RequestParam("hostname") String hostname) {
return clusterTaskService.getTaskLog(taskId, hostname);
}
@ApiOperation(value = "文件选择", notes = "")
@RequestMapping(value = "cluster-tasks/kafka-files", method = RequestMethod.GET)
@ResponseBody
public Result<List<ClusterTaskKafkaFilesVO>> getKafkaFiles(
@RequestParam(value = "clusterId", required = false) Long clusterId) {
List<KafkaFileDO> kafkaFileDOList = kafkaFileService.getKafkaFiles();
if (ValidateUtils.isEmptyList(kafkaFileDOList)) {
return new Result<>();
}
List<ClusterTaskKafkaFilesVO> voList = new ArrayList<>();
for (KafkaFileDO kafkaFileDO: kafkaFileDOList) {
if (KafkaFileEnum.SERVER_CONFIG.getCode().equals(kafkaFileDO.getFileType())
&& !kafkaFileDO.getClusterId().equals(clusterId)) {
continue;
}
ClusterTaskKafkaFilesVO vo = new ClusterTaskKafkaFilesVO();
vo.setFileName(kafkaFileDO.getFileName());
vo.setFileMd5(kafkaFileDO.getFileMd5());
vo.setFileType(kafkaFileDO.getFileType());
voList.add(vo);
}
return new Result<>(voList);
}
}
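Note: getTaskList() builds a clusterId-to-clusterName lookup with Collectors.toMap plus a merge function, so a duplicate cluster id cannot abort the stream with an IllegalStateException. A self-contained illustration of that collector shape (the Cluster type here is invented for the example; it is not ClusterDO):

import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class ToMapMergeSketch {
    // Hypothetical stand-in for ClusterDO, only to keep the example self-contained.
    static class Cluster {
        final Long id;
        final String name;
        Cluster(Long id, String name) { this.id = id; this.name = name; }
        Long getId() { return id; }
        String getName() { return name; }
    }

    public static void main(String[] args) {
        List<Cluster> clusters = Arrays.asList(
                new Cluster(1L, "cluster-a"),
                new Cluster(2L, "cluster-b"),
                new Cluster(2L, "cluster-b-duplicate"));

        // The third argument resolves key collisions (last value wins), mirroring the merge function in getTaskList().
        Map<Long, String> nameById = clusters.stream().collect(
                Collectors.toMap(Cluster::getId, Cluster::getName, (name1, name2) -> name2));

        System.out.println(nameById);   // {1=cluster-a, 2=cluster-b-duplicate}
    }
}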


@@ -0,0 +1,66 @@
package com.xiaojukeji.kafka.manager.web.api.versionone.op;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.service.service.ClusterService;
import com.xiaojukeji.kafka.manager.service.service.ExpertService;
import com.xiaojukeji.kafka.manager.common.constant.ApiPrefix;
import com.xiaojukeji.kafka.manager.web.converters.ExpertConverter;
import com.xiaojukeji.kafka.manager.common.entity.vo.op.expert.*;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.List;
/**
* @author zengqiao
* @date 20/3/20
*/
@Api(tags = "OP-专家服务相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V1_OP_PREFIX)
public class OpExpertController {
@Autowired
private ExpertService expertService;
@Autowired
private ClusterService clusterService;
@ApiOperation(value = "热点Topic(Region内)", notes = "")
@RequestMapping(value = "expert/regions/hot-topics", method = RequestMethod.GET)
@ResponseBody
public Result<List<RegionHotTopicVO>> getRegionHotTopics() {
return new Result<>(
ExpertConverter.convert2RegionHotTopicVOList(expertService.getRegionHotTopics())
);
}
@ApiOperation(value = "Topic分区不足", notes = "")
@RequestMapping(value = "expert/topics/insufficient-partitions", method = RequestMethod.GET)
@ResponseBody
public Result<List<PartitionInsufficientTopicVO>> getPartitionInsufficientTopics() {
return new Result<>(ExpertConverter.convert2PartitionInsufficientTopicVOList(
expertService.getPartitionInsufficientTopics()
));
}
@ApiOperation(value = "Topic流量异常诊断", notes = "")
@RequestMapping(value = "expert/topics/anomaly-flow", method = RequestMethod.GET)
@ResponseBody
public Result<List<AnomalyFlowTopicVO>> getAnomalyFlowTopics(@RequestParam(value = "timestamp") Long timestamp) {
return new Result<>(ExpertConverter.convert2AnomalyFlowTopicVOList(
expertService.getAnomalyFlowTopics(timestamp)
));
}
@ApiOperation(value = "过期Topic", notes = "")
@RequestMapping(value = "expert/topics/expired", method = RequestMethod.GET)
@ResponseBody
public Result<List<ExpiredTopicVO>> getExpiredTopics() {
return new Result<>(ExpertConverter.convert2ExpiredTopicVOList(
expertService.getExpiredTopics(),
clusterService.list())
);
}
}


@@ -0,0 +1,104 @@
package com.xiaojukeji.kafka.manager.web.api.versionone.op;
import com.xiaojukeji.kafka.manager.common.bizenum.TopicReassignActionEnum;
import com.xiaojukeji.kafka.manager.common.constant.Constant;
import com.xiaojukeji.kafka.manager.common.entity.ResultStatus;
import com.xiaojukeji.kafka.manager.common.entity.ao.reassign.ReassignStatus;
import com.xiaojukeji.kafka.manager.common.entity.dto.op.reassign.ReassignExecSubDTO;
import com.xiaojukeji.kafka.manager.common.entity.dto.op.reassign.ReassignTopicDTO;
import com.xiaojukeji.kafka.manager.common.entity.vo.op.reassign.ReassignTopicStatusVO;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.common.utils.SpringTool;
import com.xiaojukeji.kafka.manager.common.constant.ApiPrefix;
import com.xiaojukeji.kafka.manager.common.entity.dto.op.reassign.ReassignExecDTO;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.service.service.ReassignService;
import com.xiaojukeji.kafka.manager.common.entity.vo.op.reassign.ReassignTaskVO;
import com.xiaojukeji.kafka.manager.web.converters.ReassignModelConverter;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.List;
/**
 * Topic reassignment related APIs
* @author zengqiao
* @date 19/4/3
*/
@Api(tags = "OP-Topic迁移相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V1_OP_PREFIX)
public class OpReassignController {
@Autowired
private ReassignService reassignService;
@ApiOperation(value = "创建迁移任务", notes = "")
@RequestMapping(value = {"reassign-tasks"}, method = RequestMethod.POST)
@ResponseBody
public Result createReassignTopicTask(@RequestBody List<ReassignTopicDTO> dtoList) {
if (ValidateUtils.isEmptyList(dtoList)) {
return Result.buildFrom(ResultStatus.PARAM_ILLEGAL);
}
for (ReassignTopicDTO dto: dtoList) {
if (!dto.paramLegal()) {
return Result.buildFrom(ResultStatus.PARAM_ILLEGAL);
}
}
return Result.buildFrom(reassignService.createTask(dtoList, SpringTool.getUserName()));
}
@ApiOperation(value = "操作迁移任务[启动|修改|取消]", notes = "")
@RequestMapping(value = {"reassign-tasks"}, method = RequestMethod.PUT)
@ResponseBody
public Result executeReassignTopicTask(@RequestBody ReassignExecDTO dto) {
if (ValidateUtils.isNull(dto) || !dto.paramLegal()) {
return Result.buildFrom(ResultStatus.PARAM_ILLEGAL);
}
TopicReassignActionEnum action = TopicReassignActionEnum.getByAction(dto.getAction());
if (ValidateUtils.isNull(action)) {
return Result.buildFrom(ResultStatus.PARAM_ILLEGAL);
}
return Result.buildFrom(reassignService.modifyTask(dto, action));
}
@ApiOperation(value = "操作迁移子任务[修改|取消]", notes = "")
@RequestMapping(value = {"reassign-tasks/sub-tasks"}, method = RequestMethod.PUT)
@ResponseBody
public Result executeReassignTopicTask(@RequestBody ReassignExecSubDTO dto) {
if (ValidateUtils.isNull(dto) || !dto.paramLegal()) {
return Result.buildFrom(ResultStatus.PARAM_ILLEGAL);
}
return Result.buildFrom(reassignService.modifySubTask(dto));
}
@ApiOperation(value = "迁移任务列表", notes = "")
@RequestMapping(value = {"reassign-tasks"}, method = RequestMethod.GET)
@ResponseBody
public Result<List<ReassignTaskVO>> getReassignTasks() {
return new Result<>(ReassignModelConverter.convert2ReassignTaskVOList(
reassignService.getReassignTaskList()
));
}
@ApiOperation(value = "迁移任务信息", notes = "")
@RequestMapping(value = {"reassign-tasks/{taskId}/detail"}, method = RequestMethod.GET)
@ResponseBody
public Result<ReassignTaskVO> getReassignTaskDetail(@PathVariable Long taskId) {
return new Result<>(ReassignModelConverter.convert2ReassignTaskVO(
taskId, reassignService.getTask(taskId)
));
}
@ApiOperation(value = "迁移任务状态", notes = "")
@RequestMapping(value = {"reassign-tasks/{taskId}/status"}, method = RequestMethod.GET)
@ResponseBody
public Result<List<ReassignTopicStatusVO>> getReassignTaskStatus(@PathVariable Long taskId) {
Result<List<ReassignStatus>> statusResult = reassignService.getReassignStatus(taskId);
if (!Constant.SUCCESS.equals(statusResult.getCode())) {
return new Result<>(statusResult.getCode(), statusResult.getMessage());
}
return new Result<>(ReassignModelConverter.convert2ReassignTopicStatusVOList(statusResult.getData()));
}
}


@@ -0,0 +1,222 @@
package com.xiaojukeji.kafka.manager.web.api.versionone.op;
import com.xiaojukeji.kafka.manager.common.bizenum.RebalanceDimensionEnum;
import com.xiaojukeji.kafka.manager.common.bizenum.TaskStatusEnum;
import com.xiaojukeji.kafka.manager.common.constant.Constant;
import com.xiaojukeji.kafka.manager.common.constant.KafkaConstant;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.ResultStatus;
import com.xiaojukeji.kafka.manager.common.entity.dto.ClusterTopicDTO;
import com.xiaojukeji.kafka.manager.common.entity.dto.op.RebalanceDTO;
import com.xiaojukeji.kafka.manager.common.entity.dto.op.topic.*;
import com.xiaojukeji.kafka.manager.common.entity.pojo.ClusterDO;
import com.xiaojukeji.kafka.manager.common.entity.pojo.TopicDO;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.service.service.AdminService;
import com.xiaojukeji.kafka.manager.service.service.ClusterService;
import com.xiaojukeji.kafka.manager.common.utils.JsonUtils;
import com.xiaojukeji.kafka.manager.common.utils.SpringTool;
import com.xiaojukeji.kafka.manager.service.service.TopicManagerService;
import com.xiaojukeji.kafka.manager.common.constant.ApiPrefix;
import com.xiaojukeji.kafka.manager.common.entity.TopicOperationResult;
import com.xiaojukeji.kafka.manager.service.utils.TopicCommands;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
/**
 * Operations (O&M) utility APIs
* @author zengqiao
* @date 20/4/2
*/
@Api(tags = "OP-Utils相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V1_OP_PREFIX)
public class OpUtilsController {
@Autowired
private ClusterService clusterService;
@Autowired
private AdminService adminService;
@Autowired
private TopicManagerService topicManagerService;
@ApiOperation(value = "创建Topic")
@RequestMapping(value = {"utils/topics"}, method = RequestMethod.POST)
@ResponseBody
public Result createCommonTopic(@RequestBody TopicCreationDTO dto) {
Result<ClusterDO> rc = checkParamAndGetClusterDO(dto);
if (rc.getCode() != ResultStatus.SUCCESS.getCode()) {
return rc;
}
Properties properties = dto.getProperties();
if (ValidateUtils.isNull(properties)) {
properties = new Properties();
}
properties.put(KafkaConstant.RETENTION_MS_KEY, String.valueOf(dto.getRetentionTime()));
ResultStatus rs = adminService.createTopic(
rc.getData(),
TopicDO.buildFrom(dto),
dto.getPartitionNum(),
dto.getReplicaNum(),
dto.getRegionId(),
dto.getBrokerIdList(),
properties,
SpringTool.getUserName(),
SpringTool.getUserName()
);
return Result.buildFrom(rs);
}
@ApiOperation(value = "Topic扩分区", notes = "")
@RequestMapping(value = {"utils/expand-partitions"}, method = RequestMethod.PUT)
@ResponseBody
public Result<List<TopicOperationResult>> expandTopics(@RequestBody List<TopicExpansionDTO> dtoList) {
if (ValidateUtils.isNull(dtoList) || dtoList.size() > Constant.MAX_TOPIC_OPERATION_SIZE_PER_REQUEST) {
return Result.buildFrom(ResultStatus.PARAM_ILLEGAL);
}
List<TopicOperationResult> resultList = new ArrayList<>();
for (TopicExpansionDTO dto: dtoList) {
Result<ClusterDO> rc = checkParamAndGetClusterDO(dto);
if (!Constant.SUCCESS.equals(rc.getCode())) {
resultList.add(TopicOperationResult.buildFrom(dto.getClusterId(), dto.getTopicName(), rc));
continue;
}
            // Parameters are valid; proceed to expand the topic's partitions
ResultStatus statusEnum = adminService.expandPartitions(
rc.getData(),
dto.getTopicName(),
dto.getPartitionNum(),
dto.getRegionId(),
dto.getBrokerIdList(),
SpringTool.getUserName()
);
resultList.add(TopicOperationResult.buildFrom(dto.getClusterId(), dto.getTopicName(), statusEnum));
}
for (TopicOperationResult operationResult: resultList) {
if (!Constant.SUCCESS.equals(operationResult.getCode())) {
return Result.buildFrom(ResultStatus.OPERATION_FAILED, resultList);
}
}
return new Result<>(resultList);
}
@ApiOperation(value = "Topic删除", notes = "单次不允许超过10个Topic")
@RequestMapping(value = {"utils/topics"}, method = RequestMethod.DELETE)
@ResponseBody
public Result<List<TopicOperationResult>> deleteTopics(@RequestBody List<TopicDeletionDTO> dtoList) {
if (ValidateUtils.isNull(dtoList) || dtoList.size() > Constant.MAX_TOPIC_OPERATION_SIZE_PER_REQUEST) {
return Result.buildFrom(ResultStatus.PARAM_ILLEGAL);
}
String operator = SpringTool.getUserName();
List<TopicOperationResult> resultList = new ArrayList<>();
for (TopicDeletionDTO dto: dtoList) {
Result<ClusterDO> rc = checkParamAndGetClusterDO(dto);
if (rc.getCode() != ResultStatus.SUCCESS.getCode()) {
resultList.add(TopicOperationResult.buildFrom(dto.getClusterId(), dto.getTopicName(), rc));
continue;
}
            // Parameters are valid; proceed to delete the topic
ResultStatus statusEnum = adminService.deleteTopic(rc.getData(), dto.getTopicName(), operator);
resultList.add(TopicOperationResult.buildFrom(dto.getClusterId(), dto.getTopicName(), statusEnum));
}
for (TopicOperationResult operationResult: resultList) {
if (!Constant.SUCCESS.equals(operationResult.getCode())) {
return Result.buildFrom(ResultStatus.OPERATION_FAILED, resultList);
}
}
return new Result<>(resultList);
}
@ApiOperation(value = "修改Topic", notes = "")
@RequestMapping(value = {"utils/topics"}, method = RequestMethod.PUT)
@ResponseBody
public Result modifyTopic(@RequestBody TopicModificationDTO dto) {
Result<ClusterDO> rc = checkParamAndGetClusterDO(dto);
if (rc.getCode() != ResultStatus.SUCCESS.getCode()) {
return rc;
}
ClusterDO clusterDO = rc.getData();
        // Read the config properties from the request
Properties properties = dto.getProperties();
if (ValidateUtils.isNull(properties)) {
properties = new Properties();
}
properties.put(KafkaConstant.RETENTION_MS_KEY, String.valueOf(dto.getRetentionTime()));
        // Apply the config modification
String operator = SpringTool.getUserName();
ResultStatus rs = TopicCommands.modifyTopicConfig(clusterDO, dto.getTopicName(), properties);
if (!ResultStatus.SUCCESS.equals(rs)) {
return Result.buildFrom(rs);
}
topicManagerService.modifyTopic(dto.getClusterId(), dto.getTopicName(), dto.getDescription(), operator);
return new Result();
}
@ApiOperation(value = "优先副本选举状态")
@RequestMapping(value = "utils/rebalance-status", method = RequestMethod.GET)
@ResponseBody
public Result preferredReplicaElectStatus(@RequestParam("clusterId") Long clusterId) {
ClusterDO clusterDO = clusterService.getById(clusterId);
if (ValidateUtils.isNull(clusterDO)) {
return Result.buildFrom(ResultStatus.CLUSTER_NOT_EXIST);
}
TaskStatusEnum statusEnum = adminService.preferredReplicaElectionStatus(clusterDO);
return new Result<>(JsonUtils.toJson(statusEnum));
}
@ApiOperation(value = "优先副本选举")
@RequestMapping(value = "utils/rebalance", method = RequestMethod.POST)
@ResponseBody
public Result preferredReplicaElect(@RequestBody RebalanceDTO reqObj) {
if (!reqObj.paramLegal()) {
return Result.buildFrom(ResultStatus.PARAM_ILLEGAL);
}
ClusterDO clusterDO = clusterService.getById(reqObj.getClusterId());
if (ValidateUtils.isNull(clusterDO)) {
return Result.buildFrom(ResultStatus.CLUSTER_NOT_EXIST);
}
String operator = SpringTool.getUserName();
ResultStatus rs = null;
if (RebalanceDimensionEnum.CLUSTER.getCode().equals(reqObj.getDimension())) {
rs = adminService.preferredReplicaElection(clusterDO, operator);
} else if (RebalanceDimensionEnum.BROKER.getCode().equals(reqObj.getDimension())) {
rs = adminService.preferredReplicaElection(clusterDO, reqObj.getBrokerId(), operator);
        } else {
            // TODO: 19/7/8 preferred replica election at the Topic & Region dimensions
            // Unsupported dimension: return explicitly instead of passing a null ResultStatus to Result.buildFrom.
            return Result.buildFrom(ResultStatus.PARAM_ILLEGAL);
        }
return Result.buildFrom(rs);
}
private Result<ClusterDO> checkParamAndGetClusterDO(ClusterTopicDTO dto) {
if (!dto.paramLegal()) {
return Result.buildFrom(ResultStatus.PARAM_ILLEGAL);
}
ClusterDO clusterDO = clusterService.getById(dto.getClusterId());
if (ValidateUtils.isNull(clusterDO)) {
return Result.buildFrom(ResultStatus.CLUSTER_NOT_EXIST);
}
return new Result<>(clusterDO);
}
}
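Note: createCommonTopic() and modifyTopic() both force a retention override into the topic's Properties via KafkaConstant.RETENTION_MS_KEY before delegating to the admin layer. The snippet below only illustrates that step; "retention.ms" is the standard Kafka topic config and is assumed to be what the constant resolves to.

import java.util.Properties;
import java.util.concurrent.TimeUnit;

public class RetentionPropsSketch {
    public static void main(String[] args) {
        // Three-day retention expressed in milliseconds, the unit the controller expects.
        long retentionMs = TimeUnit.DAYS.toMillis(3);

        Properties properties = new Properties();
        // "retention.ms" is assumed to be the value behind KafkaConstant.RETENTION_MS_KEY.
        properties.put("retention.ms", String.valueOf(retentionMs));

        System.out.println(properties);   // {retention.ms=259200000}
    }
}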


@@ -0,0 +1,77 @@
package com.xiaojukeji.kafka.manager.web.api.versionone.rd;
import com.xiaojukeji.kafka.manager.common.entity.ResultStatus;
import com.xiaojukeji.kafka.manager.common.entity.vo.common.AccountVO;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.common.constant.ApiPrefix;
import com.xiaojukeji.kafka.manager.web.converters.AccountConverter;
import com.xiaojukeji.kafka.manager.common.entity.dto.rd.AccountDTO;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.pojo.AccountDO;
import com.xiaojukeji.kafka.manager.account.AccountService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.List;
/**
* @author zengqiao
* @date 19/5/3
*/
@Api(tags = "RD-Account相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V1_RD_PREFIX)
public class RdAccountController {
    private static final Logger LOGGER = LoggerFactory.getLogger(RdAccountController.class);
@Autowired
private AccountService accountService;
@ApiOperation(value = "添加账号", notes = "")
@RequestMapping(value = "accounts", method = RequestMethod.POST)
@ResponseBody
public Result addAccount(@RequestBody AccountDTO dto) {
if (!dto.legal() || ValidateUtils.isNull(dto.getPassword())) {
return Result.buildFrom(ResultStatus.PARAM_ILLEGAL);
}
ResultStatus rs = accountService.createAccount(AccountConverter.convert2AccountDO(dto));
return Result.buildFrom(rs);
}
@ApiOperation(value = "删除账号", notes = "")
@RequestMapping(value = "accounts", method = RequestMethod.DELETE)
@ResponseBody
public Result deleteAccount(@RequestParam("username") String username) {
ResultStatus rs = accountService.deleteByName(username);
return Result.buildFrom(rs);
}
@ApiOperation(value = "修改账号", notes = "")
@RequestMapping(value = "accounts", method = RequestMethod.PUT)
@ResponseBody
public Result updateAccount(@RequestBody AccountDTO reqObj) {
if (!reqObj.legal()) {
return Result.buildFrom(ResultStatus.PARAM_ILLEGAL);
}
ResultStatus rs = accountService.updateAccount(AccountConverter.convert2AccountDO(reqObj));
return Result.buildFrom(rs);
}
@ApiOperation(value = "账号列表", notes = "")
@RequestMapping(value = "accounts", method = RequestMethod.GET)
@ResponseBody
public Result<List<AccountVO>> listAccounts() {
try {
List<AccountDO> accountDOList = accountService.list();
return new Result<>(AccountConverter.convert2AccountVOList(accountDOList));
} catch (Exception e) {
logger.error("listAccounts@AdminAccountController, list failed.", e);
}
return Result.buildFrom(ResultStatus.MYSQL_ERROR);
}
}


@@ -0,0 +1,43 @@
package com.xiaojukeji.kafka.manager.web.api.versionone.rd;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.dto.normal.AppDTO;
import com.xiaojukeji.kafka.manager.common.entity.vo.normal.app.AppVO;
import com.xiaojukeji.kafka.manager.service.service.gateway.AppService;
import com.xiaojukeji.kafka.manager.common.utils.SpringTool;
import com.xiaojukeji.kafka.manager.common.constant.ApiPrefix;
import com.xiaojukeji.kafka.manager.web.converters.AppConverter;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.List;
/**
* @author zengqiao
* @date 20/4/16
*/
@Api(tags = "RD-APP相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V1_RD_PREFIX)
public class RdAppController {
@Autowired
private AppService appService;
@ApiOperation(value = "App列表", notes = "")
@RequestMapping(value = "apps", method = RequestMethod.GET)
@ResponseBody
public Result<List<AppVO>> getApps() {
return new Result<>(AppConverter.convert2AppVOList(appService.listAll()));
}
@ApiOperation(value = "App修改", notes = "")
@RequestMapping(value = "apps", method = RequestMethod.PUT)
@ResponseBody
public Result modifyApps(@RequestBody AppDTO dto) {
return Result.buildFrom(
appService.updateByAppId(dto, SpringTool.getUserName(), true)
);
}
}


@@ -0,0 +1,136 @@
package com.xiaojukeji.kafka.manager.web.api.versionone.rd;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.vo.normal.BillStaffDetailVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.normal.BillStaffSummaryVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.normal.BillTopicVO;
import com.xiaojukeji.kafka.manager.common.utils.DateUtils;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.common.entity.pojo.KafkaBillDO;
import com.xiaojukeji.kafka.manager.common.entity.pojo.LogicalClusterDO;
import com.xiaojukeji.kafka.manager.service.cache.LogicalClusterMetadataManager;
import com.xiaojukeji.kafka.manager.service.service.KafkaBillService;
import com.xiaojukeji.kafka.manager.common.utils.SpringTool;
import com.xiaojukeji.kafka.manager.common.constant.ApiPrefix;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.*;
/**
* @author zengqiao
* @date 20/4/26
*/
@Api(tags = "RD-Bill相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V1_RD_PREFIX)
public class RdBillController {
@Autowired
private KafkaBillService kafkaBillService;
@Autowired
private LogicalClusterMetadataManager logicalClusterMetadataManager;
@ApiOperation(value = "用户账单概览", notes = "")
@RequestMapping(value = "bills/staff-summary", method = RequestMethod.GET)
@ResponseBody
public Result<List<BillStaffSummaryVO>> getBillStaffSummary(@RequestParam("timestamp") Long timestamp) {
List<KafkaBillDO> kafkaBillDOList =
kafkaBillService.getByGmtDay(DateUtils.getFormattedDate(timestamp).substring(0, 7));
if (ValidateUtils.isEmptyList(kafkaBillDOList)) {
return new Result<>();
}
Map<String, BillStaffSummaryVO> ldapBillMap = new HashMap<>();
for (KafkaBillDO kafkaBillDO: kafkaBillDOList) {
BillStaffSummaryVO vo = ldapBillMap.get(kafkaBillDO.getPrincipal());
if (ValidateUtils.isNull(vo)) {
vo = new BillStaffSummaryVO();
vo.setUsername(kafkaBillDO.getPrincipal());
vo.setTopicNum(0);
vo.setQuota(0.0);
vo.setCost(0.0);
vo.setGmtMonth(kafkaBillDO.getGmtDay());
vo.setTimestamp(kafkaBillDO.getGmtCreate().getTime());
ldapBillMap.put(kafkaBillDO.getPrincipal(), vo);
}
vo.setTopicNum(vo.getTopicNum() + 1);
vo.setQuota(vo.getQuota() + kafkaBillDO.getQuota());
vo.setCost(vo.getCost() + kafkaBillDO.getCost());
}
return new Result<>(new ArrayList<>(ldapBillMap.values()));
}
@ApiOperation(value = "用户账单概览", notes = "")
@RequestMapping(value = "bills/{username}/staff-summary", method = RequestMethod.GET)
@ResponseBody
public Result<List<BillStaffSummaryVO>> getBillStaffSummary(@PathVariable String username,
@RequestParam("startTime") Long startTime,
@RequestParam("endTime") Long endTime) {
List<KafkaBillDO> kafkaBillDOList =
kafkaBillService.getByPrincipal(username, new Date(startTime), new Date(endTime));
if (ValidateUtils.isEmptyList(kafkaBillDOList)) {
return new Result<>();
}
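// Aggregate this user's bill rows per day (gmtDay); TreeMap keeps the days in order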
Map<String, BillStaffSummaryVO> billMap = new TreeMap<>();
for (KafkaBillDO kafkaBillDO: kafkaBillDOList) {
BillStaffSummaryVO vo = billMap.get(kafkaBillDO.getGmtDay());
if (ValidateUtils.isNull(vo)) {
vo = new BillStaffSummaryVO();
vo.setUsername(SpringTool.getUserName());
vo.setTopicNum(0);
vo.setQuota(0.0);
vo.setCost(0.0);
vo.setGmtMonth(kafkaBillDO.getGmtDay());
vo.setTimestamp(kafkaBillDO.getGmtCreate().getTime());
billMap.put(kafkaBillDO.getGmtDay(), vo);
}
vo.setTopicNum(vo.getTopicNum() + 1);
vo.setQuota(vo.getQuota() + kafkaBillDO.getQuota());
vo.setCost(vo.getCost() + kafkaBillDO.getCost());
}
return new Result<>(new ArrayList<>(billMap.values()));
}
@ApiOperation(value = "用户账单详情", notes = "")
@RequestMapping(value = "bills/{username}/staff-detail", method = RequestMethod.GET)
@ResponseBody
public Result<BillStaffDetailVO> getBillStaffDetail(@PathVariable String username,
@RequestParam("timestamp") Long timestamp) {
List<KafkaBillDO> kafkaBillDOList =
kafkaBillService.getByGmtDay(DateUtils.getFormattedDate(timestamp).substring(0, 7));
if (ValidateUtils.isEmptyList(kafkaBillDOList)) {
return new Result<>();
}
BillStaffDetailVO billStaffDetailVO = new BillStaffDetailVO();
billStaffDetailVO.setUsername(username);
billStaffDetailVO.setBillList(new ArrayList<>());
Double costSum = 0.0;
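// Keep only the rows belonging to the requested user, total their cost, and resolve the logical cluster name (falling back to "unknown")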
for (KafkaBillDO kafkaBillDO: kafkaBillDOList) {
if (!kafkaBillDO.getPrincipal().equals(username)) {
continue;
}
BillTopicVO vo = new BillTopicVO();
vo.setClusterName("unknown");
LogicalClusterDO logicalClusterDO = logicalClusterMetadataManager.getTopicLogicalCluster(
kafkaBillDO.getClusterId(),
kafkaBillDO.getTopicName()
);
if (!ValidateUtils.isNull(logicalClusterDO)) {
vo.setClusterId(logicalClusterDO.getId());
vo.setClusterName(logicalClusterDO.getName());
}
vo.setTopicName(kafkaBillDO.getTopicName());
vo.setQuota(kafkaBillDO.getQuota());
vo.setCost(kafkaBillDO.getCost());
costSum += kafkaBillDO.getCost();
billStaffDetailVO.getBillList().add(vo);
}
billStaffDetailVO.setCostSum(costSum);
return new Result<>(billStaffDetailVO);
}
}

View File

@@ -0,0 +1,185 @@
package com.xiaojukeji.kafka.manager.web.api.versionone.rd;
import com.xiaojukeji.kafka.manager.common.constant.KafkaMetricsCollections;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.ResultStatus;
import com.xiaojukeji.kafka.manager.common.entity.ao.BrokerBasicDTO;
import com.xiaojukeji.kafka.manager.common.entity.ao.analysis.AnalysisBrokerDTO;
import com.xiaojukeji.kafka.manager.common.entity.metrics.BrokerMetrics;
import com.xiaojukeji.kafka.manager.common.entity.vo.common.RealTimeMetricsVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.common.TopicOverviewVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.rd.broker.*;
import com.xiaojukeji.kafka.manager.common.utils.CopyUtils;
import com.xiaojukeji.kafka.manager.common.utils.ListUtils;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.common.zookeeper.znode.brokers.BrokerMetadata;
import com.xiaojukeji.kafka.manager.common.zookeeper.znode.brokers.PartitionState;
import com.xiaojukeji.kafka.manager.common.entity.pojo.BrokerMetricsDO;
import com.xiaojukeji.kafka.manager.common.entity.pojo.RegionDO;
import com.xiaojukeji.kafka.manager.service.cache.PhysicalClusterMetadataManager;
import com.xiaojukeji.kafka.manager.service.service.AnalysisService;
import com.xiaojukeji.kafka.manager.service.service.BrokerService;
import com.xiaojukeji.kafka.manager.service.service.RegionService;
import com.xiaojukeji.kafka.manager.service.service.TopicService;
import com.xiaojukeji.kafka.manager.common.constant.ApiPrefix;
import com.xiaojukeji.kafka.manager.web.converters.BrokerModelConverter;
import com.xiaojukeji.kafka.manager.web.converters.CommonModelConverter;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.*;
import java.util.stream.Collectors;
/**
* @author zengqiao
* @date 20/4/20
*/
@Api(tags = "RD-Broker相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V1_RD_PREFIX)
public class RdBrokerController {
@Autowired
private BrokerService brokerService;
@Autowired
private TopicService topicService;
@Autowired
private AnalysisService analysisService;
@Autowired
private RegionService regionService;
@ApiOperation(value = "Broker元信息")
@RequestMapping(value = "{clusterId}/brokers/broker-metadata", method = RequestMethod.GET)
@ResponseBody
public Result<List<BrokerMetadataVO>> getBrokerMetadataList(@PathVariable Long clusterId) {
List<Integer> brokerIdList = PhysicalClusterMetadataManager.getBrokerIdList(clusterId);
List<BrokerMetadataVO> brokerMetadataVOList = new ArrayList<>();
for (Integer brokerId : brokerIdList) {
BrokerMetadata brokerMetadata = PhysicalClusterMetadataManager.getBrokerMetadata(clusterId, brokerId);
if (brokerMetadata == null) {
continue;
}
brokerMetadataVOList.add(new BrokerMetadataVO(brokerMetadata.getBrokerId(), brokerMetadata.getHost()));
}
return new Result<>(brokerMetadataVOList);
}
@ApiOperation(value = "Broker基本信息")
@RequestMapping(value = "{clusterId}/brokers/{brokerId}/basic-info", method = RequestMethod.GET)
@ResponseBody
public Result<BrokerBasicVO> getBrokerBasic(@PathVariable Long clusterId,
@PathVariable Integer brokerId) {
BrokerBasicDTO brokerBasicDTO = brokerService.getBrokerBasicDTO(clusterId, brokerId);
if (brokerBasicDTO == null) {
return Result.buildFrom(ResultStatus.BROKER_NOT_EXIST);
}
BrokerBasicVO brokerBasicVO = new BrokerBasicVO();
CopyUtils.copyProperties(brokerBasicVO, brokerBasicDTO);
return new Result<>(brokerBasicVO);
}
@ApiOperation(value = "Broker基本信息列表", notes = "")
@RequestMapping(value = "clusters/{clusterId}/brokers/basic-info", method = RequestMethod.GET)
@ResponseBody
public Result<List<RdBrokerBasicVO>> getBrokerBasics(@PathVariable("clusterId") Long clusterId) {
List<Integer> brokerIdList = PhysicalClusterMetadataManager.getBrokerIdList(clusterId);
if (ValidateUtils.isEmptyList(brokerIdList)) {
return new Result<>(new ArrayList<>());
}
List<RegionDO> regionDOList = regionService.listAll()
.stream()
.filter(regionDO -> clusterId.equals(regionDO.getClusterId()))
.collect(Collectors.toList());
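// Index regions by broker id so each broker row can be tagged with the region it belongs to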
Map<Integer, RegionDO> regionMap = new HashMap<>();
for (RegionDO regionDO : regionDOList) {
if (ValidateUtils.isNull(regionDO)) {
continue;
}
for (Integer brokerId : ListUtils.string2IntList(regionDO.getBrokerList())) {
regionMap.put(brokerId, regionDO);
}
}
return new Result<>(BrokerModelConverter.convert2RdBrokerBasicVO(clusterId, brokerIdList, regionMap));
}
@ApiOperation(value = "BrokerTopic信息")
@RequestMapping(value = "{clusterId}/brokers/{brokerId}/topics", method = RequestMethod.GET)
@ResponseBody
public Result<List<TopicOverviewVO>> getBrokerTopics(@PathVariable Long clusterId,
@PathVariable Integer brokerId) {
return new Result<>(CommonModelConverter.convert2TopicOverviewVOList(
clusterId,
topicService.getTopicOverviewList(clusterId, brokerId)
));
}
@ApiOperation(value = "Broker分区信息")
@RequestMapping(value = "{clusterId}/brokers/{brokerId}/partitions", method = RequestMethod.GET)
@ResponseBody
public Result<List<BrokerPartitionVO>> getBrokerPartitions(@PathVariable Long clusterId,
@PathVariable Integer brokerId) {
Map<String, List<PartitionState>> partitionStateMap = topicService.getTopicPartitionState(clusterId, brokerId);
if (ValidateUtils.isNull(partitionStateMap)) {
return Result.buildFrom(ResultStatus.BROKER_NOT_EXIST);
}
return new Result<>(BrokerModelConverter.convert2BrokerPartitionVOList(clusterId, brokerId, partitionStateMap));
}
@ApiOperation(value = "Broker实时流量")
@RequestMapping(value = "{clusterId}/brokers/{brokerId}/metrics", method = RequestMethod.GET)
@ResponseBody
public Result<RealTimeMetricsVO> getBrokerMetrics(@PathVariable Long clusterId,
@PathVariable Integer brokerId) {
BrokerMetrics brokerMetrics = brokerService.getBrokerMetricsFromJmx(
clusterId,
brokerId,
KafkaMetricsCollections.COMMON_DETAIL_METRICS
);
if (ValidateUtils.isNull(brokerMetrics)) {
return Result.buildFrom(ResultStatus.BROKER_NOT_EXIST);
}
return new Result<>(CommonModelConverter.convert2RealTimeMetricsVO(brokerMetrics));
}
@ApiOperation(value = "Broker磁盘分区", notes = "")
@RequestMapping(value = "{clusterId}/brokers/{brokerId}/partitions-location", method = RequestMethod.GET)
@ResponseBody
public Result<List<BrokerDiskTopicVO>> getBrokerPartitionsLocation(@PathVariable Long clusterId,
@PathVariable Integer brokerId) {
return new Result<>(CommonModelConverter.convert2BrokerDiskTopicVOList(
brokerService.getBrokerTopicLocation(clusterId, brokerId)
));
}
@ApiOperation(value = "Broker历史指标")
@RequestMapping(value = "{clusterId}/brokers/{brokerId}/metrics-history", method = RequestMethod.GET)
@ResponseBody
public Result<List<BrokerMetricsVO>> getBrokerMetricsHistory(@PathVariable Long clusterId,
@PathVariable Integer brokerId,
@RequestParam("startTime") Long startTime,
@RequestParam("endTime") Long endTime) {
List<BrokerMetricsDO> metricsList =
brokerService.getBrokerMetricsFromDB(clusterId, brokerId, new Date(startTime), new Date(endTime));
return new Result<>(BrokerModelConverter.convert2BrokerMetricsVOList(metricsList));
}
@ApiOperation(value = "BrokerTopic分析")
@RequestMapping(value = "{clusterId}/brokers/{brokerId}/analysis", method = RequestMethod.GET)
@ResponseBody
public Result<AnalysisBrokerVO> getTopicAnalyzer(@PathVariable Long clusterId, @PathVariable Integer brokerId) {
AnalysisBrokerDTO analysisBrokerDTO = analysisService.doAnalysisBroker(clusterId, brokerId);
return new Result<>(BrokerModelConverter.convert2AnalysisBrokerVO(analysisBrokerDTO));
}
@ApiOperation(value = "Broker删除", notes = "删除DB中的Broker信息")
@RequestMapping(value = "{clusterId}/brokers", method = RequestMethod.DELETE)
@ResponseBody
public Result deleteBrokerId(@PathVariable Long clusterId, @RequestParam("brokerId") Integer brokerId) {
return Result.buildFrom(brokerService.delete(clusterId, brokerId));
}
}

View File

@@ -0,0 +1,171 @@
package com.xiaojukeji.kafka.manager.web.api.versionone.rd;
import com.xiaojukeji.kafka.manager.common.bizenum.KafkaClientEnum;
import com.xiaojukeji.kafka.manager.common.bizenum.PeakFlowStatusEnum;
import com.xiaojukeji.kafka.manager.common.constant.KafkaMetricsCollections;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.vo.normal.cluster.TopicMetadataVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.rd.cluster.RdClusterMetricsVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.rd.cluster.ClusterBrokerStatusVO;
import com.xiaojukeji.kafka.manager.common.entity.ao.BrokerOverviewDTO;
import com.xiaojukeji.kafka.manager.common.entity.pojo.RegionDO;
import com.xiaojukeji.kafka.manager.common.entity.vo.rd.KafkaControllerVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.common.RealTimeMetricsVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.common.TopicOverviewVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.common.BrokerOverviewVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.rd.cluster.ClusterDetailVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.common.TopicThrottleVO;
import com.xiaojukeji.kafka.manager.common.utils.DateUtils;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.service.cache.PhysicalClusterMetadataManager;
import com.xiaojukeji.kafka.manager.service.service.*;
import com.xiaojukeji.kafka.manager.common.constant.ApiPrefix;
import com.xiaojukeji.kafka.manager.web.converters.*;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
/**
* @author zengqiao
* @date 20/4/1
*/
@Api(tags = "RD-Cluster维度相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V1_RD_PREFIX)
public class RdClusterController {
@Autowired
private ClusterService clusterService;
@Autowired
private BrokerService brokerService;
@Autowired
private TopicService topicService;
@Autowired
private RegionService regionService;
@Autowired
private ThrottleService throttleService;
@ApiOperation(value = "集群基本信息列表", notes = "默认不要详情")
@RequestMapping(value = "clusters/basic-info", method = RequestMethod.GET)
@ResponseBody
public Result<List<ClusterDetailVO>> getClusterBasicInfo(
@RequestParam(value = "need-detail", required = false) Boolean needDetail) {
if (ValidateUtils.isNull(needDetail)) {
needDetail = false;
}
return new Result<>(
ClusterModelConverter.convert2ClusterDetailVOList(clusterService.getClusterDetailDTOList(needDetail))
);
}
@ApiOperation(value = "集群基本信息", notes = "默认不要详情")
@RequestMapping(value = "clusters/{clusterId}/basic-info", method = RequestMethod.GET)
@ResponseBody
public Result<ClusterDetailVO> getClusterBasicInfo(
@PathVariable Long clusterId,
@RequestParam(value = "need-detail", required = false) Boolean needDetail) {
if (ValidateUtils.isNull(needDetail)) {
needDetail = false;
}
return new Result<>(
ClusterModelConverter.convert2ClusterDetailVO(clusterService.getClusterDetailDTO(clusterId, needDetail))
);
}
@ApiOperation(value = "集群实时流量")
@RequestMapping(value = "clusters/{clusterId}/metrics", method = RequestMethod.GET)
@ResponseBody
public Result<RealTimeMetricsVO> getClusterRealTimeMetrics(@PathVariable Long clusterId) {
return new Result<>(CommonModelConverter.convert2RealTimeMetricsVO(
brokerService.getBrokerMetricsFromJmx(
clusterId,
new HashSet<>(PhysicalClusterMetadataManager.getBrokerIdList(clusterId)),
KafkaMetricsCollections.COMMON_DETAIL_METRICS
)
));
}
@ApiOperation(value = "集群历史流量")
@RequestMapping(value = "clusters/{clusterId}/metrics-history", method = RequestMethod.GET)
@ResponseBody
public Result<List<RdClusterMetricsVO>> getClusterMetricsHistory(@PathVariable Long clusterId,
@RequestParam("startTime") Long startTime,
@RequestParam("endTime") Long endTime) {
return new Result<>(ClusterModelConverter.convert2RdClusterMetricsVOList(
clusterService.getClusterMetricsFromDB(
clusterId,
DateUtils.long2Date(startTime),
DateUtils.long2Date(endTime)
)
));
}
@ApiOperation(value = "集群Broker列表", notes = "")
@RequestMapping(value = "clusters/{clusterId}/brokers", method = RequestMethod.GET)
@ResponseBody
public Result<List<BrokerOverviewVO>> getBrokerOverview(@PathVariable Long clusterId) {
List<RegionDO> regionDOList = regionService.getByClusterId(clusterId);
List<BrokerOverviewDTO> brokerOverviewDTOList = brokerService.getBrokerOverviewList(clusterId, null);
return new Result<>(ClusterModelConverter.convert2BrokerOverviewList(brokerOverviewDTOList, regionDOList));
}
@ApiOperation(value = "集群Broker状态", notes = "饼状图")
@RequestMapping(value = "clusters/{clusterId}/brokers-status", method = RequestMethod.GET)
@ResponseBody
public Result<ClusterBrokerStatusVO> getClusterBrokerStatusVO(@PathVariable Long clusterId) {
return new Result<>(ClusterModelConverter.convert2ClusterBrokerStatusVO(
brokerService.getClusterBrokerStatus(clusterId))
);
}
@ApiOperation(value = "集群Topic列表")
@RequestMapping(value = "clusters/{clusterId}/topics", method = RequestMethod.GET)
@ResponseBody
public Result<List<TopicOverviewVO>> getTopicOverview(@PathVariable Long clusterId) {
return new Result<>(CommonModelConverter.convert2TopicOverviewVOList(
clusterId,
topicService.getTopicOverviewList(
clusterId,
PhysicalClusterMetadataManager.getTopicNameList(clusterId)
)
));
}
@ApiOperation(value = "集群Controller变更历史")
@RequestMapping(value = "clusters/{clusterId}/controller-history", method = RequestMethod.GET)
@ResponseBody
public Result<List<KafkaControllerVO>> getControllerHistory(@PathVariable Long clusterId) {
return new Result<>(ClusterModelConverter.convert2KafkaControllerVOList(
clusterService.getKafkaControllerHistory(clusterId)
));
}
@ApiOperation(value = "集群限流信息")
@RequestMapping(value = "clusters/{clusterId}/throttles", method = RequestMethod.GET)
@ResponseBody
public Result<List<TopicThrottleVO>> getThrottles(@PathVariable Long clusterId) {
return new Result<>(ClusterModelConverter.convert2TopicThrottleVOList(
throttleService.getThrottledTopicsFromJmx(
clusterId,
new HashSet<>(PhysicalClusterMetadataManager.getBrokerIdList(clusterId)),
Arrays.asList(KafkaClientEnum.values())
)
));
}
@ApiOperation(value = "集群Topic元信息列表", notes = "")
@RequestMapping(value = "clusters/{clusterId}/topic-metadata", method = RequestMethod.GET)
@ResponseBody
public Result<List<TopicMetadataVO>> getTopicMetadatas(@PathVariable("clusterId") Long clusterId) {
return new Result<>(ClusterModelConverter.convert2TopicMetadataVOList(clusterId));
}
}

View File

@@ -0,0 +1,70 @@
package com.xiaojukeji.kafka.manager.web.api.versionone.rd;
import com.xiaojukeji.kafka.manager.common.bizenum.KafkaBrokerRoleEnum;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.ResultStatus;
import com.xiaojukeji.kafka.manager.service.service.ConfigService;
import com.xiaojukeji.kafka.manager.common.utils.JsonUtils;
import com.xiaojukeji.kafka.manager.common.constant.ApiPrefix;
import com.xiaojukeji.kafka.manager.web.converters.ConfigConverter;
import com.xiaojukeji.kafka.manager.common.entity.dto.config.ConfigDTO;
import com.xiaojukeji.kafka.manager.common.entity.vo.rd.ConfigVO;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.List;
/**
* @author zengqiao
* @date 20/3/19
*/
@Api(tags = "RD-Config相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V1_RD_PREFIX)
public class RdConfigController {
@Autowired
private ConfigService configService;
@ApiOperation(value = "配置列表")
@RequestMapping(value = "configs", method = RequestMethod.GET)
@ResponseBody
public Result<List<ConfigVO>> getConfigList() {
return new Result<>(ConfigConverter.convert2ConfigVOList(configService.listAll()));
}
@ApiOperation(value = "修改配置")
@RequestMapping(value = "configs", method = RequestMethod.PUT)
@ResponseBody
public Result modifyConfig(@RequestBody ConfigDTO dto) {
if (!dto.paramLegal()) {
return Result.buildFrom(ResultStatus.PARAM_ILLEGAL);
}
return Result.buildFrom(configService.updateByKey(dto));
}
@ApiOperation(value = "新增配置")
@RequestMapping(value = "configs", method = RequestMethod.POST)
@ResponseBody
public Result createConfig(@RequestBody ConfigDTO dto) {
if (!dto.paramLegal()) {
return Result.buildFrom(ResultStatus.PARAM_ILLEGAL);
}
return Result.buildFrom(configService.insert(dto));
}
@ApiOperation(value = "删除配置")
@RequestMapping(value = "configs", method = RequestMethod.DELETE)
@ResponseBody
public Result deleteById(@RequestParam("config-key") String configKey) {
return Result.buildFrom(configService.deleteByKey(configKey));
}
@ApiOperation(value = "Kafka的角色列表", notes = "")
@RequestMapping(value = "configs/kafka-roles", method = RequestMethod.GET)
@ResponseBody
public Result getKafkaBrokerRoleEnum() {
return new Result<>(JsonUtils.toJson(KafkaBrokerRoleEnum.class));
}
}

View File

@@ -0,0 +1,62 @@
package com.xiaojukeji.kafka.manager.web.api.versionone.rd;
import com.xiaojukeji.kafka.manager.common.bizenum.OffsetLocationEnum;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.ResultStatus;
import com.xiaojukeji.kafka.manager.common.entity.pojo.ClusterDO;
import com.xiaojukeji.kafka.manager.common.entity.vo.normal.consumer.ConsumerGroupVO;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.service.service.ClusterService;
import com.xiaojukeji.kafka.manager.service.service.ConsumerService;
import com.xiaojukeji.kafka.manager.common.constant.ApiPrefix;
import com.xiaojukeji.kafka.manager.web.converters.ConsumerModelConverter;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.List;
/**
* @author zengqiao
* @date 20/4/21
*/
@Api(tags = "RD-Consumer维度相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V1_RD_PREFIX)
public class RdConsumerController {
@Autowired
private ConsumerService consumerService;
@Autowired
private ClusterService clusterService;
@ApiOperation(value = "集群ConsumerGroup列表", notes="")
@RequestMapping(value = "{clusterId}/consumer-groups", method = RequestMethod.GET)
@ResponseBody
public Result<List<ConsumerGroupVO>> getConsumerGroupList(@PathVariable Long clusterId) {
return new Result<>(
ConsumerModelConverter.convert2ConsumerGroupVOList(consumerService.getConsumerGroupList(clusterId))
);
}
@ApiOperation(value = "消费组消费的Topic列表", notes = "")
@RequestMapping(value = "{clusterId}/consumer-groups/{consumerGroup}/topics", method = RequestMethod.GET)
@ResponseBody
public Result<List<String>> getConsumerGroupConsumedTopicList(@PathVariable Long clusterId,
@PathVariable String consumerGroup,
@RequestParam("location") String location) {
ClusterDO clusterDO = clusterService.getById(clusterId);
if (ValidateUtils.isNull(clusterDO)) {
return Result.buildFrom(ResultStatus.CLUSTER_NOT_EXIST);
}
OffsetLocationEnum offsetLocation = OffsetLocationEnum.getOffsetStoreLocation(location.toLowerCase());
if (ValidateUtils.isNull(offsetLocation)) {
return Result.buildFrom(ResultStatus.CG_LOCATION_ILLEGAL);
}
return new Result<>(
consumerService.getConsumerGroupConsumedTopicList(clusterId, consumerGroup, offsetLocation.location)
);
}
}

View File

@@ -0,0 +1,79 @@
package com.xiaojukeji.kafka.manager.web.api.versionone.rd;
import com.xiaojukeji.kafka.manager.common.bizenum.KafkaFileEnum;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.dto.normal.KafkaFileDTO;
import com.xiaojukeji.kafka.manager.common.entity.vo.rd.KafkaFileVO;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.kcm.component.storage.common.StorageEnum;
import com.xiaojukeji.kafka.manager.common.entity.pojo.KafkaFileDO;
import com.xiaojukeji.kafka.manager.service.service.ClusterService;
import com.xiaojukeji.kafka.manager.kcm.KafkaFileService;
import com.xiaojukeji.kafka.manager.common.utils.JsonUtils;
import com.xiaojukeji.kafka.manager.common.utils.SpringTool;
import com.xiaojukeji.kafka.manager.common.constant.ApiPrefix;
import com.xiaojukeji.kafka.manager.web.converters.KafkaFileConverter;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* @author zengqiao
* @date 20/4/26
*/
@Api(tags = "RD-Package管理相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V1_RD_PREFIX)
public class RdKafkaFileController {
@Autowired
private ClusterService clusterService;
@Autowired
private KafkaFileService kafkaFileService;
@ApiOperation(value = "文件枚举信息", notes = "")
@RequestMapping(value = "kafka-files/enums", method = RequestMethod.GET)
@ResponseBody
public Result getKafkaFileEnums() {
Map<String, Object> enumMap = new HashMap<>(2);
enumMap.put("fileEnum", JsonUtils.toJson(KafkaFileEnum.class));
enumMap.put("storageEnum", JsonUtils.toJson(StorageEnum.class));
return new Result<>(enumMap);
}
@ApiOperation(value = "上传文件", notes = "")
@RequestMapping(value = "kafka-files", method = RequestMethod.POST)
@ResponseBody
public Result uploadKafkaFile(KafkaFileDTO dto) {
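// dto.getModify() == true means replacing an existing file, otherwise a new file is uploaded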
if (ValidateUtils.isNull(dto.getModify()) || !dto.getModify()) {
return Result.buildFrom(kafkaFileService.uploadKafkaFile(dto, SpringTool.getUserName()));
}
return Result.buildFrom(kafkaFileService.modifyKafkaFile(dto, SpringTool.getUserName()));
}
@ApiOperation(value = "删除文件", notes = "")
@RequestMapping(value = "kafka-files", method = RequestMethod.DELETE)
@ResponseBody
public Result deleteKafkaFile(@RequestParam("id") Long id) {
return Result.buildFrom(kafkaFileService.deleteKafkaFile(id));
}
@ApiOperation(value = "文件列表", notes = "")
@RequestMapping(value = "kafka-files", method = RequestMethod.GET)
@ResponseBody
public Result<List<KafkaFileVO>> getKafkaFiles() {
List<KafkaFileDO> kafkaFileDOList = kafkaFileService.getKafkaFiles();
return new Result<>(KafkaFileConverter.convertKafkaFileVOList(kafkaFileDOList, clusterService));
}
@ApiOperation(value = "文件预览", notes = "")
@RequestMapping(value = "kafka-files/{fileId}/config-files", method = RequestMethod.GET)
public Result<String> previewKafkaFile(@PathVariable("fileId") Long fileId) {
return kafkaFileService.downloadKafkaConfigFile(fileId);
}
}

View File

@@ -0,0 +1,81 @@
package com.xiaojukeji.kafka.manager.web.api.versionone.rd;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.ResultStatus;
import com.xiaojukeji.kafka.manager.common.entity.dto.rd.LogicalClusterDTO;
import com.xiaojukeji.kafka.manager.common.entity.vo.rd.cluster.LogicalClusterVO;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.service.service.LogicalClusterService;
import com.xiaojukeji.kafka.manager.common.constant.ApiPrefix;
import com.xiaojukeji.kafka.manager.web.converters.LogicalClusterModelConverter;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.List;
/**
* @author zengqiao
* @date 20/6/29
*/
@Api(tags = "RD-LogicalCluster维度相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V1_RD_PREFIX)
public class RdLogicalClusterController {
@Autowired
private LogicalClusterService logicalClusterService;
@ApiOperation(value = "逻辑集群创建", notes = "")
@RequestMapping(value = "logical-clusters", method = RequestMethod.POST)
@ResponseBody
public Result createNew(@RequestBody LogicalClusterDTO dto) {
if (!dto.legal()) {
return Result.buildFrom(ResultStatus.PARAM_ILLEGAL);
}
return Result.buildFrom(
logicalClusterService.createLogicalCluster(LogicalClusterModelConverter.convert2LogicalClusterDO(dto))
);
}
@ApiOperation(value = "逻辑集群删除", notes = "")
@RequestMapping(value = "logical-clusters", method = RequestMethod.DELETE)
@ResponseBody
public Result deleteById(@RequestParam("id") Long id) {
return Result.buildFrom(logicalClusterService.deleteById(id));
}
@ApiOperation(value = "逻辑集群更新", notes = "")
@RequestMapping(value = "logical-clusters", method = RequestMethod.PUT)
@ResponseBody
public Result updateById(@RequestBody LogicalClusterDTO dto) {
if (!dto.legal() || ValidateUtils.isNull(dto.getId())) {
return Result.buildFrom(ResultStatus.PARAM_ILLEGAL);
}
return Result.buildFrom(
logicalClusterService.updateById(LogicalClusterModelConverter.convert2LogicalClusterDO(dto))
);
}
@ApiOperation(value = "查询逻辑集群列表", notes = "")
@RequestMapping(value = "logical-clusters", method = RequestMethod.GET)
@ResponseBody
public Result<LogicalClusterVO> getByLogicalClusterId(@RequestParam("id") Long physicalClusterId) {
return new Result<>(
LogicalClusterModelConverter.convert2LogicalClusterVO(
logicalClusterService.getById(physicalClusterId)
)
);
}
@ApiOperation(value = "查询逻辑集群列表", notes = "")
@RequestMapping(value = "{physicalClusterId}/logical-clusters", method = RequestMethod.GET)
@ResponseBody
public Result<List<LogicalClusterVO>> getByPhysicalClusterId(@PathVariable Long physicalClusterId) {
return new Result<>(
LogicalClusterModelConverter.convert2LogicalClusterVOList(
logicalClusterService.getByPhysicalClusterId(physicalClusterId)
)
);
}
}

View File

@@ -0,0 +1,33 @@
package com.xiaojukeji.kafka.manager.web.api.versionone.rd;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.dto.ClusterTopicDTO;
import com.xiaojukeji.kafka.manager.common.constant.ApiPrefix;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;
/**
* @author zengqiao
* @date 20/5/14
*/
@Api(tags = "RD-通知相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V1_RD_PREFIX)
public class RdNotifyController {
private static final Logger LOGGER = LoggerFactory.getLogger(RdNotifyController.class);
@ApiOperation(value = "Topic过期通知", notes = "")
@RequestMapping(value = "notifications/topic-expired", method = RequestMethod.POST)
@ResponseBody
public Result notifyTopicExpired(@RequestBody List<ClusterTopicDTO> dataList) {
return new Result();
}
}

View File

@@ -0,0 +1,46 @@
package com.xiaojukeji.kafka.manager.web.api.versionone.rd;
import com.xiaojukeji.kafka.manager.common.constant.ApiPrefix;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.ResultStatus;
import com.xiaojukeji.kafka.manager.common.entity.dto.rd.OperateRecordDTO;
import com.xiaojukeji.kafka.manager.common.entity.vo.rd.OperateRecordVO;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.service.service.OperateRecordService;
import com.xiaojukeji.kafka.manager.web.converters.OperateRecordModelConverter;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.List;
/**
* @author zhongyuankai_i
* @date 20/09/03
*/
@Api(tags = "RD-operate相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V1_RD_PREFIX)
public class RdOperateRecordController {
private static final int MAX_RECORD_COUNT = 200;
@Autowired
private OperateRecordService operateRecordService;
@ApiOperation(value = "查询操作记录", notes = "")
@RequestMapping(value = "operate-record", method = RequestMethod.POST)
@ResponseBody
public Result<List<OperateRecordVO>> getOperateRecords(@RequestBody OperateRecordDTO dto) {
if (ValidateUtils.isNull(dto) || !dto.legal()) {
return Result.buildFrom(ResultStatus.PARAM_ILLEGAL);
}
List<OperateRecordVO> voList = OperateRecordModelConverter.convert2OperateRecordVOList(operateRecordService.queryByCondt(dto));
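// Cap the response at MAX_RECORD_COUNT records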
if (voList.size() > MAX_RECORD_COUNT) {
voList = voList.subList(0, MAX_RECORD_COUNT);
}
return new Result<>(voList);
}
}

View File

@@ -0,0 +1,62 @@
package com.xiaojukeji.kafka.manager.web.api.versionone.rd;
import com.xiaojukeji.kafka.manager.common.entity.ResultStatus;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.common.constant.ApiPrefix;
import com.xiaojukeji.kafka.manager.web.converters.RegionModelConverter;
import com.xiaojukeji.kafka.manager.common.entity.vo.rd.RegionVO;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.service.service.RegionService;
import com.xiaojukeji.kafka.manager.common.entity.dto.rd.RegionDTO;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.List;
/**
* @author zengqiao
* @date 19/4/21
*/
@Api(tags = "RD-Region维度相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V1_RD_PREFIX)
public class RdRegionController {
@Autowired
private RegionService regionService;
@ApiOperation(value = "Region创建", notes = "")
@RequestMapping(value = "regions", method = RequestMethod.POST)
@ResponseBody
public Result createNew(@RequestBody RegionDTO dto) {
if (!dto.legal()) {
return Result.buildFrom(ResultStatus.PARAM_ILLEGAL);
}
return Result.buildFrom(regionService.createRegion(RegionModelConverter.convert2RegionDO(dto)));
}
@ApiOperation(value = "Region删除", notes = "")
@RequestMapping(value = "regions", method = RequestMethod.DELETE)
@ResponseBody
public Result deleteById(@RequestParam("id") Long id) {
return Result.buildFrom(regionService.deleteById(id));
}
@ApiOperation(value = "Region更新", notes = "")
@RequestMapping(value = "regions", method = RequestMethod.PUT)
@ResponseBody
public Result updateRegion(@RequestBody RegionDTO dto) {
if (!dto.legal() || ValidateUtils.isNull(dto.getId())) {
return Result.buildFrom(ResultStatus.PARAM_ILLEGAL);
}
return Result.buildFrom(regionService.updateRegion(RegionModelConverter.convert2RegionDO(dto)));
}
@ApiOperation(value = "查询Region列表", notes = "")
@RequestMapping(value = "{clusterId}/regions", method = RequestMethod.GET)
@ResponseBody
public Result<List<RegionVO>> getRegionList(@PathVariable Long clusterId) {
return new Result<>(RegionModelConverter.convert2RegionVOList(regionService.getByClusterId(clusterId)));
}
}

View File

@@ -0,0 +1,65 @@
package com.xiaojukeji.kafka.manager.web.api.versionone.rd;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.ResultStatus;
import com.xiaojukeji.kafka.manager.common.entity.dto.rd.CustomScheduledTaskDTO;
import com.xiaojukeji.kafka.manager.common.entity.vo.rd.CustomScheduledTaskVO;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.common.utils.SpringTool;
import com.xiaojukeji.kafka.manager.task.component.AbstractScheduledTask;
import com.xiaojukeji.kafka.manager.common.constant.ApiPrefix;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.web.bind.annotation.*;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
* @author zengqiao
* @date 20/8/11
*/
@Api(tags = "RD-Schedule相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V1_RD_PREFIX)
public class RdScheduledController {
@ApiOperation(value = "调度任务列表", notes = "")
@RequestMapping(value = "scheduled-tasks", method = RequestMethod.GET)
public Result<List<CustomScheduledTaskVO>> listAllScheduledTasks() {
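// Collect every AbstractScheduledTask bean and expose its bean name and cron expression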
Map<String, AbstractScheduledTask> beanMap = SpringTool.getBeansOfType(AbstractScheduledTask.class);
List<CustomScheduledTaskVO> voList = new ArrayList<>();
for (Map.Entry<String, AbstractScheduledTask> entry: beanMap.entrySet()) {
CustomScheduledTaskVO vo = new CustomScheduledTaskVO();
vo.setName(entry.getKey());
vo.setCron(entry.getValue().getCron());
voList.add(vo);
}
return new Result<>(voList);
}
@ApiOperation(value = "触发执行调度任务", response = Result.class)
@RequestMapping(value = "scheduled-tasks/{scheduledName}/run", method = RequestMethod.GET)
public Result triggerScheduledTask(@PathVariable String scheduledName) {
AbstractScheduledTask scheduledTask = SpringTool.getBean(scheduledName, AbstractScheduledTask.class);
if (ValidateUtils.isNull(scheduledTask)) {
return Result.buildFrom(ResultStatus.RESOURCE_NOT_EXIST);
}
scheduledTask.scheduleAllTaskFunction();
return Result.buildSuc();
}
@ApiOperation(value = "修改任务调度周期", response = Result.class)
@RequestMapping(value = "scheduled-tasks", method = RequestMethod.PUT)
public Result modifyScheduledTask(@RequestBody CustomScheduledTaskDTO dto) {
AbstractScheduledTask scheduledTask = SpringTool.getBean(dto.getName(), AbstractScheduledTask.class);
if (ValidateUtils.isNull(scheduledTask)) {
return Result.buildFrom(ResultStatus.RESOURCE_NOT_EXIST);
}
if (scheduledTask.modifyCron(dto.getName(), dto.getCron())) {
return Result.buildSuc();
}
return Result.buildFrom(ResultStatus.OPERATION_FAILED);
}
}

View File

@@ -0,0 +1,75 @@
package com.xiaojukeji.kafka.manager.web.api.versionone.rd;
import com.xiaojukeji.kafka.manager.common.constant.Constant;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.ResultStatus;
import com.xiaojukeji.kafka.manager.common.entity.ao.RdTopicBasic;
import com.xiaojukeji.kafka.manager.common.entity.vo.rd.RdTopicBasicVO;
import com.xiaojukeji.kafka.manager.common.utils.CopyUtils;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.common.entity.vo.rd.TopicBrokerVO;
import com.xiaojukeji.kafka.manager.service.cache.LogicalClusterMetadataManager;
import com.xiaojukeji.kafka.manager.service.cache.PhysicalClusterMetadataManager;
import com.xiaojukeji.kafka.manager.service.service.TopicManagerService;
import com.xiaojukeji.kafka.manager.service.service.TopicService;
import com.xiaojukeji.kafka.manager.common.constant.ApiPrefix;
import com.xiaojukeji.kafka.manager.web.converters.TopicModelConverter;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.List;
/**
* @author zengqiao
* @date 20/3/31
*/
@Api(tags = "RD-Topic相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V1_RD_PREFIX)
public class RdTopicController {
@Autowired
private LogicalClusterMetadataManager logicalClusterMetadataManager;
@Autowired
private TopicService topicService;
@Autowired
private TopicManagerService topicManagerService;
@ApiOperation(value = "TopicBroker信息", notes = "")
@RequestMapping(value = "{clusterId}/topics/{topicName}/brokers", method = RequestMethod.GET)
@ResponseBody
public Result<List<TopicBrokerVO>> getTopicBrokers(
@PathVariable Long clusterId,
@PathVariable String topicName,
@RequestParam(value = "isPhysicalClusterId", required = false) Boolean isPhysicalClusterId) {
Long physicalClusterId = logicalClusterMetadataManager.getPhysicalClusterId(clusterId, isPhysicalClusterId);
if (ValidateUtils.isNull(physicalClusterId)) {
return Result.buildFrom(ResultStatus.CLUSTER_NOT_EXIST);
}
if (!PhysicalClusterMetadataManager.isTopicExist(physicalClusterId, topicName)) {
return Result.buildFrom(ResultStatus.TOPIC_NOT_EXIST);
}
return new Result<>(TopicModelConverter.convert2TopicBrokerVO(
physicalClusterId,
topicService.getTopicBrokerList(physicalClusterId, topicName))
);
}
@ApiOperation(value = "查询Topic信息", notes = "")
@RequestMapping(value = "{physicalClusterId}/topics/{topicName}/basic-info", method = RequestMethod.GET)
@ResponseBody
public Result<RdTopicBasicVO> getTopicBasic(@PathVariable Long physicalClusterId,
@PathVariable String topicName) {
Result<RdTopicBasic> result = topicManagerService.getRdTopicBasic(physicalClusterId, topicName);
if (!Constant.SUCCESS.equals(result.getCode())) {
return new Result<>(result.getCode(), result.getMessage());
}
RdTopicBasicVO vo = new RdTopicBasicVO();
CopyUtils.copyProperties(vo, result.getData());
vo.setProperties(result.getData().getProperties());
return new Result<>(vo);
}
}

View File

@@ -0,0 +1,51 @@
package com.xiaojukeji.kafka.manager.web.config;
import org.apache.ibatis.session.SqlSessionFactory;
import org.mybatis.spring.SqlSessionFactoryBean;
import org.mybatis.spring.SqlSessionTemplate;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.jdbc.DataSourceBuilder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import javax.sql.DataSource;
/**
* @author zengqiao
* @date 20/3/17
*/
@Configuration
public class DataSourceConfig {
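// Wires the kafka-manager DataSource (spring.datasource.kafka-manager.*), the MyBatis SqlSessionFactory/SqlSessionTemplate and the transaction manager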
@Bean(name = "dataSource")
@ConfigurationProperties(prefix = "spring.datasource.kafka-manager")
@Primary
public DataSource dataSource() {
return DataSourceBuilder.create().build();
}
@Bean(name = "sqlSessionFactory")
@Primary
public SqlSessionFactory sqlSessionFactory(@Qualifier("dataSource") DataSource dataSource) throws Exception {
SqlSessionFactoryBean bean = new SqlSessionFactoryBean();
bean.setDataSource(dataSource);
bean.setMapperLocations(new PathMatchingResourcePatternResolver().getResources("classpath:mapper/*.xml"));
bean.setConfigLocation(new PathMatchingResourcePatternResolver().getResource("classpath:mybatis-config.xml"));
return bean.getObject();
}
@Bean(name = "transactionManager")
@Primary
public DataSourceTransactionManager transactionManager(@Qualifier("dataSource") DataSource dataSource) {
return new DataSourceTransactionManager(dataSource);
}
@Bean(name = "sqlSession")
@Primary
public SqlSessionTemplate sqlSessionTemplate(@Qualifier("sqlSessionFactory") SqlSessionFactory sqlSessionFactory) throws Exception {
return new SqlSessionTemplate(sqlSessionFactory);
}
}

View File

@@ -0,0 +1,49 @@
package com.xiaojukeji.kafka.manager.web.config;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.config.annotation.*;
import springfox.documentation.builders.ApiInfoBuilder;
import springfox.documentation.builders.PathSelectors;
import springfox.documentation.builders.RequestHandlerSelectors;
import springfox.documentation.service.ApiInfo;
import springfox.documentation.spi.DocumentationType;
import springfox.documentation.spring.web.plugins.Docket;
import springfox.documentation.swagger2.annotations.EnableSwagger2;
/**
* Swagger configuration
* @author huangyiminghappy@163.com
* @date 2019-05-09
*/
@Configuration
@EnableWebMvc
@EnableSwagger2
public class SwaggerConfig implements WebMvcConfigurer {
@Override
public void addResourceHandlers(ResourceHandlerRegistry registry) {
registry.addResourceHandler("swagger-ui.html").addResourceLocations("classpath:/META-INF/resources/");
registry.addResourceHandler("/webjars/**").addResourceLocations("classpath:/META-INF/resources/webjars/");
}
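// Only controllers under com.xiaojukeji.kafka.manager.web.api are exposed in the Swagger documentation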
@Bean
public Docket createRestApi() {
return new Docket(DocumentationType.SWAGGER_2)
.apiInfo(apiInfo())
.select()
.apis(RequestHandlerSelectors.basePackage("com.xiaojukeji.kafka.manager.web.api"))
.paths(PathSelectors.any())
.build()
.enable(true);
}
private ApiInfo apiInfo() {
return new ApiInfoBuilder()
.title("Kafka云平台-接口文档")
.description("欢迎使用滴滴出行开源kafka-manager")
.contact("huangyiminghappy@163.com")
.version("2.0")
.build();
}
}

View File

@@ -0,0 +1,46 @@
package com.xiaojukeji.kafka.manager.web.config;
import com.xiaojukeji.kafka.manager.web.inteceptor.PermissionInterceptor;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.SpringBootConfiguration;
import org.springframework.context.annotation.DependsOn;
import org.springframework.stereotype.Component;
import org.springframework.web.servlet.config.annotation.InterceptorRegistry;
import org.springframework.web.servlet.config.annotation.ResourceHandlerRegistry;
import org.springframework.web.servlet.config.annotation.ViewControllerRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
/**
* @author zengqiao
* @date 20/1/19
*/
@SpringBootConfiguration
@Component
@DependsOn({"permissionInterceptor"})
public class WebMvcConfig implements WebMvcConfigurer {
@Autowired
private PermissionInterceptor permissionInterceptor;
@Override
public void addViewControllers(ViewControllerRegistry registry) {
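// Route the SPA entry points to the same index template so front-end routing can handle /kafka/** paths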
registry.addViewController("/").setViewName("index");
registry.addViewController("/index.html").setViewName("index");
registry.addViewController("/kafka").setViewName("index");
registry.addViewController("/kafka/**").setViewName("index");
}
@Override
public void addInterceptors(InterceptorRegistry registry) {
registry.addInterceptor(permissionInterceptor).addPathPatterns("/api/v1/**");
}
@Override
public void addResourceHandlers(ResourceHandlerRegistry registry) {
// SWAGGER
registry.addResourceHandler("swagger-ui.html").addResourceLocations("classpath:/META-INF/resources/");
registry.addResourceHandler("/webjars/**").addResourceLocations("classpath:/META-INF/resources/webjars/");
// FE
registry.addResourceHandler("index.html", "/**").addResourceLocations("classpath:/templates/","classpath:/static/");
}
}

View File

@@ -0,0 +1,37 @@
package com.xiaojukeji.kafka.manager.web.converters;
import com.xiaojukeji.kafka.manager.common.entity.dto.rd.AccountDTO;
import com.xiaojukeji.kafka.manager.common.entity.vo.common.AccountVO;
import com.xiaojukeji.kafka.manager.common.entity.pojo.AccountDO;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import java.util.ArrayList;
import java.util.List;
/**
* @author zengqiao
* @date 19/5/3
*/
public class AccountConverter {
public static AccountDO convert2AccountDO(AccountDTO dto) {
AccountDO accountDO = new AccountDO();
accountDO.setUsername(dto.getUsername());
accountDO.setPassword(dto.getPassword());
accountDO.setRole(dto.getRole());
return accountDO;
}
public static List<AccountVO> convert2AccountVOList(List<AccountDO> accountDOList) {
if (ValidateUtils.isNull(accountDOList)) {
return new ArrayList<>();
}
List<AccountVO> voList = new ArrayList<>();
for (AccountDO accountDO: accountDOList) {
AccountVO vo = new AccountVO();
vo.setUsername(accountDO.getUsername());
vo.setRole(accountDO.getRole());
voList.add(vo);
}
return voList;
}
}

View File

@@ -0,0 +1,70 @@
package com.xiaojukeji.kafka.manager.web.converters;
import com.xiaojukeji.kafka.manager.common.entity.ao.gateway.TopicQuota;
import com.xiaojukeji.kafka.manager.common.bizenum.TopicAuthorityEnum;
import com.xiaojukeji.kafka.manager.common.entity.pojo.gateway.AppDO;
import com.xiaojukeji.kafka.manager.common.entity.pojo.gateway.AuthorityDO;
import com.xiaojukeji.kafka.manager.common.entity.vo.normal.QuotaVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.normal.app.AppTopicAuthorityVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.normal.app.AppVO;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
* @author zengqiao
* @date 20/5/4
*/
public class AppConverter {
public static List<AppVO> convert2AppVOList(List<AppDO> doList) {
if (ValidateUtils.isNull(doList)) {
return new ArrayList<>();
}
List<AppVO> voList = new ArrayList<>();
for (AppDO elem: doList) {
voList.add(convert2AppVO(elem));
}
return voList;
}
public static AppVO convert2AppVO(AppDO appDO) {
if (ValidateUtils.isNull(appDO)) {
return null;
}
AppVO vo = new AppVO();
vo.setAppId(appDO.getAppId());
vo.setName(appDO.getName());
vo.setPassword(appDO.getPassword());
vo.setDescription(appDO.getDescription());
vo.setPrincipals(appDO.getPrincipals());
return vo;
}
public static List<QuotaVO> convert2QuotaVOList(Long clusterId, TopicQuota quotaDO) {
if (ValidateUtils.isNull(quotaDO)) {
return null;
}
QuotaVO vo = new QuotaVO();
vo.setClusterId(clusterId);
vo.setTopicName(quotaDO.getTopicName());
vo.setAppId(quotaDO.getAppId());
vo.setProduceQuota(quotaDO.getProduceQuota());
vo.setConsumeQuota(quotaDO.getConsumeQuota());
return Arrays.asList(vo);
}
public static AppTopicAuthorityVO convert2AppTopicAuthorityVO(String appId,
String topicName,
AuthorityDO authority) {
AppTopicAuthorityVO vo = new AppTopicAuthorityVO();
vo.setAppId(appId);
vo.setTopicName(topicName);
vo.setAccess(ValidateUtils.isNull(authority) ? TopicAuthorityEnum.DENY.getCode() : authority.getAccess());
return vo;
}
}
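
For reference, a minimal usage sketch of the converter above (hypothetical, not part of the commit; the AppDO setters are assumed to mirror the getters that convert2AppVO reads):

// Hypothetical usage sketch: build an AppDO, convert it, and map a missing authority to DENY.
AppDO appDO = new AppDO();
appDO.setAppId("demo-app");          // assumed setter, mirrors getAppId()
appDO.setName("demo");               // assumed setter
appDO.setPassword("secret");         // assumed setter
appDO.setDescription("demo app");    // assumed setter
appDO.setPrincipals("user1,user2");  // assumed setter
AppVO appVO = AppConverter.convert2AppVO(appDO);
// convert2AppTopicAuthorityVO falls back to TopicAuthorityEnum.DENY when the AuthorityDO is null.
AppTopicAuthorityVO authorityVO = AppConverter.convert2AppTopicAuthorityVO("demo-app", "demo-topic", null);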

View File

@@ -0,0 +1,191 @@
package com.xiaojukeji.kafka.manager.web.converters;
import com.xiaojukeji.kafka.manager.common.entity.ao.analysis.AnalysisBrokerDTO;
import com.xiaojukeji.kafka.manager.common.entity.ao.analysis.AnalysisTopicDTO;
import com.xiaojukeji.kafka.manager.common.entity.metrics.BrokerMetrics;
import com.xiaojukeji.kafka.manager.common.utils.jmx.JmxConstant;
import com.xiaojukeji.kafka.manager.common.zookeeper.znode.brokers.BrokerMetadata;
import com.xiaojukeji.kafka.manager.common.zookeeper.znode.brokers.PartitionState;
import com.xiaojukeji.kafka.manager.common.zookeeper.znode.brokers.TopicMetadata;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.common.entity.pojo.BrokerMetricsDO;
import com.xiaojukeji.kafka.manager.common.entity.pojo.RegionDO;
import com.xiaojukeji.kafka.manager.service.cache.PhysicalClusterMetadataManager;
import com.xiaojukeji.kafka.manager.common.entity.vo.rd.broker.*;
import com.xiaojukeji.kafka.manager.service.utils.MetricsConvertUtils;
import java.util.*;
/**
* @author zengqiao
* @date 19/4/21
*/
public class BrokerModelConverter {
public static List<BrokerPartitionVO> convert2BrokerPartitionVOList(Long clusterId,
Integer brokerId,
Map<String, List<PartitionState>> stateMap) {
List<BrokerPartitionVO> voList = new ArrayList<>();
for (String topicName : stateMap.keySet()) {
BrokerPartitionVO vo = convert2BrokerPartitionsVO(clusterId, brokerId, topicName, stateMap.get(topicName));
if (ValidateUtils.isNull(vo)) {
continue;
}
voList.add(vo);
}
return voList;
}
private static BrokerPartitionVO convert2BrokerPartitionsVO(Long clusterId,
Integer brokerId,
String topicName,
List<PartitionState> stateList) {
TopicMetadata topicMetadata = PhysicalClusterMetadataManager.getTopicMetadata(clusterId, topicName);
if (ValidateUtils.isNull(stateList) || ValidateUtils.isNull(topicMetadata)) {
return null;
}
Set<Integer> leaderPartitionIdSet = new HashSet<>();
Set<Integer> followerPartitionIdSet = new HashSet<>();
Set<Integer> notUnderReplicatedPartitionIdSet = new HashSet<>();
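// Classify each partition of the topic relative to this broker: leader hosted here, replica hosted here, and replica hosted here whose ISR is smaller than the replica list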
for (PartitionState partitionState : stateList) {
List<Integer> replicaIdList =
topicMetadata.getPartitionMap().getPartitions().get(partitionState.getPartitionId());
if (brokerId.equals(partitionState.getLeader())) {
leaderPartitionIdSet.add(partitionState.getPartitionId());
}
if (replicaIdList.contains(brokerId)) {
followerPartitionIdSet.add(partitionState.getPartitionId());
}
if (replicaIdList.contains(brokerId) && partitionState.getIsr().size() < replicaIdList.size()) {
notUnderReplicatedPartitionIdSet.add(partitionState.getPartitionId());
}
}
BrokerPartitionVO vo = new BrokerPartitionVO();
vo.setTopicName(topicName);
vo.setLeaderPartitionList(new ArrayList<>(leaderPartitionIdSet));
vo.setFollowerPartitionIdList(new ArrayList<>(followerPartitionIdSet));
vo.setNotUnderReplicatedPartitionIdList(new ArrayList<>(notUnderReplicatedPartitionIdSet));
vo.setUnderReplicated(notUnderReplicatedPartitionIdSet.isEmpty());
return vo;
}
public static List<BrokerMetricsVO> convert2BrokerMetricsVOList(List<BrokerMetricsDO> metricsDOList) {
if (ValidateUtils.isNull(metricsDOList)) {
return new ArrayList<>();
}
List<BrokerMetrics> metricsList = MetricsConvertUtils.convert2BrokerMetricsList(metricsDOList);
List<BrokerMetricsVO> voList = new ArrayList<>();
for (BrokerMetrics metrics : metricsList) {
if (ValidateUtils.isNull(metrics)) {
continue;
}
BrokerMetricsVO vo = new BrokerMetricsVO();
vo.setHealthScore(metrics.getSpecifiedMetrics(JmxConstant.HEALTH_SCORE, Integer.class));
vo.setBytesInPerSec(
metrics.getSpecifiedMetrics("BytesInPerSecOneMinuteRate")
);
vo.setBytesOutPerSec(
metrics.getSpecifiedMetrics("BytesOutPerSecOneMinuteRate")
);
vo.setBytesRejectedPerSec(
metrics.getSpecifiedMetrics("BytesRejectedPerSecOneMinuteRate")
);
vo.setMessagesInPerSec(
metrics.getSpecifiedMetrics("MessagesInPerSecOneMinuteRate")
);
vo.setProduceRequestPerSec(
metrics.getSpecifiedMetrics("ProduceRequestsPerSecOneMinuteRate")
);
vo.setFetchConsumerRequestPerSec(
metrics.getSpecifiedMetrics("FetchConsumerRequestsPerSecOneMinuteRate")
);
vo.setRequestHandlerIdlPercent(
metrics.getSpecifiedMetrics("RequestHandlerAvgIdlePercentOneMinuteRate")
);
vo.setNetworkProcessorIdlPercent(
metrics.getSpecifiedMetrics("NetworkProcessorAvgIdlePercentValue")
);
vo.setRequestQueueSize(
metrics.getSpecifiedMetrics("RequestQueueSizeValue", Integer.class)
);
vo.setResponseQueueSize(
metrics.getSpecifiedMetrics("ResponseQueueSizeValue", Integer.class)
);
vo.setLogFlushTime(
metrics.getSpecifiedMetrics("LogFlushRateAndTimeMs95thPercentile")
);
vo.setFailFetchRequestPerSec(
metrics.getSpecifiedMetrics("FailedFetchRequestsPerSecOneMinuteRate")
);
vo.setFailProduceRequestPerSec(
metrics.getSpecifiedMetrics("FailedProduceRequestsPerSecOneMinuteRate")
);
vo.setTotalTimeFetchConsumer99Th(
metrics.getSpecifiedMetrics("FetchConsumerTotalTimeMs99thPercentile")
);
vo.setTotalTimeProduce99Th(
metrics.getSpecifiedMetrics("ProduceTotalTimeMs99thPercentile")
);
vo.setGmtCreate(
metrics.getSpecifiedMetrics("CreateTime", Long.class)
);
voList.add(vo);
}
return voList;
}
public static AnalysisBrokerVO convert2AnalysisBrokerVO(AnalysisBrokerDTO analysisBrokerDTO) {
if (analysisBrokerDTO == null) {
return null;
}
AnalysisBrokerVO analysisBrokerVO = new AnalysisBrokerVO();
analysisBrokerVO.setClusterId(analysisBrokerDTO.getClusterId());
analysisBrokerVO.setBrokerId(analysisBrokerDTO.getBrokerId());
analysisBrokerVO.setBaseTime(System.currentTimeMillis());
analysisBrokerVO.setTopicAnalysisVOList(new ArrayList<>());
analysisBrokerVO.setBytesIn(analysisBrokerDTO.getBytesIn());
analysisBrokerVO.setBytesOut(analysisBrokerDTO.getBytesOut());
analysisBrokerVO.setMessagesIn(analysisBrokerDTO.getMessagesIn());
analysisBrokerVO.setTotalProduceRequests(analysisBrokerDTO.getTotalProduceRequests());
analysisBrokerVO.setTotalFetchRequests(analysisBrokerDTO.getTotalFetchRequests());
for (AnalysisTopicDTO analysisTopicDTO : analysisBrokerDTO.getTopicAnalysisVOList()) {
AnalysisTopicVO analysisTopicVO = new AnalysisTopicVO();
analysisTopicVO.setTopicName(analysisTopicDTO.getTopicName());
analysisTopicVO.setBytesIn(String.format("%.2f", analysisTopicDTO.getBytesIn()));
analysisTopicVO.setBytesInRate(String.format("%.2f", analysisTopicDTO.getBytesInRate()));
analysisTopicVO.setBytesOut(String.format("%.2f", analysisTopicDTO.getBytesOut()));
analysisTopicVO.setBytesOutRate(String.format("%.2f", analysisTopicDTO.getBytesOutRate()));
analysisTopicVO.setMessagesIn(String.format("%.2f", analysisTopicDTO.getMessagesIn()));
analysisTopicVO.setMessagesInRate(String.format("%.2f", analysisTopicDTO.getMessagesInRate()));
analysisTopicVO.setTotalFetchRequests(String.format("%.2f", analysisTopicDTO.getTotalFetchRequests()));
analysisTopicVO.setTotalFetchRequestsRate(String.format("%.2f", analysisTopicDTO.getTotalFetchRequestsRate()));
analysisTopicVO.setTotalProduceRequests(String.format("%.2f", analysisTopicDTO.getTotalProduceRequests()));
analysisTopicVO.setTotalProduceRequestsRate(String.format("%.2f", analysisTopicDTO.getTotalProduceRequestsRate()));
analysisBrokerVO.getTopicAnalysisVOList().add(analysisTopicVO);
}
return analysisBrokerVO;
}
public static List<RdBrokerBasicVO> convert2RdBrokerBasicVO(Long clusterId,
List<Integer> brokerIdList,
Map<Integer, RegionDO> regionMap) {
List<RdBrokerBasicVO> basicList = new ArrayList<>();
for (Integer brokerId : brokerIdList) {
BrokerMetadata metadata = PhysicalClusterMetadataManager.getBrokerMetadata(clusterId, brokerId);
if (ValidateUtils.isNull(metadata)) {
continue;
}
RdBrokerBasicVO basicInfoVO = new RdBrokerBasicVO();
basicInfoVO.setBrokerId(brokerId);
basicInfoVO.setHost(metadata.getHost());
if (regionMap.containsKey(brokerId)) {
basicInfoVO.setLogicClusterId(regionMap.get(brokerId).getId());
}
basicList.add(basicInfoVO);
}
return basicList;
}
}

View File

@@ -0,0 +1,252 @@
package com.xiaojukeji.kafka.manager.web.converters;
import com.xiaojukeji.kafka.manager.common.entity.ao.BrokerOverviewDTO;
import com.xiaojukeji.kafka.manager.common.entity.ao.ClusterDetailDTO;
import com.xiaojukeji.kafka.manager.common.entity.ao.cluster.ClusterBrokerStatus;
import com.xiaojukeji.kafka.manager.common.entity.ao.cluster.LogicalCluster;
import com.xiaojukeji.kafka.manager.common.entity.ao.cluster.LogicalClusterMetrics;
import com.xiaojukeji.kafka.manager.common.entity.dto.rd.ClusterDTO;
import com.xiaojukeji.kafka.manager.common.entity.metrics.ClusterMetrics;
import com.xiaojukeji.kafka.manager.common.entity.vo.common.BrokerOverviewVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.common.TopicThrottleVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.normal.cluster.LogicClusterVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.normal.cluster.NormalClusterMetricsVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.normal.cluster.TopicMetadataVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.rd.KafkaControllerVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.rd.cluster.ClusterBrokerStatusVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.rd.cluster.ClusterDetailVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.rd.cluster.RdClusterMetricsVO;
import com.xiaojukeji.kafka.manager.common.utils.CopyUtils;
import com.xiaojukeji.kafka.manager.common.utils.ListUtils;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.common.utils.jmx.JmxConstant;
import com.xiaojukeji.kafka.manager.common.zookeeper.znode.brokers.TopicMetadata;
import com.xiaojukeji.kafka.manager.common.entity.metrics.TopicThrottledMetrics;
import com.xiaojukeji.kafka.manager.common.entity.pojo.ClusterDO;
import com.xiaojukeji.kafka.manager.common.entity.pojo.ClusterMetricsDO;
import com.xiaojukeji.kafka.manager.common.entity.pojo.ControllerDO;
import com.xiaojukeji.kafka.manager.common.entity.pojo.RegionDO;
import com.xiaojukeji.kafka.manager.service.cache.PhysicalClusterMetadataManager;
import com.xiaojukeji.kafka.manager.service.utils.MetricsConvertUtils;
import java.util.*;
/**
* Cluster-related model conversions
* @author huangyiminghappy@163.com
* @date 2019/3/15
*/
public class ClusterModelConverter {
public static List<LogicClusterVO> convert2LogicClusterVOList(List<LogicalCluster> dtoList) {
if (ValidateUtils.isEmptyList(dtoList)) {
return new ArrayList<>();
}
List<LogicClusterVO> voList = new ArrayList<>();
for (LogicalCluster elem: dtoList) {
voList.add(convert2LogicClusterVO(elem));
}
return voList;
}
public static LogicClusterVO convert2LogicClusterVO(LogicalCluster logicalCluster) {
LogicClusterVO vo = new LogicClusterVO();
CopyUtils.copyProperties(vo, logicalCluster);
vo.setClusterId(logicalCluster.getLogicalClusterId());
vo.setClusterName(logicalCluster.getLogicalClusterName());
return vo;
}
public static List<KafkaControllerVO> convert2KafkaControllerVOList(List<ControllerDO> doList) {
if (ValidateUtils.isNull(doList)) {
return new ArrayList<>();
}
List<KafkaControllerVO> voList = new ArrayList<>();
for (ControllerDO elem: doList) {
KafkaControllerVO vo = new KafkaControllerVO();
vo.setBrokerId(elem.getBrokerId());
vo.setHost(elem.getHost());
vo.setVersion(elem.getVersion());
vo.setTimestamp(elem.getTimestamp());
voList.add(vo);
}
return voList;
}
public static ClusterDO convert2ClusterDO(ClusterDTO reqObj) {
ClusterDO clusterDO = new ClusterDO();
CopyUtils.copyProperties(clusterDO, reqObj);
clusterDO.setId(reqObj.getClusterId());
clusterDO.setSecurityProperties(
ValidateUtils.isNull(clusterDO.getSecurityProperties())? "": clusterDO.getSecurityProperties()
);
return clusterDO;
}
public static ClusterDetailVO convert2ClusterDetailVO(ClusterDetailDTO dto) {
if (ValidateUtils.isNull(dto)) {
return null;
}
ClusterDetailVO vo = new ClusterDetailVO();
CopyUtils.copyProperties(vo, dto);
if (ValidateUtils.isNull(vo.getRegionNum())) {
vo.setRegionNum(0);
}
return vo;
}
public static List<ClusterDetailVO> convert2ClusterDetailVOList(List<ClusterDetailDTO> dtoList) {
if (ValidateUtils.isNull(dtoList)) {
return new ArrayList<>();
}
List<ClusterDetailVO> voList = new ArrayList<>();
for (ClusterDetailDTO dto: dtoList) {
voList.add(convert2ClusterDetailVO(dto));
}
return voList;
}
public static List<NormalClusterMetricsVO> convert2NormalClusterMetricsVOList(
List<LogicalClusterMetrics> dataList) {
if (ValidateUtils.isEmptyList(dataList)) {
return new ArrayList<>();
}
List<NormalClusterMetricsVO> voList = new ArrayList<>();
for (LogicalClusterMetrics data: dataList) {
NormalClusterMetricsVO vo = new NormalClusterMetricsVO();
CopyUtils.copyProperties(vo, data);
voList.add(vo);
}
return voList;
}
public static List<RdClusterMetricsVO> convert2RdClusterMetricsVOList(List<ClusterMetricsDO> dataList) {
if (ValidateUtils.isNull(dataList)) {
return new ArrayList<>();
}
List<ClusterMetrics> metricsList = MetricsConvertUtils.convert2ClusterMetricsList(dataList);
List<RdClusterMetricsVO> voList = new ArrayList<>();
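// Each metrics record carries topic/partition/broker counts plus one-minute traffic rates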
for (ClusterMetrics metrics: metricsList) {
RdClusterMetricsVO vo = new RdClusterMetricsVO();
vo.setClusterId(metrics.getClusterId());
vo.setTopicNum(metrics.getSpecifiedMetrics(JmxConstant.TOPIC_NUM));
vo.setPartitionNum(metrics.getSpecifiedMetrics(JmxConstant.PARTITION_NUM));
vo.setBrokerNum(metrics.getSpecifiedMetrics(JmxConstant.BROKER_NUM));
vo.setBytesInPerSec(metrics.getSpecifiedMetrics("BytesInPerSecOneMinuteRate"));
vo.setBytesOutPerSec(metrics.getSpecifiedMetrics("BytesOutPerSecOneMinuteRate"));
vo.setBytesRejectedPerSec(metrics.getSpecifiedMetrics("BytesRejectedPerSecOneMinuteRate"));
vo.setMessagesInPerSec(metrics.getSpecifiedMetrics("MessagesInPerSecOneMinuteRate"));
vo.setGmtCreate(metrics.getSpecifiedMetrics(JmxConstant.CREATE_TIME, Long.class));
voList.add(vo);
}
return voList;
}
public static List<BrokerOverviewVO> convert2BrokerOverviewList(List<BrokerOverviewDTO> brokerOverviewDTOList,
List<RegionDO> regionDOList) {
if (ValidateUtils.isEmptyList(brokerOverviewDTOList)) {
return new ArrayList<>();
}
Map<Integer, String> brokerIdRegionNameMap = convert2BrokerIdRegionNameMap(regionDOList);
List<BrokerOverviewVO> brokerOverviewVOList = new ArrayList<>();
for (BrokerOverviewDTO brokerOverviewDTO: brokerOverviewDTOList) {
BrokerOverviewVO brokerOverviewVO = new BrokerOverviewVO();
brokerOverviewVO.setBrokerId(brokerOverviewDTO.getBrokerId());
brokerOverviewVO.setHost(brokerOverviewDTO.getHost());
brokerOverviewVO.setPort(brokerOverviewDTO.getPort());
brokerOverviewVO.setJmxPort(brokerOverviewDTO.getJmxPort());
brokerOverviewVO.setStartTime(brokerOverviewDTO.getStartTime());
brokerOverviewVO.setByteIn(brokerOverviewDTO.getByteIn());
brokerOverviewVO.setByteOut(brokerOverviewDTO.getByteOut());
brokerOverviewVO.setPartitionCount(brokerOverviewDTO.getPartitionCount());
brokerOverviewVO.setUnderReplicated(brokerOverviewDTO.getUnderReplicated());
brokerOverviewVO.setUnderReplicatedPartitions(brokerOverviewDTO.getUnderReplicatedPartitions());
brokerOverviewVO.setStatus(brokerOverviewDTO.getStatus());
brokerOverviewVO.setKafkaVersion(brokerOverviewDTO.getKafkaVersion());
brokerOverviewVO.setLeaderCount(brokerOverviewDTO.getLeaderCount());
brokerOverviewVO.setRegionName(brokerIdRegionNameMap.getOrDefault(brokerOverviewDTO.getBrokerId(), ""));
brokerOverviewVO.setPeakFlowStatus(brokerOverviewDTO.getPeakFlowStatus());
brokerOverviewVOList.add(brokerOverviewVO);
}
return brokerOverviewVOList;
}
private static Map<Integer, String> convert2BrokerIdRegionNameMap(List<RegionDO> regionDOList) {
Map<Integer, String> brokerIdRegionNameMap = new HashMap<>();
if (regionDOList == null) {
regionDOList = new ArrayList<>();
}
for (RegionDO regionDO: regionDOList) {
List<Integer> brokerIdList = ListUtils.string2IntList(regionDO.getBrokerList());
if (brokerIdList == null || brokerIdList.isEmpty()) {
continue;
}
for (Integer brokerId: brokerIdList) {
brokerIdRegionNameMap.put(brokerId, regionDO.getName());
}
}
return brokerIdRegionNameMap;
}
public static List<TopicMetadataVO> convert2TopicMetadataVOList(List<TopicMetadata> metadataList) {
if (ValidateUtils.isEmptyList(metadataList)) {
return new ArrayList<>();
}
List<TopicMetadataVO> voList = new ArrayList<>();
for (TopicMetadata topicMetadata: metadataList) {
TopicMetadataVO vo = new TopicMetadataVO();
vo.setTopicName(topicMetadata.getTopic());
voList.add(vo);
}
return voList;
}
public static List<TopicMetadataVO> convert2TopicMetadataVOList(Long clusterId) {
if (ValidateUtils.isNull(clusterId)) {
return new ArrayList<>();
}
List<TopicMetadataVO> voList = new ArrayList<>();
for (String topicName: PhysicalClusterMetadataManager.getTopicNameList(clusterId)) {
TopicMetadata topicMetadata = PhysicalClusterMetadataManager.getTopicMetadata(clusterId, topicName);
if (ValidateUtils.isNull(topicMetadata)) {
continue;
}
try {
TopicMetadataVO vo = new TopicMetadataVO();
vo.setTopicName(topicMetadata.getTopic());
vo.setPartitionIdList(new ArrayList<>(topicMetadata.getPartitionMap().getPartitions().keySet()));
voList.add(vo);
} catch (Exception e) {
// Skip topics whose partition metadata cannot be read
}
}
return voList;
}
public static List<TopicThrottleVO> convert2TopicThrottleVOList(List<TopicThrottledMetrics> metricsList) {
if (ValidateUtils.isNull(metricsList)) {
return new ArrayList<>();
}
List<TopicThrottleVO> voList = new ArrayList<>();
for (TopicThrottledMetrics metrics: metricsList) {
TopicThrottleVO vo = new TopicThrottleVO();
vo.setTopicName(metrics.getTopicName());
vo.setAppId(metrics.getAppId());
vo.setBrokerIdList(new ArrayList<>(metrics.getBrokerIdSet()));
vo.setThrottleClientType(metrics.getClientType().getName());
voList.add(vo);
}
return voList;
}
public static ClusterBrokerStatusVO convert2ClusterBrokerStatusVO(ClusterBrokerStatus clusterBrokerStatus) {
if (ValidateUtils.isNull(clusterBrokerStatus)) {
return null;
}
ClusterBrokerStatusVO vo = new ClusterBrokerStatusVO();
vo.setBrokerBytesInStatusList(clusterBrokerStatus.getBrokerBytesInStatusList());
vo.setBrokerReplicaStatusList(clusterBrokerStatus.getBrokerReplicaStatusList());
return vo;
}
}

View File

@@ -0,0 +1,116 @@
package com.xiaojukeji.kafka.manager.web.converters;
import com.xiaojukeji.kafka.manager.common.entity.pojo.ClusterDO;
import com.xiaojukeji.kafka.manager.common.entity.pojo.ClusterTaskDO;
import com.xiaojukeji.kafka.manager.common.entity.pojo.KafkaFileDO;
import com.xiaojukeji.kafka.manager.common.entity.vo.op.task.ClusterTaskMetadataVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.op.task.ClusterTaskStatusVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.op.task.ClusterTaskSubStatusVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.op.task.ClusterTaskVO;
import com.xiaojukeji.kafka.manager.kcm.common.entry.ao.ClusterTaskStatus;
import com.xiaojukeji.kafka.manager.common.utils.ListUtils;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.kcm.common.entry.ao.ClusterTaskSubStatus;
import com.xiaojukeji.kafka.manager.kcm.common.bizenum.ClusterTaskSubStateEnum;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
* @author zengqiao
* @date 20/5/21
*/
public class ClusterTaskModelConverter {
public static List<ClusterTaskVO> convert2ClusterTaskVOList(List<ClusterTaskDO> doList,
Map<Long, String> clusterNameMap) {
if (ValidateUtils.isEmptyList(doList)) {
return new ArrayList<>();
}
List<ClusterTaskVO> voList = new ArrayList<>();
for (ClusterTaskDO clusterTaskDO: doList) {
ClusterTaskVO vo = new ClusterTaskVO();
vo.setTaskId(clusterTaskDO.getId());
vo.setTaskType(clusterTaskDO.getTaskType());
vo.setClusterId(clusterTaskDO.getClusterId());
vo.setClusterName(clusterNameMap.getOrDefault(clusterTaskDO.getClusterId(), ""));
vo.setStatus(clusterTaskDO.getTaskStatus());
vo.setOperator(clusterTaskDO.getOperator());
vo.setCreateTime(clusterTaskDO.getCreateTime().getTime());
voList.add(vo);
}
return voList;
}
public static ClusterTaskMetadataVO convert2ClusterTaskMetadataVO(ClusterTaskDO clusterTaskDO,
ClusterDO clusterDO,
KafkaFileDO kafkaFileDO) {
if (ValidateUtils.isNull(clusterTaskDO)) {
return null;
}
ClusterTaskMetadataVO vo = new ClusterTaskMetadataVO();
vo.setTaskId(clusterTaskDO.getId());
vo.setClusterId(clusterTaskDO.getClusterId());
vo.setClusterName(ValidateUtils.isNull(clusterDO)? "": clusterDO.getClusterName());
vo.setHostList(ListUtils.string2StrList(clusterTaskDO.getHostList()));
vo.setPauseHostList(ListUtils.string2StrList(clusterTaskDO.getPauseHostList()));
vo.setRollbackHostList(ListUtils.string2StrList(clusterTaskDO.getRollbackHostList()));
vo.setRollbackPauseHostList(ListUtils.string2StrList(clusterTaskDO.getRollbackPauseHostList()));
vo.setKafkaPackageName(clusterTaskDO.getKafkaPackage());
vo.setKafkaPackageMd5(clusterTaskDO.getKafkaPackageMd5());
vo.setServerPropertiesFileId(ValidateUtils.isNull(kafkaFileDO)? null: kafkaFileDO.getId());
vo.setServerPropertiesName(clusterTaskDO.getServerProperties());
vo.setServerPropertiesMd5(clusterTaskDO.getServerPropertiesMd5());
vo.setOperator(clusterTaskDO.getOperator());
vo.setGmtCreate(clusterTaskDO.getCreateTime().getTime());
return vo;
}
public static ClusterTaskStatusVO convert2ClusterTaskStatusVO(ClusterTaskStatus clusterTaskStatus,
Map<String, List<String>> hostRoleMap) {
if (ValidateUtils.isNull(clusterTaskStatus)) {
return null;
}
ClusterTaskStatusVO clusterTaskStatusVO = new ClusterTaskStatusVO();
clusterTaskStatusVO.setTaskId(clusterTaskStatus.getTaskId());
if (ValidateUtils.isNull(clusterTaskStatus.getStatus())) {
clusterTaskStatusVO.setStatus(null);
} else {
clusterTaskStatusVO.setStatus(clusterTaskStatus.getStatus().getCode());
}
clusterTaskStatusVO.setRollback(clusterTaskStatus.getRollback());
clusterTaskStatusVO.setSumCount(clusterTaskStatus.getSubStatusList().size());
clusterTaskStatusVO.setSuccessCount(0);
clusterTaskStatusVO.setFailedCount(0);
clusterTaskStatusVO.setRunningCount(0);
clusterTaskStatusVO.setWaitingCount(0);
clusterTaskStatusVO.setSubTaskStatusList(new ArrayList<>());
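// Build the per-host sub-status list and count waiting / running / failed / succeeded sub-tasks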
for (ClusterTaskSubStatus elem: clusterTaskStatus.getSubStatusList()) {
ClusterTaskSubStatusVO vo = new ClusterTaskSubStatusVO();
vo.setHostname(elem.getHostname());
vo.setStatus(elem.getStatus().getCode());
vo.setKafkaRoles(
ListUtils.strList2String(hostRoleMap.getOrDefault(elem.getHostname(), new ArrayList<>()))
);
vo.setGroupId(elem.getGroupNum());
// Bucket this sub-task's state into the matching counter
if (ClusterTaskSubStateEnum.WAITING.equals(elem.getStatus())) {
clusterTaskStatusVO.setWaitingCount(clusterTaskStatusVO.getWaitingCount() + 1);
} else if (ClusterTaskSubStateEnum.RUNNING.equals(elem.getStatus())
|| ClusterTaskSubStateEnum.KILLING.equals(elem.getStatus())) {
clusterTaskStatusVO.setRunningCount(clusterTaskStatusVO.getRunningCount() + 1);
} else if (ClusterTaskSubStateEnum.FAILED.equals(elem.getStatus())
|| ClusterTaskSubStateEnum.TIMEOUT.equals(elem.getStatus())
|| ClusterTaskSubStateEnum.CANCELED.equals(elem.getStatus())
|| ClusterTaskSubStateEnum.IGNORED.equals(elem.getStatus())
|| ClusterTaskSubStateEnum.KILL_FAILED.equals(elem.getStatus())) {
clusterTaskStatusVO.setFailedCount(clusterTaskStatusVO.getFailedCount() + 1);
} else if (ClusterTaskSubStateEnum.SUCCEED.equals(elem.getStatus())) {
clusterTaskStatusVO.setSuccessCount(clusterTaskStatusVO.getSuccessCount() + 1);
}
clusterTaskStatusVO.getSubTaskStatusList().add(vo);
}
return clusterTaskStatusVO;
}
}

View File

@@ -0,0 +1,131 @@
package com.xiaojukeji.kafka.manager.web.converters;
import com.xiaojukeji.kafka.manager.common.entity.ao.TopicDiskLocation;
import com.xiaojukeji.kafka.manager.common.entity.ao.topic.TopicOverview;
import com.xiaojukeji.kafka.manager.common.entity.metrics.BaseMetrics;
import com.xiaojukeji.kafka.manager.common.entity.metrics.BrokerMetrics;
import com.xiaojukeji.kafka.manager.common.entity.vo.rd.broker.BrokerDiskTopicVO;
import com.xiaojukeji.kafka.manager.common.utils.CopyUtils;
import com.xiaojukeji.kafka.manager.common.entity.vo.common.RealTimeMetricsVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.common.TopicOverviewVO;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.service.utils.MetricsConvertUtils;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
* @author zengqiao
* @date 20/4/22
*/
public class CommonModelConverter {
public static RealTimeMetricsVO convert2RealTimeMetricsVO(List<BrokerMetrics> dataList) {
if (ValidateUtils.isNull(dataList)) {
return null;
}
BaseMetrics baseMetrics = MetricsConvertUtils.merge2BaseMetricsByAdd(dataList);
return convert2RealTimeMetricsVO(baseMetrics);
}
public static RealTimeMetricsVO convert2RealTimeMetricsVO(BaseMetrics metrics) {
if (ValidateUtils.isNull(metrics)) {
return null;
}
RealTimeMetricsVO vo = new RealTimeMetricsVO();
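// Each list below holds the mean, 1-minute, 5-minute and 15-minute rates, in that order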
vo.setByteIn(Arrays.asList(
metrics.getSpecifiedMetrics("BytesInPerSecMeanRate", Double.class),
metrics.getSpecifiedMetrics("BytesInPerSecOneMinuteRate", Double.class),
metrics.getSpecifiedMetrics("BytesInPerSecFiveMinuteRate", Double.class),
metrics.getSpecifiedMetrics("BytesInPerSecFifteenMinuteRate", Double.class)
));
vo.setByteOut(Arrays.asList(
metrics.getSpecifiedMetrics("BytesOutPerSecMeanRate", Double.class),
metrics.getSpecifiedMetrics("BytesOutPerSecOneMinuteRate", Double.class),
metrics.getSpecifiedMetrics("BytesOutPerSecFiveMinuteRate", Double.class),
metrics.getSpecifiedMetrics("BytesOutPerSecFifteenMinuteRate", Double.class)
));
vo.setMessageIn(Arrays.asList(
metrics.getSpecifiedMetrics("MessagesInPerSecMeanRate", Double.class),
metrics.getSpecifiedMetrics("MessagesInPerSecOneMinuteRate", Double.class),
metrics.getSpecifiedMetrics("MessagesInPerSecFiveMinuteRate", Double.class),
metrics.getSpecifiedMetrics("MessagesInPerSecFifteenMinuteRate", Double.class)
));
vo.setByteRejected(Arrays.asList(
metrics.getSpecifiedMetrics("BytesRejectedPerSecMeanRate", Double.class),
metrics.getSpecifiedMetrics("BytesRejectedPerSecOneMinuteRate", Double.class),
metrics.getSpecifiedMetrics("BytesRejectedPerSecFiveMinuteRate", Double.class),
metrics.getSpecifiedMetrics("BytesRejectedPerSecFifteenMinuteRate", Double.class)
));
vo.setFailedProduceRequest(Arrays.asList(
metrics.getSpecifiedMetrics("FailedProduceRequestsPerSecMeanRate", Double.class),
metrics.getSpecifiedMetrics("FailedProduceRequestsPerSecOneMinuteRate", Double.class),
metrics.getSpecifiedMetrics("FailedProduceRequestsPerSecFiveMinuteRate", Double.class),
metrics.getSpecifiedMetrics("FailedProduceRequestsPerSecFifteenMinuteRate", Double.class)
));
vo.setFailedFetchRequest(Arrays.asList(
metrics.getSpecifiedMetrics("FailedFetchRequestsPerSecMeanRate", Double.class),
metrics.getSpecifiedMetrics("FailedFetchRequestsPerSecOneMinuteRate", Double.class),
metrics.getSpecifiedMetrics("FailedFetchRequestsPerSecFiveMinuteRate", Double.class),
metrics.getSpecifiedMetrics("FailedFetchRequestsPerSecFifteenMinuteRate", Double.class)
));
vo.setTotalProduceRequest(Arrays.asList(
metrics.getSpecifiedMetrics("TotalProduceRequestsPerSecMeanRate", Double.class),
metrics.getSpecifiedMetrics("TotalProduceRequestsPerSecOneMinuteRate", Double.class),
metrics.getSpecifiedMetrics("TotalProduceRequestsPerSecFiveMinuteRate", Double.class),
metrics.getSpecifiedMetrics("TotalProduceRequestsPerSecFifteenMinuteRate", Double.class)
));
vo.setTotalFetchRequest(Arrays.asList(
metrics.getSpecifiedMetrics("TotalFetchRequestsPerSecMeanRate", Double.class),
metrics.getSpecifiedMetrics("TotalFetchRequestsPerSecOneMinuteRate", Double.class),
metrics.getSpecifiedMetrics("TotalFetchRequestsPerSecFiveMinuteRate", Double.class),
metrics.getSpecifiedMetrics("TotalFetchRequestsPerSecFifteenMinuteRate", Double.class)
));
return vo;
}
public static List<TopicOverviewVO> convert2TopicOverviewVOList(Long clusterId,
List<TopicOverview> dtoList) {
if (ValidateUtils.isEmptyList(dtoList)) {
return new ArrayList<>();
}
List<TopicOverviewVO> voList = new ArrayList<>();
for (TopicOverview dto : dtoList) {
TopicOverviewVO vo = new TopicOverviewVO();
CopyUtils.copyProperties(vo, dto);
vo.setClusterId(clusterId);
voList.add(vo);
}
return voList;
}
public static List<BrokerDiskTopicVO> convert2BrokerDiskTopicVOList(List<TopicDiskLocation> locationList) {
if (ValidateUtils.isEmptyList(locationList)) {
return new ArrayList<>();
}
List<BrokerDiskTopicVO> voList = new ArrayList<>();
for (TopicDiskLocation location: locationList) {
BrokerDiskTopicVO vo = new BrokerDiskTopicVO();
vo.setClusterId(location.getClusterId());
vo.setTopicName(location.getTopicName());
vo.setBrokerId(location.getBrokerId());
vo.setDiskName(location.getDiskName());
vo.setLeaderPartitions(location.getLeaderPartitions());
vo.setFollowerPartitions(location.getFollowerPartitions());
vo.setUnderReplicated(location.getUnderReplicated());
vo.setNotUnderReplicatedPartitions(location.getUnderReplicatedPartitions());
voList.add(vo);
}
return voList;
}
}

View File

@@ -0,0 +1,33 @@
package com.xiaojukeji.kafka.manager.web.converters;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.common.entity.pojo.ConfigDO;
import com.xiaojukeji.kafka.manager.common.entity.vo.rd.ConfigVO;
import java.util.ArrayList;
import java.util.List;
/**
* @author zengqiao
* @date 20/3/19
*/
public class ConfigConverter {
public static List<ConfigVO> convert2ConfigVOList(List<ConfigDO> doList) {
if (ValidateUtils.isEmptyList(doList)) {
return new ArrayList<>();
}
List<ConfigVO> voList = new ArrayList<>();
for (ConfigDO configDO: doList) {
ConfigVO vo = new ConfigVO();
vo.setId(configDO.getId());
vo.setConfigKey(configDO.getConfigKey());
vo.setConfigValue(configDO.getConfigValue());
vo.setConfigDescription(configDO.getConfigDescription());
vo.setGmtCreate(configDO.getGmtCreate().getTime());
vo.setGmtModify(configDO.getGmtModify().getTime());
voList.add(vo);
}
return voList;
}
}

View File

@@ -0,0 +1,58 @@
package com.xiaojukeji.kafka.manager.web.converters;
import com.xiaojukeji.kafka.manager.common.entity.vo.normal.consumer.ConsumerGroupDetailVO;
import com.xiaojukeji.kafka.manager.common.entity.ao.consumer.ConsumeDetailDTO;
import com.xiaojukeji.kafka.manager.common.entity.ao.consumer.ConsumerGroupDTO;
import com.xiaojukeji.kafka.manager.common.entity.vo.normal.consumer.ConsumerGroupVO;
import com.xiaojukeji.kafka.manager.common.utils.ListUtils;
import java.util.ArrayList;
import java.util.List;
/**
* @author zengqiao
* @date 19/4/3
*/
public class ConsumerModelConverter {
public static List<ConsumerGroupDetailVO> convert2ConsumerGroupDetailVO(String topicName,
String consumeGroup,
String location,
List<ConsumeDetailDTO> consumeDetailDTOList) {
if (consumeDetailDTOList == null || consumeDetailDTOList.isEmpty()) {
return new ArrayList<>();
}
List<ConsumerGroupDetailVO> consumerGroupDetailVOList = new ArrayList<>();
for (ConsumeDetailDTO consumeDetailDTO : consumeDetailDTOList) {
ConsumerGroupDetailVO consumerGroupDetailVO = new ConsumerGroupDetailVO();
consumerGroupDetailVO.setTopicName(topicName);
consumerGroupDetailVO.setConsumerGroup(consumeGroup);
consumerGroupDetailVO.setLocation(location);
consumerGroupDetailVO.setPartitionId(consumeDetailDTO.getPartitionId());
consumerGroupDetailVO.setClientId(consumeDetailDTO.getConsumerId());
consumerGroupDetailVO.setConsumeOffset(consumeDetailDTO.getConsumeOffset());
consumerGroupDetailVO.setPartitionOffset(consumeDetailDTO.getOffset());
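// Lag = partition end offset minus the group's committed consume offset (when both are available)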
if (consumeDetailDTO.getOffset() != null && consumeDetailDTO.getConsumeOffset() != null) {
consumerGroupDetailVO.setLag(consumeDetailDTO.getOffset() - consumeDetailDTO.getConsumeOffset());
}
consumerGroupDetailVOList.add(consumerGroupDetailVO);
}
return consumerGroupDetailVOList;
}
public static List<ConsumerGroupVO> convert2ConsumerGroupVOList(List<ConsumerGroupDTO> consumeGroupDTOList) {
if (consumeGroupDTOList == null || consumeGroupDTOList.isEmpty()) {
return new ArrayList<>();
}
List<ConsumerGroupVO> consumerGroupVOList = new ArrayList<>();
for (ConsumerGroupDTO consumeGroupDTO : consumeGroupDTOList) {
ConsumerGroupVO vo = new ConsumerGroupVO();
vo.setConsumerGroup(consumeGroupDTO.getConsumerGroup());
vo.setAppIds(ListUtils.strList2String(consumeGroupDTO.getAppIdList()));
vo.setLocation(consumeGroupDTO.getOffsetStoreLocation().location);
consumerGroupVOList.add(vo);
}
return consumerGroupVOList;
}
}

View File

@@ -0,0 +1,108 @@
package com.xiaojukeji.kafka.manager.web.converters;
import com.xiaojukeji.kafka.manager.common.entity.ao.expert.TopicAnomalyFlow;
import com.xiaojukeji.kafka.manager.common.entity.ao.expert.TopicInsufficientPartition;
import com.xiaojukeji.kafka.manager.common.entity.ao.expert.TopicRegionHot;
import com.xiaojukeji.kafka.manager.common.entity.vo.op.expert.*;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.common.entity.pojo.ClusterDO;
import com.xiaojukeji.kafka.manager.common.entity.pojo.TopicExpiredDO;
import java.util.*;
/**
* @author zengqiao
* @date 20/3/29
*/
public class ExpertConverter {
public static List<RegionHotTopicVO> convert2RegionHotTopicVOList(List<TopicRegionHot> hotTopicList) {
if (ValidateUtils.isEmptyList(hotTopicList)) {
return new ArrayList<>();
}
List<RegionHotTopicVO> voList = new ArrayList<>();
for (TopicRegionHot hotTopic: hotTopicList) {
RegionHotTopicVO vo = new RegionHotTopicVO();
vo.setClusterId(hotTopic.getClusterDO().getId());
vo.setClusterName(hotTopic.getClusterDO().getClusterName());
vo.setTopicName(hotTopic.getTopicName());
vo.setDetailList(new ArrayList<>());
for (Map.Entry<Integer, Integer> entry: hotTopic.getBrokerIdPartitionNumMap().entrySet()) {
BrokerIdPartitionNumVO numVO = new BrokerIdPartitionNumVO();
numVO.setBrokeId(entry.getKey());
numVO.setPartitionNum(entry.getValue());
vo.getDetailList().add(numVO);
}
vo.setRetentionTime(hotTopic.getRetentionTime());
voList.add(vo);
}
return voList;
}
public static List<PartitionInsufficientTopicVO> convert2PartitionInsufficientTopicVOList(
List<TopicInsufficientPartition> dataList) {
if (ValidateUtils.isEmptyList(dataList)) {
return new ArrayList<>();
}
List<PartitionInsufficientTopicVO> voList = new ArrayList<>();
for (TopicInsufficientPartition elem: dataList) {
PartitionInsufficientTopicVO vo = new PartitionInsufficientTopicVO();
vo.setClusterId(elem.getClusterDO().getId());
vo.setClusterName(elem.getClusterDO().getClusterName());
vo.setTopicName(elem.getTopicName());
vo.setBrokerIdList(elem.getBrokerIdList());
vo.setPresentPartitionNum(elem.getPresentPartitionNum());
vo.setSuggestedPartitionNum(elem.getSuggestedPartitionNum());
vo.setBytesInPerPartition(elem.getBytesInPerPartition());
vo.setMaxAvgBytesInList(elem.getMaxAvgBytesInList());
voList.add(vo);
}
return voList;
}
public static List<AnomalyFlowTopicVO> convert2AnomalyFlowTopicVOList(List<TopicAnomalyFlow> anomalyFlowList) {
if (ValidateUtils.isEmptyList(anomalyFlowList)) {
return new ArrayList<>();
}
List<AnomalyFlowTopicVO> voList = new ArrayList<>();
for (TopicAnomalyFlow anomalyFlow: anomalyFlowList) {
AnomalyFlowTopicVO vo = new AnomalyFlowTopicVO();
vo.setClusterId(anomalyFlow.getClusterId());
vo.setClusterName(anomalyFlow.getClusterName());
vo.setTopicName(anomalyFlow.getTopicName());
vo.setBytesIn(anomalyFlow.getBytesIn());
vo.setBytesInIncr(anomalyFlow.getBytesInIncr());
vo.setIops(anomalyFlow.getIops());
vo.setIopsIncr(anomalyFlow.getIopsIncr());
voList.add(vo);
}
return voList;
}
public static List<ExpiredTopicVO> convert2ExpiredTopicVOList(List<TopicExpiredDO> topicExpiredDOList,
List<ClusterDO> clusterDOList) {
if (ValidateUtils.isEmptyList(topicExpiredDOList)) {
return new ArrayList<>();
}
if (ValidateUtils.isEmptyList(clusterDOList)) {
clusterDOList = new ArrayList<>();
}
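// Index cluster names by id so each expired topic can display its cluster name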
Map<Long, String> clusterMap = new HashMap<>(0);
for (ClusterDO clusterDO: clusterDOList) {
clusterMap.put(clusterDO.getId(), clusterDO.getClusterName());
}
List<ExpiredTopicVO> voList = new ArrayList<>();
for (TopicExpiredDO topicExpiredDO: topicExpiredDOList) {
ExpiredTopicVO expiredTopicVO = new ExpiredTopicVO();
expiredTopicVO.setClusterId(topicExpiredDO.getClusterId());
expiredTopicVO.setClusterName(clusterMap.getOrDefault(topicExpiredDO.getClusterId(), ""));
expiredTopicVO.setTopicName(topicExpiredDO.getTopicName());
expiredTopicVO.setExpiredDay(topicExpiredDO.getExpiredDay());
expiredTopicVO.setStatus(topicExpiredDO.getStatus());
voList.add(expiredTopicVO);
}
return voList;
}
}

View File

@@ -0,0 +1,52 @@
package com.xiaojukeji.kafka.manager.web.converters;
import com.xiaojukeji.kafka.manager.common.entity.pojo.gateway.KafkaAclDO;
import com.xiaojukeji.kafka.manager.common.entity.pojo.gateway.KafkaUserDO;
import com.xiaojukeji.kafka.manager.common.entity.vo.gateway.KafkaAclVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.gateway.KafkaUserVO;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import java.util.ArrayList;
import java.util.List;
/**
* @author zengqiao
* @date 20/8/3
*/
public class GatewayModelConverter {
public static List<KafkaAclVO> convert2KafkaAclVOList(List<KafkaAclDO> doList) {
if (ValidateUtils.isNull(doList)) {
return new ArrayList<>();
}
List<KafkaAclVO> voList = new ArrayList<>();
for (KafkaAclDO kafkaAclDO: doList) {
KafkaAclVO vo = new KafkaAclVO();
vo.setTopicName(kafkaAclDO.getTopicName());
vo.setTimestamp(kafkaAclDO.getCreateTime().getTime());
vo.setAccess(kafkaAclDO.getAccess());
vo.setUsername(kafkaAclDO.getAppId());
vo.setOperation(kafkaAclDO.getOperation());
voList.add(vo);
}
return voList;
}
public static List<KafkaUserVO> convert2KafkaUserVOList(List<KafkaUserDO> doList) {
if (ValidateUtils.isNull(doList)) {
return new ArrayList<>();
}
List<KafkaUserVO> voList = new ArrayList<>();
for (KafkaUserDO kafkaUserDO: doList) {
KafkaUserVO vo = new KafkaUserVO();
vo.setUsername(kafkaUserDO.getAppId());
vo.setPassword(kafkaUserDO.getPassword());
vo.setTimestamp(kafkaUserDO.getCreateTime().getTime());
vo.setUserType(kafkaUserDO.getUserType());
vo.setOperation(kafkaUserDO.getOperation());
voList.add(vo);
}
return voList;
}
}

View File

@@ -0,0 +1,37 @@
package com.xiaojukeji.kafka.manager.web.converters;
import com.xiaojukeji.kafka.manager.common.entity.vo.rd.KafkaFileVO;
import com.xiaojukeji.kafka.manager.common.utils.CopyUtils;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.common.entity.pojo.ClusterDO;
import com.xiaojukeji.kafka.manager.common.entity.pojo.KafkaFileDO;
import com.xiaojukeji.kafka.manager.service.service.ClusterService;
import java.util.ArrayList;
import java.util.List;
/**
* @author zhongyuankai
* @date 2020/5/8
*/
public class KafkaFileConverter {
public static List<KafkaFileVO> convertKafkaFileVOList(List<KafkaFileDO> kafkaFileDOList, ClusterService clusterService) {
List<KafkaFileVO> kafkaFileVOList = new ArrayList<>();
if (ValidateUtils.isEmptyList(kafkaFileDOList)) {
return kafkaFileVOList;
}
for (KafkaFileDO kafkaFileDO : kafkaFileDOList) {
KafkaFileVO kafkaFileVO = new KafkaFileVO();
CopyUtils.copyProperties(kafkaFileVO, kafkaFileDO);
ClusterDO clusterDO = clusterService.getById(kafkaFileDO.getClusterId());
if (ValidateUtils.isNull(clusterDO)) {
kafkaFileVO.setClusterName("*");
} else {
kafkaFileVO.setClusterName(clusterDO.getClusterName());
}
kafkaFileVOList.add(kafkaFileVO);
}
return kafkaFileVOList;
}
}

View File

@@ -0,0 +1,56 @@
package com.xiaojukeji.kafka.manager.web.converters;
import com.xiaojukeji.kafka.manager.common.entity.dto.rd.LogicalClusterDTO;
import com.xiaojukeji.kafka.manager.common.entity.vo.rd.cluster.LogicalClusterVO;
import com.xiaojukeji.kafka.manager.common.utils.ListUtils;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.common.entity.pojo.LogicalClusterDO;
import java.util.ArrayList;
import java.util.List;
/**
* @author zengqiao
* @date 20/6/29
*/
public class LogicalClusterModelConverter {
public static LogicalClusterVO convert2LogicalClusterVO(LogicalClusterDO logicalClusterDO) {
if (ValidateUtils.isNull(logicalClusterDO)) {
return null;
}
LogicalClusterVO vo = new LogicalClusterVO();
vo.setLogicalClusterId(logicalClusterDO.getId());
vo.setLogicalClusterName(logicalClusterDO.getName());
vo.setPhysicalClusterId(logicalClusterDO.getClusterId());
vo.setMode(logicalClusterDO.getMode());
vo.setRegionIdList(ListUtils.string2LongList(logicalClusterDO.getRegionList()));
vo.setAppId(logicalClusterDO.getAppId());
vo.setDescription(logicalClusterDO.getDescription());
vo.setGmtCreate(logicalClusterDO.getGmtCreate());
vo.setGmtModify(logicalClusterDO.getGmtModify());
return vo;
}
public static List<LogicalClusterVO> convert2LogicalClusterVOList(List<LogicalClusterDO> doList) {
if (ValidateUtils.isEmptyList(doList)) {
return new ArrayList<>();
}
List<LogicalClusterVO> voList = new ArrayList<>();
for (LogicalClusterDO elem: doList) {
voList.add(convert2LogicalClusterVO(elem));
}
return voList;
}
public static LogicalClusterDO convert2LogicalClusterDO(LogicalClusterDTO dto) {
LogicalClusterDO logicalClusterDO = new LogicalClusterDO();
logicalClusterDO.setName(dto.getName());
logicalClusterDO.setClusterId(dto.getClusterId());
logicalClusterDO.setRegionList(ListUtils.longList2String(dto.getRegionIdList()));
logicalClusterDO.setMode(dto.getMode());
logicalClusterDO.setAppId(dto.getAppId());
logicalClusterDO.setDescription(dto.getDescription());
logicalClusterDO.setId(dto.getId());
return logicalClusterDO;
}
}

View File

@@ -0,0 +1,139 @@
package com.xiaojukeji.kafka.manager.web.converters;
import com.xiaojukeji.kafka.manager.monitor.common.entry.dto.MetricPoint;
import com.xiaojukeji.kafka.manager.common.entity.pojo.gateway.AppDO;
import com.xiaojukeji.kafka.manager.monitor.common.entry.dto.MonitorRuleDTO;
import com.xiaojukeji.kafka.manager.monitor.common.entry.vo.*;
import com.xiaojukeji.kafka.manager.common.entity.vo.normal.app.AppSummaryVO;
import com.xiaojukeji.kafka.manager.monitor.common.monitor.MonitorAlertDetail;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.monitor.common.entry.*;
import com.xiaojukeji.kafka.manager.common.entity.pojo.MonitorRuleDO;
import java.util.ArrayList;
import java.util.List;
/**
* @author zengqiao
* @date 20/5/21
*/
public class MonitorRuleConverter {
public static MonitorRuleDetailVO convert2MonitorRuleDetailVO(MonitorRuleDO monitorRuleDO,
MonitorRuleDTO monitorRuleDTO,
AppDO appDO) {
MonitorRuleDetailVO vo = new MonitorRuleDetailVO();
vo.setId(monitorRuleDO.getId());
vo.setName(monitorRuleDO.getName());
vo.setOperator(monitorRuleDO.getOperator());
vo.setCreateTime(monitorRuleDO.getCreateTime().getTime());
vo.setModifyTime(monitorRuleDO.getModifyTime().getTime());
vo.setMonitorRule(monitorRuleDTO);
if (ValidateUtils.isNull(appDO)) {
return vo;
}
AppSummaryVO appSummaryVO = new AppSummaryVO();
appSummaryVO.setAppId(appDO.getAppId());
appSummaryVO.setName(appDO.getName());
appSummaryVO.setPrincipals(appDO.getPrincipals());
vo.setAppSummary(appSummaryVO);
return vo;
}
public static List<MonitorAlertVO> convert2MonitorAlertVOList(List<Alert> alertList) {
if (ValidateUtils.isNull(alertList)) {
return new ArrayList<>();
}
List<MonitorAlertVO> voList = new ArrayList<>();
for (Alert alert: alertList) {
voList.add(convert2MonitorAlertVO(alert));
}
return voList;
}
public static MonitorAlertDetailVO convert2MonitorAlertDetailVO(MonitorAlertDetail monitorAlertDetail) {
MonitorAlertDetailVO monitorAlertDetailVO = new MonitorAlertDetailVO();
monitorAlertDetailVO.setMonitorAlert(convert2MonitorAlertVO(monitorAlertDetail.getAlert()));
monitorAlertDetailVO.setMonitorMetric(convert2MonitorMetricVO(monitorAlertDetail.getMetric()));
return monitorAlertDetailVO;
}
private static MonitorAlertVO convert2MonitorAlertVO(Alert alert) {
if (ValidateUtils.isNull(alert)) {
return null;
}
MonitorAlertVO vo = new MonitorAlertVO();
vo.setAlertId(alert.getId());
vo.setMonitorId(alert.getMonitorId());
vo.setMonitorName(alert.getStrategyName());
vo.setMonitorPriority(alert.getPriority());
vo.setAlertStatus("alert".equals(alert.getType())? 0: 1);
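// Start/end times appear to be second-level timestamps; convert them to milliseconds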
vo.setStartTime(alert.getStartTime() * 1000);
vo.setEndTime(alert.getEndTime() * 1000);
vo.setMetric(alert.getMetric());
vo.setValue(alert.getValue());
vo.setPoints(alert.getPoints());
vo.setGroups(alert.getGroups());
vo.setInfo(alert.getInfo());
return vo;
}
private static MonitorMetricVO convert2MonitorMetricVO(Metric metric) {
MonitorMetricVO vo = new MonitorMetricVO();
vo.setMetric(metric.getMetric());
vo.setStep(metric.getStep());
vo.setValues(new ArrayList<>());
vo.setComparison(metric.getComparison());
vo.setDelta(metric.getDelta());
vo.setOrigin(metric.getOrigin());
for (MetricPoint metricPoint: metric.getValues()) {
vo.getValues().add(new MonitorMetricPoint(metricPoint.getTimestamp(), metricPoint.getValue()));
}
return vo;
}
public static List<MonitorSilenceVO> convert2MonitorSilenceVOList(MonitorRuleDO monitorRuleDO,
List<Silence> silenceList) {
if (ValidateUtils.isNull(silenceList)) {
return new ArrayList<>();
}
List<MonitorSilenceVO> voList = new ArrayList<>();
for (Silence silence: silenceList) {
voList.add(convert2MonitorSilenceVO(monitorRuleDO, silence));
}
return voList;
}
public static MonitorSilenceVO convert2MonitorSilenceVO(MonitorRuleDO monitorRuleDO, Silence silence) {
if (ValidateUtils.isNull(silence)) {
return null;
}
MonitorSilenceVO vo = new MonitorSilenceVO();
vo.setSilenceId(silence.getSilenceId());
vo.setMonitorId(monitorRuleDO.getId());
vo.setMonitorName(monitorRuleDO.getName());
vo.setStartTime(silence.getBeginTime());
vo.setEndTime(silence.getEndTime());
vo.setDescription(silence.getDescription());
return vo;
}
public static List<MonitorNotifyGroupVO> convert2MonitorNotifyGroupVOList(List<NotifyGroup> notifyGroupList) {
if (ValidateUtils.isEmptyList(notifyGroupList)) {
return new ArrayList<>();
}
List<MonitorNotifyGroupVO> voList = new ArrayList<>();
for (NotifyGroup notifyGroup: notifyGroupList) {
MonitorNotifyGroupVO vo = new MonitorNotifyGroupVO();
vo.setId(notifyGroup.getId());
vo.setName(notifyGroup.getName());
vo.setComment(notifyGroup.getComment());
voList.add(vo);
}
return voList;
}
}

View File

@@ -0,0 +1,32 @@
package com.xiaojukeji.kafka.manager.web.converters;
import com.xiaojukeji.kafka.manager.common.bizenum.ModuleEnum;
import com.xiaojukeji.kafka.manager.common.bizenum.OperateEnum;
import com.xiaojukeji.kafka.manager.common.entity.pojo.OperateRecordDO;
import com.xiaojukeji.kafka.manager.common.entity.vo.rd.OperateRecordVO;
import com.xiaojukeji.kafka.manager.common.utils.CopyUtils;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import java.util.ArrayList;
import java.util.List;
public class OperateRecordModelConverter {
public static List<OperateRecordVO> convert2OperateRecordVOList(List<OperateRecordDO> operateRecordDOList) {
if (ValidateUtils.isEmptyList(operateRecordDOList)) {
return new ArrayList<>();
}
List<OperateRecordVO> voList = new ArrayList<>(operateRecordDOList.size());
for (OperateRecordDO operateRecordDO : operateRecordDOList) {
OperateRecordVO vo = new OperateRecordVO();
CopyUtils.copyProperties(vo, operateRecordDO);
vo.setCreateTime(operateRecordDO.getCreateTime().getTime());
vo.setModifyTime(operateRecordDO.getModifyTime().getTime());
vo.setModule(ModuleEnum.valueOf(operateRecordDO.getModuleId()).getMessage());
vo.setOperate(OperateEnum.valueOf(operateRecordDO.getOperateId()).getMessage());
voList.add(vo);
}
return voList;
}
}

View File

@@ -0,0 +1,88 @@
package com.xiaojukeji.kafka.manager.web.converters;
import com.xiaojukeji.kafka.manager.common.entity.ao.account.Account;
import com.xiaojukeji.kafka.manager.bpm.common.OrderResult;
import com.xiaojukeji.kafka.manager.bpm.common.entry.BaseOrderDetailData;
import com.xiaojukeji.kafka.manager.common.entity.vo.common.AccountVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.normal.order.OrderResultVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.normal.order.OrderVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.normal.order.detail.OrderDetailBaseVO;
import com.xiaojukeji.kafka.manager.common.utils.CopyUtils;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.common.entity.pojo.OrderDO;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
* @author zengqiao
* @date 19/6/18
*/
public class OrderConverter {
public static List<OrderVO> convert2OrderVOList(List<OrderDO> orderDOList) {
if (orderDOList == null || orderDOList.isEmpty()) {
return new ArrayList<>();
}
List<OrderVO> orderVOList = new ArrayList<>();
for (OrderDO orderDO : orderDOList) {
OrderVO orderVO = convert2OrderVO(orderDO);
if (ValidateUtils.isNull(orderVO)) {
continue;
}
orderVOList.add(orderVO);
}
return orderVOList;
}
public static OrderVO convert2OrderVO(OrderDO orderDO) {
if (ValidateUtils.isNull(orderDO)) {
return null;
}
OrderVO orderVO = new OrderVO();
CopyUtils.copyProperties(orderVO, orderDO);
orderVO.setGmtTime(orderDO.getGmtCreate());
return orderVO;
}
public static OrderDetailBaseVO convert2DetailBaseVO(BaseOrderDetailData baseDTO) {
OrderDetailBaseVO baseVO = new OrderDetailBaseVO();
if (ValidateUtils.isNull(baseDTO)) {
return baseVO;
}
CopyUtils.copyProperties(baseVO, baseDTO);
baseVO.setDetail(baseDTO.getDetail());
AccountVO accountVO = new AccountVO();
if (!ValidateUtils.isNull(baseDTO.getApplicant())) {
CopyUtils.copyProperties(accountVO, baseDTO.getApplicant());
if (!ValidateUtils.isNull(baseDTO.getApplicant().getAccountRoleEnum())) {
accountVO.setRole(baseDTO.getApplicant().getAccountRoleEnum().getRole());
}
}
baseVO.setApplicant(accountVO);
ArrayList<AccountVO> approverList = new ArrayList<>();
for (Account account : baseDTO.getApproverList()) {
AccountVO approver = new AccountVO();
CopyUtils.copyProperties(approver, account);
if (!ValidateUtils.isNull(account.getAccountRoleEnum())) {
approver.setRole(account.getAccountRoleEnum().getRole());
}
approverList.add(approver);
}
baseVO.setApproverList(approverList);
return baseVO;
}
public static List<OrderResultVO> convert2OrderResultVOList(List<OrderResult> orderResultList) {
if (ValidateUtils.isEmptyList(orderResultList)) {
return Collections.emptyList();
}
List<OrderResultVO> voList = new ArrayList<>();
for (OrderResult orderResult : orderResultList) {
OrderResultVO vo = new OrderResultVO();
vo.setId(orderResult.getId());
vo.setResult(orderResult.getResult());
voList.add(vo);
}
return voList;
}
}

View File

@@ -0,0 +1,145 @@
package com.xiaojukeji.kafka.manager.web.converters;
import com.xiaojukeji.kafka.manager.common.bizenum.TaskStatusReassignEnum;
import com.xiaojukeji.kafka.manager.common.entity.ao.reassign.ReassignStatus;
import com.xiaojukeji.kafka.manager.common.entity.vo.op.reassign.ReassignPartitionStatusVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.op.reassign.ReassignTaskVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.op.reassign.ReassignTopicStatusVO;
import com.xiaojukeji.kafka.manager.common.utils.DateUtils;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.common.zookeeper.znode.ReassignmentElemData;
import com.xiaojukeji.kafka.manager.common.entity.pojo.ReassignTaskDO;
import kafka.common.TopicAndPartition;
import java.util.*;
/**
* @author zengqiao
* @date 19/4/16
*/
public class ReassignModelConverter {
public static List<ReassignTopicStatusVO> convert2ReassignTopicStatusVOList(List<ReassignStatus> dtoList) {
if (ValidateUtils.isNull(dtoList)) {
return new ArrayList<>();
}
List<ReassignTopicStatusVO> voList = new ArrayList<>();
for (ReassignStatus elem: dtoList) {
ReassignTopicStatusVO vo = new ReassignTopicStatusVO();
vo.setSubTaskId(elem.getSubTaskId());
vo.setClusterId(elem.getClusterId());
vo.setClusterName(elem.getClusterName());
vo.setTopicName(elem.getTopicName());
vo.setStatus(elem.getStatus());
vo.setCompletedPartitionNum(0);
vo.setRealThrottle(elem.getRealThrottle());
vo.setMaxThrottle(elem.getMaxThrottle());
vo.setMinThrottle(elem.getMinThrottle());
vo.setTotalPartitionNum(elem.getReassignList().size());
vo.setReassignList(new ArrayList<>());
if (ValidateUtils.isNull(elem.getReassignStatusMap())) {
elem.setReassignStatusMap(new HashMap<>());
}
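// Fill per-partition reassignment status and count the partitions that have already finished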
for (ReassignmentElemData elemData: elem.getReassignList()) {
ReassignPartitionStatusVO partitionStatusVO = new ReassignPartitionStatusVO();
partitionStatusVO.setPartitionId(elemData.getPartition());
partitionStatusVO.setDestReplicaIdList(elemData.getReplicas());
TaskStatusReassignEnum reassignEnum = elem.getReassignStatusMap().get(
new TopicAndPartition(elem.getTopicName(),
elemData.getPartition())
);
if (!ValidateUtils.isNull(reassignEnum)) {
partitionStatusVO.setStatus(reassignEnum.getCode());
}
if (!ValidateUtils.isNull(reassignEnum) && TaskStatusReassignEnum.isFinished(reassignEnum.getCode())) {
vo.setCompletedPartitionNum(vo.getCompletedPartitionNum() + 1);
}
vo.getReassignList().add(partitionStatusVO);
}
voList.add(vo);
}
return voList;
}
public static List<ReassignTaskVO> convert2ReassignTaskVOList(List<ReassignTaskDO> doList) {
if (ValidateUtils.isEmptyList(doList)) {
return new ArrayList<>();
}
// Group sub-tasks that belong to the same batch (taskId) together
Map<Long, List<ReassignTaskDO>> doMap = new TreeMap<>(Collections.reverseOrder());
for (ReassignTaskDO elem: doList) {
List<ReassignTaskDO> subDOList = doMap.getOrDefault(elem.getTaskId(), new ArrayList<>());
subDOList.add(elem);
doMap.put(elem.getTaskId(), subDOList);
}
// Compute the aggregated status of each batch
List<ReassignTaskVO> voList = new ArrayList<>();
for (Long taskId: doMap.keySet()) {
voList.add(convert2ReassignTaskVO(taskId, doMap.get(taskId)));
}
return voList;
}
public static ReassignTaskVO convert2ReassignTaskVO(Long taskId, List<ReassignTaskDO> doList) {
if (ValidateUtils.isEmptyList(doList)) {
return null;
}
ReassignTaskVO vo = new ReassignTaskVO();
vo.setTaskName(String.format("%s 数据迁移任务", DateUtils.getFormattedDate(taskId)));
vo.setTaskId(taskId);
vo.setTotalTopicNum(doList.size());
vo.setBeginTime(0L);
vo.setEndTime(0L);
Integer completedTopicNum = 0;
Set<Integer> statusSet = new HashSet<>();
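// Collect the distinct sub-task states; they determine the overall batch status below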
for (ReassignTaskDO elem: doList) {
vo.setGmtCreate(elem.getGmtCreate().getTime());
vo.setOperator(elem.getOperator());
vo.setDescription(elem.getDescription());
if (TaskStatusReassignEnum.isFinished(elem.getStatus())) {
completedTopicNum += 1;
statusSet.add(elem.getStatus());
// Task end time: the latest modification time among finished sub-tasks
vo.setEndTime(Math.max(elem.getGmtModify().getTime(), vo.getEndTime()));
} else {
statusSet.add(elem.getStatus());
}
// Planned start time of the task
vo.setBeginTime(elem.getBeginTime().getTime());
}
// Derive the overall task status from the set of sub-task states
if (statusSet.contains(TaskStatusReassignEnum.RUNNING.getCode())) {
vo.setStatus(TaskStatusReassignEnum.RUNNING.getCode());
vo.setEndTime(null);
} else if (statusSet.contains(TaskStatusReassignEnum.RUNNABLE.getCode())) {
vo.setStatus(TaskStatusReassignEnum.RUNNABLE.getCode());
vo.setEndTime(null);
} else if (statusSet.contains(TaskStatusReassignEnum.NEW.getCode()) && statusSet.size() == 1) {
// All sub-tasks are still in NEW state
vo.setStatus(TaskStatusReassignEnum.NEW.getCode());
vo.setEndTime(null);
} else if (statusSet.contains(TaskStatusReassignEnum.CANCELED.getCode()) && statusSet.size() == 1) {
// All sub-tasks are CANCELED
vo.setStatus(TaskStatusReassignEnum.CANCELED.getCode());
} else if (statusSet.contains(TaskStatusReassignEnum.SUCCEED.getCode()) && statusSet.size() == 1) {
// All sub-tasks SUCCEEDED
vo.setStatus(TaskStatusReassignEnum.SUCCEED.getCode());
} else if (statusSet.contains(TaskStatusReassignEnum.FAILED.getCode())) {
// At least one sub-task failed
vo.setStatus(TaskStatusReassignEnum.FAILED.getCode());
} else {
vo.setStatus(TaskStatusReassignEnum.UNKNOWN.getCode());
vo.setEndTime(null);
}
vo.setCompletedTopicNum(completedTopicNum);
return vo;
}
}

View File

@@ -0,0 +1,57 @@
package com.xiaojukeji.kafka.manager.web.converters;
import com.xiaojukeji.kafka.manager.common.entity.pojo.RegionDO;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.common.utils.ListUtils;
import com.xiaojukeji.kafka.manager.common.entity.dto.rd.RegionDTO;
import com.xiaojukeji.kafka.manager.common.entity.vo.rd.RegionVO;
import java.util.ArrayList;
import java.util.List;
/**
* @author zengqiao
* @date 19/4/3
*/
public class RegionModelConverter {
private static RegionVO convert2RegionVO(RegionDO regionDO) {
if (ValidateUtils.isNull(regionDO)) {
return null;
}
RegionVO regionVO = new RegionVO();
regionVO.setId(regionDO.getId());
regionVO.setClusterId(regionDO.getClusterId());
regionVO.setName(regionDO.getName());
regionVO.setBrokerIdList(ListUtils.string2IntList(regionDO.getBrokerList()));
regionVO.setDescription(regionDO.getDescription());
regionVO.setCapacity(regionDO.getCapacity());
regionVO.setRealUsed(regionDO.getRealUsed());
regionVO.setEstimateUsed(regionDO.getEstimateUsed());
regionVO.setStatus(regionDO.getStatus());
regionVO.setGmtCreate(regionDO.getGmtCreate());
regionVO.setGmtModify(regionDO.getGmtModify());
return regionVO;
}
public static List<RegionVO> convert2RegionVOList(List<RegionDO> doList) {
if (ValidateUtils.isEmptyList(doList)) {
return new ArrayList<>();
}
List<RegionVO> voList = new ArrayList<>();
for (RegionDO elem: doList) {
voList.add(convert2RegionVO(elem));
}
return voList;
}
public static RegionDO convert2RegionDO(RegionDTO dto) {
RegionDO regionDO = new RegionDO();
regionDO.setName(dto.getName());
regionDO.setClusterId(dto.getClusterId());
regionDO.setBrokerList(ListUtils.intList2String(dto.getBrokerIdList()));
regionDO.setDescription(dto.getDescription());
regionDO.setId(dto.getId());
regionDO.setStatus(dto.getStatus());
return regionDO;
}
}

View File

@@ -0,0 +1,75 @@
package com.xiaojukeji.kafka.manager.web.converters;
import com.xiaojukeji.kafka.manager.common.entity.ao.topic.TopicDTO;
import com.xiaojukeji.kafka.manager.common.entity.ao.topic.MineTopicSummary;
import com.xiaojukeji.kafka.manager.common.entity.ao.topic.TopicExpiredData;
import com.xiaojukeji.kafka.manager.common.entity.vo.normal.topic.TopicExpiredVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.normal.topic.TopicMineVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.normal.topic.TopicVO;
import com.xiaojukeji.kafka.manager.common.utils.CopyUtils;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import java.util.ArrayList;
import java.util.List;
/**
* @author zengqiao
* @date 20/5/12
*/
public class TopicMineConverter {
public static List<TopicMineVO> convert2TopicMineVOList(List<MineTopicSummary> dtoList) {
if (ValidateUtils.isNull(dtoList)) {
return new ArrayList<>();
}
List<TopicMineVO> voList = new ArrayList<>();
for (MineTopicSummary data: dtoList) {
TopicMineVO vo = new TopicMineVO();
CopyUtils.copyProperties(vo, data);
vo.setClusterId(data.getLogicalClusterId());
vo.setClusterName(data.getLogicalClusterName());
vo.setBytesIn(data.getBytesIn());
vo.setBytesOut(data.getBytesOut());
voList.add(vo);
}
return voList;
}
public static List<TopicVO> convert2TopicVOList(List<TopicDTO> dtoList) {
if (ValidateUtils.isNull(dtoList)) {
return new ArrayList<>();
}
List<TopicVO> voList = new ArrayList<>();
for (TopicDTO data: dtoList) {
TopicVO vo = new TopicVO();
CopyUtils.copyProperties(vo, data);
vo.setClusterId(data.getLogicalClusterId());
vo.setClusterName(data.getLogicalClusterName());
vo.setNeedAuth(data.getNeedAuth());
voList.add(vo);
}
return voList;
}
public static List<TopicExpiredVO> convert2TopicExpiredVOList(List<TopicExpiredData> dataList) {
if (ValidateUtils.isNull(dataList)) {
return new ArrayList<>();
}
List<TopicExpiredVO> voList = new ArrayList<>();
for (TopicExpiredData elem: dataList) {
TopicExpiredVO vo = new TopicExpiredVO();
if (!ValidateUtils.isNull(elem.getLogicalClusterDO())) {
vo.setClusterId(elem.getLogicalClusterDO().getClusterId());
vo.setClusterName(elem.getLogicalClusterDO().getName());
}
vo.setTopicName(elem.getTopicName());
if (!ValidateUtils.isNull(elem.getAppDO())) {
vo.setAppId(elem.getAppDO().getAppId());
vo.setAppName(elem.getAppDO().getName());
vo.setAppPrincipals(elem.getAppDO().getPrincipals());
}
vo.setFetchConnectionNum(elem.getFetchConnectionNum());
voList.add(vo);
}
return voList;
}
}

View File

@@ -0,0 +1,223 @@
package com.xiaojukeji.kafka.manager.web.converters;
import com.xiaojukeji.kafka.manager.common.entity.ao.topic.*;
import com.xiaojukeji.kafka.manager.common.entity.metrics.BaseMetrics;
import com.xiaojukeji.kafka.manager.common.entity.metrics.TopicMetrics;
import com.xiaojukeji.kafka.manager.common.entity.vo.normal.TopicBusinessInfoVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.normal.topic.*;
import com.xiaojukeji.kafka.manager.common.entity.vo.rd.TopicBrokerVO;
import com.xiaojukeji.kafka.manager.common.utils.CopyUtils;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.common.utils.jmx.JmxConstant;
import com.xiaojukeji.kafka.manager.common.entity.pojo.TopicMetricsDO;
import com.xiaojukeji.kafka.manager.service.utils.MetricsConvertUtils;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
* @author arthur
* @date 2017/6/1.
*/
public class TopicModelConverter {
public static TopicBasicVO convert2TopicBasicVO(TopicBasicDTO dto, String serviceDiscoveryAddress) {
TopicBasicVO vo = new TopicBasicVO();
vo.setClusterId(dto.getClusterId());
vo.setAppId(dto.getAppId());
vo.setAppName(dto.getAppName());
vo.setPartitionNum(dto.getPartitionNum());
vo.setReplicaNum(dto.getReplicaNum());
vo.setPrincipals(dto.getPrincipals());
vo.setRetentionTime(dto.getRetentionTime());
vo.setCreateTime(dto.getCreateTime());
vo.setModifyTime(dto.getModifyTime());
vo.setScore(dto.getScore());
vo.setTopicCodeC(dto.getTopicCodeC());
vo.setDescription(dto.getDescription());
vo.setBootstrapServers(serviceDiscoveryAddress);
return vo;
}
public static List<TopicPartitionVO> convert2TopicPartitionVOList(List<TopicPartitionDTO> dtoList) {
List<TopicPartitionVO> voList = new ArrayList<>();
for (TopicPartitionDTO dto : dtoList) {
TopicPartitionVO vo = new TopicPartitionVO();
CopyUtils.copyProperties(vo, dto);
if (!ValidateUtils.isNull(dto.getBeginningOffset())
&& !ValidateUtils.isNull(dto.getEndOffset())) {
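// Message count approximated as end offset minus beginning offset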
vo.setMsgNum(dto.getEndOffset() - dto.getBeginningOffset());
}
vo.setReplicaBrokerIdList(dto.getReplicaBrokerIdList());
vo.setIsrBrokerIdList(dto.getIsrBrokerIdList());
voList.add(vo);
}
return voList;
}
public static List<TopicRequestTimeVO> convert2TopicRequestTimeMetricsVOList(List<TopicMetricsDO> metricsList) {
List<TopicRequestTimeVO> voList = new ArrayList<>();
for (TopicMetricsDO elem : metricsList) {
TopicRequestTimeVO vo = new TopicRequestTimeVO();
TopicMetrics metrics = MetricsConvertUtils.convert2TopicMetrics(elem);
vo.setProduceRequestTimeMean(metrics.getSpecifiedMetrics("ProduceTotalTimeMsMean"));
vo.setProduceRequestTime50thPercentile(metrics.getSpecifiedMetrics("ProduceTotalTimeMs50thPercentile"));
vo.setProduceRequestTime75thPercentile(metrics.getSpecifiedMetrics("ProduceTotalTimeMs75thPercentile"));
vo.setProduceRequestTime95thPercentile(metrics.getSpecifiedMetrics("ProduceTotalTimeMs95thPercentile"));
vo.setProduceRequestTime99thPercentile(metrics.getSpecifiedMetrics("ProduceTotalTimeMs99thPercentile"));
vo.setFetchRequestTimeMean(metrics.getSpecifiedMetrics("FetchConsumerTotalTimeMsMean"));
vo.setFetchRequestTime50thPercentile(metrics.getSpecifiedMetrics("FetchConsumerTotalTimeMs50thPercentile"));
vo.setFetchRequestTime75thPercentile(metrics.getSpecifiedMetrics("FetchConsumerTotalTimeMs75thPercentile"));
vo.setFetchRequestTime95thPercentile(metrics.getSpecifiedMetrics("FetchConsumerTotalTimeMs95thPercentile"));
vo.setFetchRequestTime99thPercentile(metrics.getSpecifiedMetrics("FetchConsumerTotalTimeMs99thPercentile"));
vo.setGmtCreate(metrics.getSpecifiedMetrics(JmxConstant.CREATE_TIME, Long.class));
voList.add(vo);
}
return voList;
}
public static List<TopicRequestTimeDetailVO> convert2TopicRequestTimeDetailVOList(BaseMetrics metrics) {
if (ValidateUtils.isNull(metrics)) {
return new ArrayList<>();
}
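// Build 99th-percentile time breakdowns for the produce and fetch request paths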
TopicRequestTimeDetailVO produceVO = new TopicRequestTimeDetailVO();
produceVO.setRequestTimeType("RequestProduceTime");
produceVO.setRequestQueueTimeMs(metrics.getSpecifiedMetrics("ProduceRequestQueueTimeMs99thPercentile"));
produceVO.setResponseQueueTimeMs(metrics.getSpecifiedMetrics("ProduceResponseQueueTimeMs99thPercentile"));
produceVO.setResponseSendTimeMs(metrics.getSpecifiedMetrics("ProduceResponseSendTimeMs99thPercentile"));
produceVO.setLocalTimeMs(metrics.getSpecifiedMetrics("ProduceLocalTimeMs99thPercentile"));
produceVO.setThrottleTimeMs(metrics.getSpecifiedMetrics("ProduceThrottleTimeMs99thPercentile"));
produceVO.setRemoteTimeMs(metrics.getSpecifiedMetrics("ProduceRemoteTimeMs99thPercentile"));
produceVO.setTotalTimeMs(metrics.getSpecifiedMetrics("ProduceTotalTimeMs99thPercentile"));
TopicRequestTimeDetailVO fetchVO = new TopicRequestTimeDetailVO();
fetchVO.setRequestTimeType("RequestFetchTime");
fetchVO.setRequestQueueTimeMs(metrics.getSpecifiedMetrics("FetchConsumerRequestQueueTimeMs99thPercentile"));
fetchVO.setResponseQueueTimeMs(metrics.getSpecifiedMetrics("FetchConsumerResponseQueueTimeMs99thPercentile"));
fetchVO.setResponseSendTimeMs(metrics.getSpecifiedMetrics("FetchConsumerResponseSendTimeMs99thPercentile"));
fetchVO.setLocalTimeMs(metrics.getSpecifiedMetrics("FetchConsumerLocalTimeMs99thPercentile"));
fetchVO.setThrottleTimeMs(metrics.getSpecifiedMetrics("FetchConsumerThrottleTimeMs99thPercentile"));
fetchVO.setRemoteTimeMs(metrics.getSpecifiedMetrics("FetchConsumerRemoteTimeMs99thPercentile"));
fetchVO.setTotalTimeMs(metrics.getSpecifiedMetrics("FetchConsumerTotalTimeMs99thPercentile"));
return Arrays.asList(produceVO, fetchVO);
}
public static List<TopicConnectionVO> convert2TopicConnectionVOList(List<TopicConnection> connectionDTOList) {
if (ValidateUtils.isNull(connectionDTOList)) {
return new ArrayList<>();
}
List<TopicConnectionVO> voList = new ArrayList<>();
for (TopicConnection dto : connectionDTOList) {
TopicConnectionVO vo = new TopicConnectionVO();
CopyUtils.copyProperties(vo, dto);
voList.add(vo);
}
return voList;
}
public static List<TopicBrokerVO> convert2TopicBrokerVO(Long physicalClusterId,
List<TopicBrokerDTO> dtoList) {
if (ValidateUtils.isNull(dtoList)) {
return new ArrayList<>();
}
List<TopicBrokerVO> voList = new ArrayList<>();
for (TopicBrokerDTO dto : dtoList) {
TopicBrokerVO vo = new TopicBrokerVO();
vo.setClusterId(physicalClusterId);
CopyUtils.copyProperties(vo, dto);
vo.setLeaderPartitionIdList(dto.getLeaderPartitionIdList());
vo.setPartitionIdList(dto.getPartitionIdList());
voList.add(vo);
}
return voList;
}
public static List<TopicDataSampleVO> convert2TopicDataSampleVOList(List<String> dataList) {
if (ValidateUtils.isNull(dataList)) {
return new ArrayList<>();
}
List<TopicDataSampleVO> voList = new ArrayList<>();
for (String data : dataList) {
TopicDataSampleVO topicDataSampleVO = new TopicDataSampleVO();
topicDataSampleVO.setValue(data);
voList.add(topicDataSampleVO);
}
return voList;
}
public static List<TopicMetricVO> convert2TopicMetricsVOList(List<TopicMetricsDO> dataList) {
if (ValidateUtils.isNull(dataList)) {
return new ArrayList<>();
}
List<TopicMetricVO> voList = new ArrayList<>();
for (TopicMetricsDO data : dataList) {
TopicMetricVO vo = new TopicMetricVO();
BaseMetrics metrics = MetricsConvertUtils.convert2TopicMetrics(data);
if (ValidateUtils.isNull(metrics)) {
continue;
}
vo.setBytesInPerSec(metrics.getBytesInPerSecOneMinuteRate(null));
vo.setBytesOutPerSec(metrics.getBytesOutPerSecOneMinuteRate(null));
vo.setBytesRejectedPerSec(metrics.getBytesRejectedPerSecOneMinuteRate(null));
vo.setMessagesInPerSec(metrics.getMessagesInPerSecOneMinuteRate(null));
vo.setTotalProduceRequestsPerSec(metrics.getTotalProduceRequestsPerSecOneMinuteRate(null));
vo.setGmtCreate(data.getGmtCreate().getTime());
voList.add(vo);
}
return voList;
}
public static List<TopicMetricVO> convert2TopicMetricVOList(List<TopicMetricsDTO> dataList) {
if (ValidateUtils.isNull(dataList)) {
return new ArrayList<>();
}
List<TopicMetricVO> voList = new ArrayList<>();
for (TopicMetricsDTO data : dataList) {
TopicMetricVO vo = new TopicMetricVO();
CopyUtils.copyProperties(vo, data);
voList.add(vo);
}
return voList;
}
public static List<TopicAuthorizedAppVO> convert2TopicAuthorizedAppVOList(List<TopicAppData> dtoList) {
if (ValidateUtils.isEmptyList(dtoList)) {
return new ArrayList<>();
}
List<TopicAuthorizedAppVO> voList = new ArrayList<>();
for (TopicAppData topicAppDTO : dtoList) {
TopicAuthorizedAppVO vo = new TopicAuthorizedAppVO();
CopyUtils.copyProperties(vo, topicAppDTO);
voList.add(vo);
}
return voList;
}
public static List<TopicMyAppVO> convert2TopicMineAppVOList(List<TopicAppData> dtoList) {
if (ValidateUtils.isEmptyList(dtoList)) {
return new ArrayList<>();
}
List<TopicMyAppVO> voList = new ArrayList<>();
for (TopicAppData elem : dtoList) {
TopicMyAppVO vo = new TopicMyAppVO();
vo.setAppId(elem.getAppId());
vo.setAppName(elem.getAppName());
vo.setAppPrincipals(elem.getAppPrincipals());
vo.setProduceQuota(elem.getProduceQuota());
vo.setConsumerQuota(elem.getConsumerQuota());
vo.setAccess(elem.getAccess());
voList.add(vo);
}
return voList;
}
public static TopicBusinessInfoVO convert2TopicBusinessInfoVO(TopicBusinessInfo topicBusinessInfo) {
if (ValidateUtils.isNull(topicBusinessInfo)) {
return null;
}
TopicBusinessInfoVO topicBusinessInfoVO = new TopicBusinessInfoVO();
CopyUtils.copyProperties(topicBusinessInfoVO, topicBusinessInfo);
return topicBusinessInfoVO;
}
}
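For orientation, here is a minimal usage sketch of the converters above. The converter class name (TopicModelConverter) as well as the DAO/JMX calls and their parameters are assumptions used purely for illustration; they are not part of this commit.

// Illustrative sketch only: TopicModelConverter, topicMetricsDao and jmxService are assumed names.
List<TopicMetricsDO> doList = topicMetricsDao.getTopicMetrics(clusterId, topicName, startTime, endTime);
List<TopicRequestTimeVO> requestTimeVOList = TopicModelConverter.convert2TopicRequestTimeMetricsVOList(doList);

BaseMetrics requestMetrics = jmxService.getTopicRequestTimeMetrics(clusterId, topicName);  // assumed JMX lookup
List<TopicRequestTimeDetailVO> detailVOList = TopicModelConverter.convert2TopicRequestTimeDetailVOList(requestMetrics);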

View File

@@ -0,0 +1,33 @@
package com.xiaojukeji.kafka.manager.web.inteceptor;
import com.xiaojukeji.kafka.manager.account.LoginService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.web.servlet.HandlerInterceptor;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
* Login-check interceptor
* @author huangyiminghappy, zengqiao
* @date 19/4/29
*/
@Component
public class PermissionInterceptor implements HandlerInterceptor {
@Autowired
private LoginService loginService;
/**
* Pre-handle: runs before the controller method is invoked
* @return boolean false: intercept and stop further processing; true: let the request through
* @author zengqiao
* @date 19/4/29
*/
@Override
public boolean preHandle(HttpServletRequest request,
HttpServletResponse response,
Object handler) throws Exception {
return loginService.checkLogin(request, response);
}
}
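A minimal sketch of how such a HandlerInterceptor is typically registered with Spring MVC. The configuration class, package and path patterns below are assumptions for illustration and may differ from the actual wiring in this commit.

package com.xiaojukeji.kafka.manager.web.config;

import com.xiaojukeji.kafka.manager.web.inteceptor.PermissionInterceptor;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.config.annotation.InterceptorRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;

/**
 * Illustrative registration of PermissionInterceptor; class name and path patterns are assumptions.
 */
@Configuration
public class DemoWebConfig implements WebMvcConfigurer {
    @Autowired
    private PermissionInterceptor permissionInterceptor;

    @Override
    public void addInterceptors(InterceptorRegistry registry) {
        registry.addInterceptor(permissionInterceptor)
                .addPathPatterns("/api/v1/**")                // protect the versioned APIs (assumed pattern)
                .excludePathPatterns("/api/v1/sso/login");    // keep the login endpoint reachable (assumed pattern)
    }
}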

View File

@@ -0,0 +1,137 @@
package com.xiaojukeji.kafka.manager.web.inteceptor;
import com.codahale.metrics.Timer;
import com.xiaojukeji.kafka.manager.common.annotations.ApiLevel;
import com.xiaojukeji.kafka.manager.common.constant.ApiLevelContent;
import com.xiaojukeji.kafka.manager.common.entity.DeprecatedResponseResult;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.ResultStatus;
import com.xiaojukeji.kafka.manager.common.entity.ao.api.ApiCount;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.web.metrics.MetricsRegistry;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Pointcut;
import org.aspectj.lang.reflect.MethodSignature;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import org.springframework.web.context.request.RequestContextHolder;
import org.springframework.web.context.request.ServletRequestAttributes;
import java.lang.reflect.Method;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
/**
* Web API metrics-collection and rate-limiting interceptor
* @author zengqiao
* @date 20/1/11
*/
@Aspect
@Component
public class WebMetricsInterceptor {
private static final Logger LOGGER = LoggerFactory.getLogger(WebMetricsInterceptor.class);
/**
* Maximum number of concurrent threads allowed for each API level
*/
private static final Map<Integer, ApiCount> API_COUNT_MAP = new ConcurrentHashMap<>();
public WebMetricsInterceptor(@Value("${server.tomcat.max-threads}") int maxThreads) {
// Level-0 (VIP) APIs are not rate limited
API_COUNT_MAP.put(
ApiLevelContent.LEVEL_VIP_1,
new ApiCount(ApiLevelContent.LEVEL_VIP_1, Integer.MAX_VALUE, new AtomicInteger(0))
);
// Level-1 (important) APIs
API_COUNT_MAP.put(
ApiLevelContent.LEVEL_IMPORTANT_2,
new ApiCount(ApiLevelContent.LEVEL_IMPORTANT_2, maxThreads / 4, new AtomicInteger(0))
);
// Level-2 (normal) APIs
API_COUNT_MAP.put(
ApiLevelContent.LEVEL_NORMAL_3,
new ApiCount(ApiLevelContent.LEVEL_NORMAL_3, maxThreads / 8, new AtomicInteger(0))
);
// All other APIs
API_COUNT_MAP.put(
ApiLevelContent.LEVEL_DEFAULT_4,
new ApiCount(ApiLevelContent.LEVEL_DEFAULT_4, maxThreads / 4, new AtomicInteger(0))
);
}
/**
* Pointcut: all methods of the controllers under the web API packages
*/
private static final String POINT_CUT = "execution(* com.xiaojukeji.kafka.manager.web.api..*.*(..))";
@Pointcut(value = POINT_CUT)
public void pointcut() {
}
@Around("pointcut()")
public Object doAround(ProceedingJoinPoint proceedingJoinPoint) throws Throwable {
long beginTime = System.currentTimeMillis();
int apiLevel = ApiLevelContent.LEVEL_DEFAULT_4;
// Resolve the API level declared on the target method
MethodSignature msig = (MethodSignature) proceedingJoinPoint.getSignature();
String methodName = msig.getName();
Object target = proceedingJoinPoint.getTarget();
Method currentMethod = target.getClass().getMethod(methodName, msig.getParameterTypes());
ApiLevel annotation = currentMethod.getAnnotation(ApiLevel.class);
if (!ValidateUtils.isNull(annotation)) {
apiLevel = annotation.level();
}
ApiCount apiCount = API_COUNT_MAP.get(apiLevel);
if (ValidateUtils.isNull(apiCount)) {
apiCount = API_COUNT_MAP.get(ApiLevelContent.LEVEL_DEFAULT_4);
}
Object methodResult = null;
try {
if (apiCount.incAndCheckIsOverFlow()) {
return returnErrorDirect(methodName, apiCount);
}
methodResult = proceedingJoinPoint.proceed();
} catch (Throwable t) {
LOGGER.error("error occurred when proceed method:{}.", methodName, t);
} finally {
apiCount.decPresentNum();
metricsRecord(methodName, beginTime);
}
return methodResult;
}
private Object returnErrorDirect(String methodName, ApiCount apiCount) {
LOGGER.warn("api limiter, methodName:{}, apiLevel:{}, currentNum:{}, maxNum:{}, return directly.",
methodName, apiCount.getApiLevel(), apiCount.getCurrentNum(), apiCount.getMaxNum());
ServletRequestAttributes attributes = (ServletRequestAttributes) RequestContextHolder.getRequestAttributes();
String uri = attributes.getRequest().getRequestURI();
if (uri.contains("gateway/api/v1")) {
return DeprecatedResponseResult.failure("api limited");
}
return new Result<>(ResultStatus.OPERATION_FORBIDDEN);
}
private void metricsRecord(String methodName, long startTime) {
long costTime = System.currentTimeMillis() - startTime;
String metricsName = methodName.substring(0, 1).toUpperCase() + methodName.substring(1) + "-Timer";
Timer apiTimer = MetricsRegistry.newTimer(metricsName);
apiTimer.update(costTime, TimeUnit.MILLISECONDS);
Timer sumTimer = MetricsRegistry.newTimer("All_Timer");
sumTimer.update(costTime, TimeUnit.MILLISECONDS);
}
}
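To show how the aspect is driven, here is a sketch of a controller method carrying the ApiLevel annotation. The controller, its path and its payload are assumptions; only the annotation's level attribute and the ApiLevelContent constants are taken from the aspect above.

package com.xiaojukeji.kafka.manager.web.api.versionone;

import com.xiaojukeji.kafka.manager.common.annotations.ApiLevel;
import com.xiaojukeji.kafka.manager.common.constant.ApiLevelContent;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import org.springframework.web.bind.annotation.*;

/**
 * Illustrative controller only; path and payload are assumptions, not part of this commit.
 */
@RestController
@RequestMapping("api/v1/")
public class DemoTopicController {
    @ApiLevel(level = ApiLevelContent.LEVEL_IMPORTANT_2)   // this bucket is limited to server.tomcat.max-threads / 4
    @RequestMapping(path = "demo/topics", method = RequestMethod.GET)
    @ResponseBody
    public Result<String> listTopics() {
        // Methods without the annotation fall into the LEVEL_DEFAULT_4 bucket.
        return Result.buildSuc();
    }
}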

View File

@@ -0,0 +1,88 @@
package com.xiaojukeji.kafka.manager.web.metrics;
import com.codahale.metrics.*;
import com.xiaojukeji.kafka.manager.common.constant.LogConstant;
import com.xiaojukeji.kafka.manager.common.utils.factory.DefaultThreadFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import javax.annotation.PostConstruct;
import java.text.DecimalFormat;
import java.util.Map;
import java.util.SortedMap;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
/**
* @author zengqiao
* @date 20/7/31
*/
@Component
public class MetricsRegistry {
private static final Logger LOGGER = LoggerFactory.getLogger(LogConstant.API_METRICS_LOGGER);
private static final DecimalFormat DECIMAL_FORMAT = new DecimalFormat("#");
private static final ScheduledExecutorService PRINT_API_METRICS_THREAD =
Executors.newSingleThreadScheduledExecutor(new DefaultThreadFactory("PrintApiMetricsThread"));
private static final MetricRegistry METRIC_REGISTRY = new MetricRegistry();
@PostConstruct
public void init() {
PRINT_API_METRICS_THREAD.scheduleAtFixedRate(() -> {
try {
printTimerMetrics();
} catch (Throwable ex) {
// swallow everything so a single failure cannot kill the scheduled metrics-printing task
}
}, 10, 10, TimeUnit.SECONDS);
}
public static Timer newTimer(String metricName) {
return METRIC_REGISTRY.timer(metricName);
}
private synchronized void printTimerMetrics() {
SortedMap<String, Timer> timerMap = METRIC_REGISTRY.getTimers();
for (Map.Entry<String, Timer> entry: timerMap.entrySet()) {
final Snapshot snapshot = entry.getValue().getSnapshot();
LOGGER.info("type=TIMER, name={}-Count, value={}, unit=event",
entry.getKey(), entry.getValue().getCount());
LOGGER.info("type=TIMER, name={}-MeanRate, value={}, unit=qps",
entry.getKey(), DECIMAL_FORMAT.format(entry.getValue().getMeanRate()));
LOGGER.info("type=TIMER, name={}-M1Rate, value={}, unit=qps",
entry.getKey(), DECIMAL_FORMAT.format(entry.getValue().getOneMinuteRate()));
LOGGER.info("type=TIMER, name={}-M5Rate, value={}, unit=qps",
entry.getKey(), DECIMAL_FORMAT.format(entry.getValue().getFiveMinuteRate()));
LOGGER.info("type=TIMER, name={}-M15Rate, value={}, unit=qps",
entry.getKey(), DECIMAL_FORMAT.format(entry.getValue().getFifteenMinuteRate()));
LOGGER.info("type=TIMER, name={}-Min, value={}, unit=ms",
entry.getKey(), convertUnitFromNs2Ms(snapshot.getMin()));
LOGGER.info("type=TIMER, name={}-Max, value={}, unit=ms",
entry.getKey(), convertUnitFromNs2Ms(snapshot.getMax()));
LOGGER.info("type=TIMER, name={}-Mean, value={}, unit=ms",
entry.getKey(), convertUnitFromNs2Ms(snapshot.getMean()));
LOGGER.info("type=TIMER, name={}-StdDev, value={}, unit=ms",
entry.getKey(), convertUnitFromNs2Ms(snapshot.getStdDev()));
LOGGER.info("type=TIMER, name={}-50thPercentile, value={}, unit=ms",
entry.getKey(), convertUnitFromNs2Ms(snapshot.getMedian()));
LOGGER.info("type=TIMER, name={}-75thPercentile, value={}, unit=ms",
entry.getKey(), convertUnitFromNs2Ms(snapshot.get75thPercentile()));
LOGGER.info("type=TIMER, name={}-95thPercentile, value={}, unit=ms",
entry.getKey(), convertUnitFromNs2Ms(snapshot.get95thPercentile()));
LOGGER.info("type=TIMER, name={}-98thPercentile, value={}, unit=ms",
entry.getKey(), convertUnitFromNs2Ms(snapshot.get98thPercentile()));
LOGGER.info("type=TIMER, name={}-99thPercentile, value={}, unit=ms",
entry.getKey(), convertUnitFromNs2Ms(snapshot.get99thPercentile()));
LOGGER.info("type=TIMER, name={}-999thPercentile, value={}, unit=ms",
entry.getKey(), convertUnitFromNs2Ms(snapshot.get999thPercentile()));
}
}
private String convertUnitFromNs2Ms(double value) {
return DECIMAL_FORMAT.format(value / 1000000);
}
}
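A small usage sketch of the registry: the timed sleep merely stands in for real work, and the demo class itself is an assumption, not part of this commit.

package com.xiaojukeji.kafka.manager.web.metrics;

import com.codahale.metrics.Timer;
import java.util.concurrent.TimeUnit;

/**
 * Illustrative only; newTimer(...) is the static factory defined above.
 */
public class MetricsRegistryDemo {
    public static void main(String[] args) throws InterruptedException {
        Timer timer = MetricsRegistry.newTimer("DemoTask-Timer");
        try (Timer.Context ignored = timer.time()) {   // elapsed time is recorded when the context is closed
            TimeUnit.MILLISECONDS.sleep(50);           // stand-in for the code being measured
        }
        // Or record an externally measured duration, as WebMetricsInterceptor does:
        timer.update(120, TimeUnit.MILLISECONDS);
    }
}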

View File

@@ -0,0 +1,25 @@
package com.xiaojukeji.kafka.manager.web.utils;
import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import java.util.concurrent.TimeUnit;
/**
* @author zengqiao
* @date 20/9/3
*/
public class ResultCache {
private static final Cache<String, Result> CONTROLLER_RESULT_CACHE = Caffeine.newBuilder()
.maximumSize(10)
.expireAfterWrite(60, TimeUnit.SECONDS).build();
public static void put(String key, Result result) {
CONTROLLER_RESULT_CACHE.put(key, result);
}
public static Result get(String key) {
return CONTROLLER_RESULT_CACHE.getIfPresent(key);
}
}
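A hedged cache-aside sketch showing how a controller method might use ResultCache; the method, the key scheme and the loadTopicOverview(...) call are assumptions for illustration.

/**
 * Illustrative cache-aside usage; loadTopicOverview(...) is a hypothetical expensive computation.
 */
public Result getTopicOverview(Long clusterId, String topicName) {
    String cacheKey = "topic-overview#" + clusterId + "#" + topicName;
    Result cached = ResultCache.get(cacheKey);
    if (cached != null) {
        return cached;                                       // reuse for up to 60 seconds after it was written
    }
    Result fresh = loadTopicOverview(clusterId, topicName);  // assumed expensive call
    ResultCache.put(cacheKey, fresh);
    return fresh;
}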