Initialize version 3.0.0

Author: zengqiao
Date:   2022-08-18 17:04:05 +08:00
parent 462303fca0
commit 51832385b1
2446 changed files with 93177 additions and 127211 deletions

km-rest/pom.xml Normal file

@@ -0,0 +1,187 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>km-rest</artifactId>
<version>${km.revision}</version>
<packaging>jar</packaging>
<parent>
<artifactId>km</artifactId>
<groupId>com.xiaojukeji.kafka</groupId>
<version>${km.revision}</version>
</parent>
<properties>
<failOnMissingWebXml>false</failOnMissingWebXml>
<log4j2.version>2.16.0</log4j2.version>
<springboot.version>2.3.7.RELEASE</springboot.version>
<spring.version>5.3.18</spring.version>
</properties>
<dependencies>
<dependency>
<groupId>com.xiaojukeji.kafka</groupId>
<artifactId>km-account</artifactId>
<version>${project.parent.version}</version>
</dependency>
<dependency>
<groupId>com.xiaojukeji.kafka</groupId>
<artifactId>km-monitor</artifactId>
<version>${project.parent.version}</version>
</dependency>
<dependency>
<groupId>com.xiaojukeji.kafka</groupId>
<artifactId>km-core</artifactId>
<version>${project.parent.version}</version>
</dependency>
<dependency>
<groupId>com.xiaojukeji.kafka</groupId>
<artifactId>km-biz</artifactId>
<version>${project.parent.version}</version>
</dependency>
<dependency>
<groupId>com.xiaojukeji.kafka</groupId>
<artifactId>km-task</artifactId>
<version>${project.parent.version}</version>
</dependency>
<dependency>
<groupId>com.xiaojukeji.kafka</groupId>
<artifactId>km-collector</artifactId>
<version>${project.parent.version}</version>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
</dependency>
<dependency>
<groupId>io.springfox</groupId>
<artifactId>springfox-swagger2</artifactId>
</dependency>
<dependency>
<groupId>io.springfox</groupId>
<artifactId>springfox-swagger-ui</artifactId>
</dependency>
<!-- spring-boot -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
<version>${springboot.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-aop</artifactId>
<version>${springboot.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-logging</artifactId>
<version>${springboot.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-thymeleaf</artifactId>
<version>${springboot.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-validation</artifactId>
<version>${springboot.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<version>${springboot.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
</dependency>
<dependency>
<groupId>io.dropwizard.metrics</groupId>
<artifactId>metrics-core</artifactId>
</dependency>
<dependency>
<groupId>com.github.ben-manes.caffeine</groupId>
<artifactId>caffeine</artifactId>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-context-support</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>io.github.zqrferrari</groupId>
<artifactId>logi-security-spring-boot-starter</artifactId>
</dependency>
<dependency>
<groupId>io.micrometer</groupId>
<artifactId>micrometer-registry-prometheus</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-actuator-autoconfigure</artifactId>
<version>${springboot.version}</version>
</dependency>
</dependencies>
<build>
<finalName>ks-km</finalName>
<plugins>
<!-- Generate git.properties at build time -->
<plugin>
<groupId>pl.project13.maven</groupId>
<artifactId>git-commit-id-plugin</artifactId>
<executions>
<execution>
<goals>
<goal>revision</goal>
</goals>
</execution>
</executions>
<configuration>
<verbose>true</verbose>
<dateFormat>yyyy-MM-dd'T'HH:mm:ssZ</dateFormat>
<generateGitPropertiesFile>true</generateGitPropertiesFile>
<generateGitPropertiesFilename>${project.build.outputDirectory}/git.properties</generateGitPropertiesFilename>
<format>json</format>
<includeOnlyProperties>
<includeOnlyProperty>git.branch</includeOnlyProperty>
<includeOnlyProperty>git.build.version</includeOnlyProperty>
<includeOnlyProperty>git.commit.id</includeOnlyProperty>
<includeOnlyProperty>git.commit.id.abbrev</includeOnlyProperty>
<includeOnlyProperty>git.commit.time</includeOnlyProperty>
<includeOnlyProperty>git.build.time</includeOnlyProperty>
</includeOnlyProperties>
</configuration>
</plugin>
<!-- Package the executable jar -->
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
<version>${springboot.version}</version>
<configuration>
<includeSystemScope>true</includeSystemScope>
</configuration>
<executions>
<execution>
<goals>
<goal>repackage</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>
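A note on the build section above: git-commit-id-plugin writes a JSON-formatted git.properties (branch, build version, commit id, commit/build time) into the jar's classpath root, which the repackaged ks-km.jar then carries. Below is a minimal sketch of reading that file back at runtime; the class name GitInfoReader is hypothetical and the snippet is illustrative, not part of this commit.

import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.stream.Collectors;

public class GitInfoReader {
    /** Returns the raw JSON content of the generated git.properties, or null if it is absent. */
    public static String readGitProperties() throws Exception {
        InputStream in = GitInfoReader.class.getResourceAsStream("/git.properties");
        if (in == null) {
            return null; // the file is only produced by the Maven build, not by plain IDE runs
        }
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8))) {
            // Expected keys per includeOnlyProperties: git.branch, git.build.version,
            // git.commit.id, git.commit.id.abbrev, git.commit.time, git.build.time
            return reader.lines().collect(Collectors.joining("\n"));
        }
    }

    public static void main(String[] args) throws Exception {
        System.out.println(readGitProperties());
    }
}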


@@ -0,0 +1,44 @@
package com.xiaojukeji.know.streaming.km.rest;
import io.micrometer.core.instrument.MeterRegistry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.actuate.autoconfigure.metrics.MeterRegistryCustomizer;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.web.servlet.ServletComponentScan;
import org.springframework.context.annotation.Bean;
import org.springframework.scheduling.annotation.EnableAsync;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.transaction.annotation.EnableTransactionManagement;
/**
* Spring Boot application entry point
*/
@EnableAsync
@EnableScheduling
@ServletComponentScan
@EnableTransactionManagement
@SpringBootApplication(scanBasePackages = {"com.xiaojukeji.know.streaming.km"})
public class KnowStreaming {
private static final Logger LOGGER = LoggerFactory.getLogger(KnowStreaming.class);
public static void main(String[] args) {
try {
SpringApplication sa = new SpringApplication(KnowStreaming.class);
sa.run(args);
LOGGER.info("KnowStreaming-KM started");
} catch (Exception e) {
LOGGER.error("KnowStreaming-KM failed to start", e);
}
}
/**
* register prometheus
*/
@Bean
MeterRegistryCustomizer<MeterRegistry> configurer(@Value("${spring.application.name}") String applicationName){
return registry -> registry.config().commonTags("application", applicationName);
}
}
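For context on the configurer() bean above: a MeterRegistryCustomizer runs before any meters are bound, so registry.config().commonTags(...) makes every exported Prometheus series carry an application label. The standalone sketch below shows the same effect with a SimpleMeterRegistry; the application name "know-streaming" and the counter name "demo.requests" are made-up illustration values, not part of this commit.

import io.micrometer.core.instrument.Counter;
import io.micrometer.core.instrument.MeterRegistry;
import io.micrometer.core.instrument.simple.SimpleMeterRegistry;

public class CommonTagDemo {
    public static void main(String[] args) {
        MeterRegistry registry = new SimpleMeterRegistry();
        // Same call the configurer() bean makes on the real registry
        registry.config().commonTags("application", "know-streaming");

        Counter counter = Counter.builder("demo.requests").register(registry);
        counter.increment();

        // Prints the meter's tag list, which now includes application=know-streaming
        System.out.println(counter.getId().getTags());
    }
}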


@@ -0,0 +1,47 @@
package com.xiaojukeji.know.streaming.km.rest.api.v3.acl;
import com.didiglobal.logi.security.util.HttpRequestUtil;
import com.xiaojukeji.know.streaming.km.biz.kafkaacl.KafkaAclManager;
import com.xiaojukeji.know.streaming.km.common.bean.dto.acl.AclAtomDTO;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
import com.xiaojukeji.know.streaming.km.common.constant.ApiPrefix;
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import com.xiaojukeji.know.streaming.km.common.converter.KafkaAclConverter;
import com.xiaojukeji.know.streaming.km.core.service.acl.OpKafkaAclService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.*;
import java.util.List;
/**
* @author zengqiao
* @date 22/02/23
*/
@Api(tags = Constant.SWAGGER_API_TAG_PREFIX + "ACL-相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V3_PREFIX)
public class AclController {
@Autowired
private KafkaAclManager kafkaAclManager;
@Autowired
private OpKafkaAclService opKafkaAclService;
@ApiOperation(value = "KafkaACL创建", notes = "")
@PostMapping(value ="kafka-acls/batch")
@ResponseBody
public Result<Void> createKafkaAcl(@Validated @RequestBody List<AclAtomDTO> dtoList) {
return kafkaAclManager.batchCreateKafkaAcl(dtoList, HttpRequestUtil.getOperator());
}
@ApiOperation(value = "KafkaACL删除", notes = "")
@DeleteMapping(value ="kafka-acls")
@ResponseBody
public Result<Void> deleteKafkaAcl(@Validated @RequestBody AclAtomDTO dto) {
return opKafkaAclService.deleteKafkaAcl(KafkaAclConverter.convert2ACLAtomParam(dto), HttpRequestUtil.getOperator());
}
}


@@ -0,0 +1,80 @@
package com.xiaojukeji.know.streaming.km.rest.api.v3.broker;
import com.xiaojukeji.know.streaming.km.biz.broker.BrokerManager;
import com.xiaojukeji.know.streaming.km.common.bean.dto.pagination.PaginationBaseDTO;
import com.xiaojukeji.know.streaming.km.common.bean.entity.broker.Broker;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.PaginationResult;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.ResultStatus;
import com.xiaojukeji.know.streaming.km.common.bean.vo.broker.BrokerBasicVO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.log.LogDirVO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.metadata.BrokerMetadataCombineExistVO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.metadata.BrokerMetadataVO;
import com.xiaojukeji.know.streaming.km.common.constant.ApiPrefix;
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import com.xiaojukeji.know.streaming.km.common.constant.MsgConstant;
import com.xiaojukeji.know.streaming.km.core.service.broker.BrokerService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
/**
* @author zengqiao
* @date 22/02/23
*/
@Api(tags = Constant.SWAGGER_API_TAG_PREFIX + "Broker-相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V3_PREFIX)
public class BrokerController {
@Autowired
private BrokerService brokerService;
@Autowired
private BrokerManager brokerManager;
@ApiOperation(value = "Broker元信息", notes = "")
@GetMapping(value = "clusters/{clusterPhyId}/brokers/{brokerId}/metadata")
@ResponseBody
public Result<BrokerMetadataVO> getBrokerMetadata(@PathVariable Long clusterPhyId, @PathVariable Integer brokerId) {
Broker broker = brokerService.getBroker(clusterPhyId, brokerId);
if (broker == null) {
return Result.buildFromRSAndMsg(ResultStatus.NOT_EXIST, MsgConstant.getBrokerNotExist(clusterPhyId, brokerId));
}
return Result.buildSuc(new BrokerMetadataVO(broker.getBrokerId(), broker.getHost()));
}
@ApiOperation(value = "Broker元信息", notes = "带是否存在及是否存活")
@GetMapping(value = "clusters/{clusterPhyId}/brokers/{brokerId}/metadata-combine-exist")
@ResponseBody
public Result<BrokerMetadataCombineExistVO> getBrokerMetadataCombineExist(@PathVariable Long clusterPhyId, @PathVariable Integer brokerId) {
Broker broker = brokerService.getBroker(clusterPhyId, brokerId);
if (broker == null) {
BrokerMetadataCombineExistVO vo = new BrokerMetadataCombineExistVO();
vo.setBrokerId(brokerId);
vo.setAlive(false);
vo.setExist(false);
return Result.buildSuc(vo);
}
return Result.buildSuc(new BrokerMetadataCombineExistVO(broker.getBrokerId(), broker.getHost(), true, broker.alive()));
}
@ApiOperation(value = "Broker基本信息", notes = "")
@GetMapping(value = "clusters/{clusterPhyId}/brokers/{brokerId}/basic")
@ResponseBody
public Result<BrokerBasicVO> getBrokerBasic(@PathVariable Long clusterPhyId, @PathVariable Integer brokerId) {
return brokerManager.getBrokerBasic(clusterPhyId, brokerId);
}
@ApiOperation(value = "BrokerLogs信息", notes = "")
@GetMapping(value = "clusters/{clusterPhyId}/brokers/{brokerId}/log-dirs")
@ResponseBody
public PaginationResult<LogDirVO> getBrokerLogDirs(@PathVariable Long clusterPhyId,
@PathVariable Integer brokerId,
PaginationBaseDTO dto) {
return brokerManager.getBrokerLogDirs(clusterPhyId, brokerId, dto);
}
}


@@ -0,0 +1,62 @@
package com.xiaojukeji.know.streaming.km.rest.api.v3.broker;
import com.xiaojukeji.know.streaming.km.common.bean.dto.metrices.MetricDTO;
import com.xiaojukeji.know.streaming.km.common.bean.dto.metrices.MetricsBrokerDTO;
import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.BaseMetrics;
import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.BrokerMetrics;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
import com.xiaojukeji.know.streaming.km.common.bean.vo.metrics.line.MetricMultiLinesVO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.metrics.point.MetricPointVO;
import com.xiaojukeji.know.streaming.km.common.constant.ApiPrefix;
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import com.xiaojukeji.know.streaming.km.core.service.broker.BrokerMetricService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.List;
/**
* @author zengqiao
* @date 22/02/23
*/
@Api(tags = Constant.SWAGGER_API_TAG_PREFIX + "BrokerMetric-相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V3_PREFIX)
public class BrokerMetricController {
@Autowired
private BrokerMetricService brokerMetricService;
@ApiOperation(value = "物理集群指标信息")
@PostMapping(value = "clusters/{clusterPhyId}/broker-metrics")
@ResponseBody
public Result<List<MetricMultiLinesVO>> getBrokerMetricsOverview(
@PathVariable Long clusterPhyId, @RequestBody MetricsBrokerDTO param) {
return brokerMetricService.listBrokerMetricsFromES(clusterPhyId, param);
}
@ApiOperation(value = "Broker指标-单个Broker")
@PostMapping(value = "clusters/{clusterPhyId}/brokers/{brokerId}/metric-points")
@ResponseBody
public Result<List<MetricPointVO>> getBrokerMetricPoints(@PathVariable Long clusterPhyId,
@PathVariable Integer brokerId,
@RequestBody MetricDTO dto) {
return brokerMetricService.getMetricPointsFromES(clusterPhyId, brokerId, dto);
}
@ApiOperation(value = "物理集群-最近指标", notes = "")
@PostMapping(value = "clusters/{clusterPhyId}/brokers/{brokerId}/latest-metrics")
@ResponseBody
public Result<BaseMetrics> getLatestBrokerMetrics(@PathVariable Long clusterPhyId,
@PathVariable Integer brokerId,
@RequestBody List<String> metricsNames) {
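// NOTE: unlike ClusterMetricsController#getLatestClusterMetrics, metricsNames is accepted here but not forwarded to the ES query; the full latest-metrics document of the broker is returned.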
Result<BrokerMetrics> metricsResult = brokerMetricService.getLatestMetricsFromES(clusterPhyId, brokerId);
if (metricsResult.failed()) {
return Result.buildFromIgnoreData(metricsResult);
}
return Result.buildSuc(metricsResult.getData());
}
}


@@ -0,0 +1,38 @@
package com.xiaojukeji.know.streaming.km.rest.api.v3.changerecord;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.xiaojukeji.know.streaming.km.common.bean.dto.pagination.PaginationBaseDTO;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.PaginationResult;
import com.xiaojukeji.know.streaming.km.common.bean.po.changerecord.KafkaChangeRecordPO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.changerecord.KafkaChangeRecordVO;
import com.xiaojukeji.know.streaming.km.common.constant.ApiPrefix;
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import com.xiaojukeji.know.streaming.km.common.converter.ChangeRecordVOConverter;
import com.xiaojukeji.know.streaming.km.core.service.change.record.KafkaChangeRecordService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
/**
* @author zengqiao
* @date 22/04/07
*/
@Api(tags = Constant.SWAGGER_API_TAG_PREFIX + "集群Change记录-相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V3_PREFIX)
public class KafkaChangeRecordController {
@Autowired
private KafkaChangeRecordService kafkaChangeRecordService;
@ApiOperation(value = "集群变更记录")
@GetMapping(value = "clusters/{clusterPhyId}/change-records")
@ResponseBody
public PaginationResult<KafkaChangeRecordVO> getClusterPhyChangeRecords(@PathVariable Long clusterPhyId, PaginationBaseDTO dto) {
IPage<KafkaChangeRecordPO> iPage = kafkaChangeRecordService.pagingByCluster(clusterPhyId, dto);
return PaginationResult.buildSuc(
ChangeRecordVOConverter.convert2KafkaChangeRecordVOList(iPage.getRecords()),
iPage
);
}
}


@@ -0,0 +1,65 @@
package com.xiaojukeji.know.streaming.km.rest.api.v3.cluster;
import com.xiaojukeji.know.streaming.km.biz.cluster.ClusterBrokersManager;
import com.xiaojukeji.know.streaming.km.common.bean.dto.cluster.ClusterBrokersOverviewDTO;
import com.xiaojukeji.know.streaming.km.common.bean.entity.broker.Broker;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.PaginationResult;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
import com.xiaojukeji.know.streaming.km.common.bean.vo.cluster.res.ClusterBrokersOverviewVO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.cluster.res.ClusterBrokersStateVO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.metadata.BrokerMetadataVO;
import com.xiaojukeji.know.streaming.km.common.constant.ApiPrefix;
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
import com.xiaojukeji.know.streaming.km.common.utils.PaginationUtil;
import com.xiaojukeji.know.streaming.km.core.service.broker.BrokerService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.Arrays;
import java.util.List;
/**
* @author zengqiao
* @date 22/02/21
*/
@Api(tags = Constant.SWAGGER_API_TAG_PREFIX + "集群Brokers-相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V3_PREFIX)
public class ClusterBrokersController {
@Autowired
private BrokerService brokerService;
@Autowired
private ClusterBrokersManager clusterBrokersManager;
@ApiOperation(value = "集群Brokers元信息")
@GetMapping(value = "clusters/{clusterPhyId}/brokers-metadata")
@ResponseBody
public Result<List<BrokerMetadataVO>> getClusterPhyBrokersMetadata(@PathVariable Long clusterPhyId,
@RequestParam(required = false) String searchKeyword) {
List<Broker> brokerList = brokerService.listAllBrokersFromDB(clusterPhyId);
return Result.buildSuc(
PaginationUtil.pageByFuzzyFilter(ConvertUtil.list2List(brokerList, BrokerMetadataVO.class), searchKeyword, Arrays.asList("host"))
);
}
@ApiOperation(value = "集群brokers状态信息")
@GetMapping(value = "clusters/{clusterPhyId}/brokers-state")
@ResponseBody
public Result<ClusterBrokersStateVO> getClusterPhyBrokersState(@PathVariable Long clusterPhyId) {
return Result.buildSuc(clusterBrokersManager.getClusterPhyBrokersState(clusterPhyId));
}
@ApiOperation(value = "集群brokers信息列表")
@PostMapping(value = "clusters/{clusterPhyId}/brokers-overview")
@ResponseBody
public PaginationResult<ClusterBrokersOverviewVO> getClusterPhyBrokersOverview(@PathVariable Long clusterPhyId,
@RequestBody ClusterBrokersOverviewDTO dto) {
return clusterBrokersManager.getClusterPhyBrokersOverview(clusterPhyId, dto);
}
}


@@ -0,0 +1,93 @@
package com.xiaojukeji.know.streaming.km.rest.api.v3.cluster;
import com.xiaojukeji.know.streaming.km.biz.group.GroupManager;
import com.xiaojukeji.know.streaming.km.common.bean.dto.cluster.ClusterGroupsOverviewDTO;
import com.xiaojukeji.know.streaming.km.common.bean.dto.metrices.MetricGroupPartitionDTO;
import com.xiaojukeji.know.streaming.km.common.bean.dto.pagination.field.PaginationFuzzySearchFieldDTO;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.PaginationResult;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
import com.xiaojukeji.know.streaming.km.common.bean.entity.topic.TopicPartitionKS;
import com.xiaojukeji.know.streaming.km.common.bean.vo.metrics.line.MetricMultiLinesVO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.group.GroupTopicOverviewVO;
import com.xiaojukeji.know.streaming.km.common.constant.ApiPrefix;
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import com.xiaojukeji.know.streaming.km.common.utils.Tuple;
import com.xiaojukeji.know.streaming.km.common.utils.ValidateUtils;
import com.xiaojukeji.know.streaming.km.core.service.group.GroupMetricService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.List;
import java.util.Set;
/**
* @author zengqiao
* @date 22/02/23
*/
@Api(tags = Constant.SWAGGER_API_TAG_PREFIX + "集群Groups-相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V3_PREFIX)
public class ClusterGroupsController {
@Autowired
private GroupManager groupManager;
@Autowired
private GroupMetricService groupMetricService;
@ApiOperation(value = "集群Groups信息列表")
@PostMapping(value = "clusters/{clusterPhyId}/groups-overview")
@ResponseBody
public PaginationResult<GroupTopicOverviewVO> getClusterPhyGroupsOverview(@PathVariable Long clusterPhyId,
@RequestBody ClusterGroupsOverviewDTO dto) {
Tuple<String, String> searchKeyTuple = this.getSearchKeyWords(dto);
return groupManager.pagingGroupMembers(
clusterPhyId,
dto.getTopicName(),
dto.getGroupName(),
searchKeyTuple.getV1(),
searchKeyTuple.getV2(),
dto
);
}
@ApiOperation(value = "集群Groups指标信息")
@PostMapping(value = "clusters/{clusterPhyId}/group-metrics")
@ResponseBody
public Result<List<MetricMultiLinesVO>> getClusterPhyGroupMetrics(@PathVariable Long clusterPhyId, @RequestBody MetricGroupPartitionDTO param) {
return groupMetricService.listGroupMetricsFromES(clusterPhyId, param);
}
@ApiOperation(value = "Groups消费过的Partition", notes = "startTime和endTime表示查询的时间范围")
@GetMapping(value = "clusters/{clusterPhyId}/groups/{groupName}/partitions")
@ResponseBody
public Result<Set<TopicPartitionKS>> getClusterPhyGroupPartitions(@PathVariable Long clusterPhyId,
@PathVariable String groupName,
@RequestParam Long startTime,
@RequestParam Long endTime) {
return groupManager.listClusterPhyGroupPartitions(clusterPhyId, groupName, startTime, endTime);
}
/**************************************************** private method ****************************************************/
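/**
* Extract the topicName / groupName values from the fuzzy-search fields of the DTO,
* returning empty strings when no fuzzy-search conditions are present.
*/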
private Tuple<String, String> getSearchKeyWords(ClusterGroupsOverviewDTO dto) {
if (ValidateUtils.isEmptyList(dto.getFuzzySearchDTOList())) {
return new Tuple<>("", "");
}
String searchTopicName = "";
String searchGroupName = "";
for (PaginationFuzzySearchFieldDTO searchFieldDTO: dto.getFuzzySearchDTOList()) {
if (searchFieldDTO.getFieldName().equals("topicName")) {
searchTopicName = searchFieldDTO.getFieldValue();
}
if (searchFieldDTO.getFieldName().equals("groupName")) {
searchGroupName = searchFieldDTO.getFieldValue();
}
}
return new Tuple<>(searchTopicName, searchGroupName);
}
}


@@ -0,0 +1,123 @@
package com.xiaojukeji.know.streaming.km.rest.api.v3.cluster;
import com.xiaojukeji.know.streaming.km.common.bean.dto.metrices.MetricDTO;
import com.xiaojukeji.know.streaming.km.common.bean.dto.metrices.MetricsClusterPhyDTO;
import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.BaseMetrics;
import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.ClusterMetrics;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
import com.xiaojukeji.know.streaming.km.common.bean.vo.metrics.line.MetricLineVO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.metrics.line.MetricMultiLinesVO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.metrics.line.MetricMultiValuePointVO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.metrics.point.MetricPointVO;
import com.xiaojukeji.know.streaming.km.common.constant.ApiPrefix;
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import com.xiaojukeji.know.streaming.km.core.service.cluster.ClusterMetricService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.util.CollectionUtils;
import org.springframework.web.bind.annotation.*;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* @author zengqiao
* @date 22/02/21
*/
@Api(tags = Constant.SWAGGER_API_TAG_PREFIX + "集群Metrics-相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V3_PREFIX)
public class ClusterMetricsController {
@Autowired
private ClusterMetricService clusterMetricService;
@ApiOperation(value = "物理集群-最近指标", notes = "")
@PostMapping(value = "physical-clusters/{clusterPhyId}/latest-metrics")
@ResponseBody
public Result<BaseMetrics> getLatestClusterMetrics(@PathVariable Long clusterPhyId, @RequestBody List<String> metricsNames) {
Result<ClusterMetrics> metricsResult = clusterMetricService.getLatestMetricsFromES(clusterPhyId, metricsNames);
if (metricsResult.failed()) {
return Result.buildFromIgnoreData(metricsResult);
}
return Result.buildSuc(metricsResult.getData());
}
@ApiOperation(value = "物理集群-指标时刻信息", notes = "获取时间区间的单点值")
@PostMapping(value = "physical-clusters/{clusterPhyId}/metric-points")
@ResponseBody
public Result<List<MetricPointVO>> getClusterMetricPoints(@PathVariable Long clusterPhyId, @RequestBody MetricDTO dto) {
return clusterMetricService.getMetricPointsFromES(clusterPhyId, dto);
}
@ApiOperation(value = "物理集群-多指标历史信息(单点单指标)", notes = "多条指标线")
@PostMapping(value = "physical-clusters/metrics")
@ResponseBody
public Result<List<MetricMultiLinesVO>> getClusterPhyMetrics(@RequestBody MetricsClusterPhyDTO param) {
return clusterMetricService.listClusterMetricsFromES(param);
}
@ApiOperation(value = "物理集群-多指标历史信息(单点多指标)", notes = "一条指标线每个指标点包含多个指标值")
@PostMapping(value = "physical-clusters/metrics-multi-value")
@ResponseBody
public Result<List<MetricMultiValuePointVO>> getClusterPhyMetricsMultiValues(
@RequestBody MetricsClusterPhyDTO param) {
return metricMultiLinesVO2MetricMultiValuePointVO(clusterMetricService.listClusterMetricsFromES(param));
}
private Result<List<MetricMultiValuePointVO>> metricMultiLinesVO2MetricMultiValuePointVO(Result<List<MetricMultiLinesVO>> ret) {
if (ret.failed()) {
return Result.buildFromIgnoreData(ret);
}
List<MetricMultiValuePointVO> metricMultiValuePointVOS = new ArrayList<>();
List<MetricMultiLinesVO> metricMultiLinesVOS = ret.getData();
if (CollectionUtils.isEmpty(metricMultiLinesVOS)) {
return Result.buildSuc(metricMultiValuePointVOS);
}
Map<Long/*timestamp*/, Map<String/*metricName*/, String/*value*/>> map = new HashMap<>();
for (MetricMultiLinesVO multiLinesVO : metricMultiLinesVOS) {
String metricName = multiLinesVO.getMetricName();
if (CollectionUtils.isEmpty(multiLinesVO.getMetricLines())) {
continue;
}
// cluster level: a single metric line is expected, so only the first line is read
MetricLineVO metricLineVO = multiLinesVO.getMetricLines().get(0);
for (MetricPointVO metricPointVO : metricLineVO.getMetricPoints()) {
Long timestamp = metricPointVO.getTimeStamp();
Map<String/*metricName*/, String/*value*/> metricMap = map.computeIfAbsent(timestamp, k -> new HashMap<>());
metricMap.put(metricName, metricPointVO.getValue());
}
}
for (Map.Entry<Long, Map<String, String>> entry : map.entrySet()) {
MetricMultiValuePointVO metricMultiValuePointVO = new MetricMultiValuePointVO();
metricMultiValuePointVO.setTimeStamp(entry.getKey());
metricMultiValuePointVO.setValues(entry.getValue());
metricMultiValuePointVOS.add(metricMultiValuePointVO);
}
return Result.buildSuc(metricMultiValuePointVOS);
}
}
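To make the private conversion above concrete: it pivots "one time series per metric" into "one multi-value point per timestamp". The sketch below performs the same pivot with plain maps; the class name MetricPivotDemo and the metric names BytesIn/BytesOut are illustrative only, not project code.

import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;

public class MetricPivotDemo {
    public static void main(String[] args) {
        // per-metric series: metricName -> (timestamp -> value)
        Map<String, Map<Long, String>> byMetric = new LinkedHashMap<>();
        Map<Long, String> bytesIn = new LinkedHashMap<>();
        bytesIn.put(1000L, "10");
        bytesIn.put(2000L, "12");
        Map<Long, String> bytesOut = new LinkedHashMap<>();
        bytesOut.put(1000L, "7");
        bytesOut.put(2000L, "9");
        byMetric.put("BytesIn", bytesIn);
        byMetric.put("BytesOut", bytesOut);

        // pivot: timestamp -> (metricName -> value), mirroring the controller's conversion
        Map<Long, Map<String, String>> byTimestamp = new HashMap<>();
        byMetric.forEach((metric, series) ->
                series.forEach((ts, value) ->
                        byTimestamp.computeIfAbsent(ts, k -> new HashMap<>()).put(metric, value)));

        // e.g. {1000={BytesIn=10, BytesOut=7}, 2000={BytesIn=12, BytesOut=9}} (map ordering not guaranteed)
        System.out.println(byTimestamp);
    }
}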


@@ -0,0 +1,87 @@
package com.xiaojukeji.know.streaming.km.rest.api.v3.cluster;
import com.didiglobal.logi.security.util.HttpRequestUtil;
import com.xiaojukeji.know.streaming.km.common.bean.dto.cluster.ClusterPhyAddDTO;
import com.xiaojukeji.know.streaming.km.common.bean.dto.cluster.ClusterPhyModifyDTO;
import com.xiaojukeji.know.streaming.km.common.bean.entity.cluster.ClusterPhy;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.ResultStatus;
import com.xiaojukeji.know.streaming.km.common.bean.vo.cluster.ClusterPhyBaseCombineExistVO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.cluster.ClusterPhyBaseVO;
import com.xiaojukeji.know.streaming.km.common.constant.ApiPrefix;
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import com.xiaojukeji.know.streaming.km.common.constant.MsgConstant;
import com.xiaojukeji.know.streaming.km.common.converter.ClusterConverter;
import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
import com.xiaojukeji.know.streaming.km.core.service.cluster.ClusterPhyService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.*;
/**
* @author zengqiao
* @date 22/02/21
*/
@Api(tags = Constant.SWAGGER_API_TAG_PREFIX + "集群单Phy-相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V3_PREFIX)
public class ClusterPhyController {
@Autowired
private ClusterPhyService clusterPhyService;
@ApiOperation(value = "接入物理集群", notes = "")
@PostMapping(value = "physical-clusters")
@ResponseBody
public Result<Long> addClusterPhy(@Validated @RequestBody ClusterPhyAddDTO dto) throws Exception {
return Result.buildSuc(
clusterPhyService.addClusterPhy(ClusterConverter.convert2ClusterPhyPO(dto), HttpRequestUtil.getOperator())
);
}
@ApiOperation(value = "删除物理集群")
@DeleteMapping(value = "physical-clusters")
@ResponseBody
public Result<Void> deleteClusterPhy(@RequestParam("clusterPhyId") Long clusterPhyId) {
return clusterPhyService.removeClusterPhyById(clusterPhyId, HttpRequestUtil.getOperator());
}
@ApiOperation(value = "修改物理集群", notes = "")
@PutMapping(value = "physical-clusters")
@ResponseBody
public Result<Void> modifyClusterPhyById(@Validated @RequestBody ClusterPhyModifyDTO dto) throws Exception {
clusterPhyService.modifyClusterPhyById(ClusterConverter.convert2ClusterPhyPO(dto), HttpRequestUtil.getOperator());
return Result.buildSuc();
}
@ApiOperation(value = "物理集群基本信息", notes = "")
@GetMapping(value = "physical-clusters/{clusterPhyId}/basic")
@ResponseBody
public Result<ClusterPhyBaseVO> getClusterPhyBasic(@PathVariable Long clusterPhyId) {
ClusterPhy clusterPhy = clusterPhyService.getClusterByCluster(clusterPhyId);
if (clusterPhy == null) {
return Result.buildFromRSAndMsg(ResultStatus.NOT_EXIST, MsgConstant.getClusterPhyNotExist(clusterPhyId));
}
return Result.buildSuc(ConvertUtil.obj2Obj(clusterPhy, ClusterPhyBaseVO.class));
}
@ApiOperation(value = "物理集群基本信息", notes = "")
@GetMapping(value = "physical-clusters/{clusterPhyName}/basic-combine-exist")
@ResponseBody
public Result<ClusterPhyBaseCombineExistVO> getClusterPhyBasicCombineExist(@PathVariable String clusterPhyName) {
ClusterPhy clusterPhy = clusterPhyService.getClusterByClusterName(clusterPhyName);
if (clusterPhy == null) {
ClusterPhyBaseCombineExistVO vo = new ClusterPhyBaseCombineExistVO();
vo.setExist(false);
return Result.buildSuc(vo);
}
ClusterPhyBaseCombineExistVO vo = ConvertUtil.obj2Obj(clusterPhy, ClusterPhyBaseCombineExistVO.class);
vo.setExist(true);
return Result.buildSuc(vo);
}
}


@@ -0,0 +1,90 @@
package com.xiaojukeji.know.streaming.km.rest.api.v3.cluster;
import com.xiaojukeji.know.streaming.km.common.bean.dto.pagination.PaginationBaseDTO;
import com.xiaojukeji.know.streaming.km.common.bean.dto.pagination.PaginationPreciseAndFuzzySearchDTO;
import com.xiaojukeji.know.streaming.km.common.bean.dto.pagination.field.PaginationFuzzySearchFieldDTO;
import com.xiaojukeji.know.streaming.km.common.bean.dto.pagination.field.PaginationPreciseFilterFieldDTO;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.PaginationResult;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
import com.xiaojukeji.know.streaming.km.common.bean.po.KafkaAclPO;
import com.xiaojukeji.know.streaming.km.common.bean.po.KafkaUserPO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.acl.AclBindingVO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.kafkauser.KafkaUserVO;
import com.xiaojukeji.know.streaming.km.common.constant.ApiPrefix;
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import com.xiaojukeji.know.streaming.km.common.converter.KafkaAclConverter;
import com.xiaojukeji.know.streaming.km.common.converter.KafkaUserVOConverter;
import com.xiaojukeji.know.streaming.km.common.utils.PaginationUtil;
import com.xiaojukeji.know.streaming.km.common.utils.ValidateUtils;
import com.xiaojukeji.know.streaming.km.core.service.acl.KafkaAclService;
import com.xiaojukeji.know.streaming.km.core.service.kafkauser.KafkaUserService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.*;
import java.util.Arrays;
import java.util.List;
/**
* @author zengqiao
* @date 22/02/23
*/
@Api(tags = Constant.SWAGGER_API_TAG_PREFIX + "集群Security-相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V3_PREFIX)
public class ClusterSecuritiesController {
@Autowired
private KafkaAclService kafkaAclService;
@Autowired
private KafkaUserService kafkaUserService;
@ApiOperation(value = "集群所有ACL信息")
@PostMapping(value = "clusters/{clusterPhyId}/acl-bindings")
@ResponseBody
public PaginationResult<AclBindingVO> aclBindings(@PathVariable Long clusterPhyId,
@RequestBody @Validated PaginationPreciseAndFuzzySearchDTO dto) {
List<KafkaAclPO> poList = kafkaAclService.getKafkaAclFromDB(clusterPhyId);
List<AclBindingVO> voList = KafkaAclConverter.convert2AclBindingVOList(poList);
// precise (exact-match) filtering
if (!ValidateUtils.isEmptyList(dto.getPreciseFilterDTOList())) {
for (PaginationPreciseFilterFieldDTO preciseFilterFieldDTO: dto.getPreciseFilterDTOList()) {
voList = PaginationUtil.pageByPreciseFilter(voList, preciseFilterFieldDTO.getFieldName(), preciseFilterFieldDTO.getFieldValueList());
}
}
// fuzzy-search filtering
if (!ValidateUtils.isEmptyList(dto.getFuzzySearchDTOList())) {
for (PaginationFuzzySearchFieldDTO fuzzySearchFieldDTO: dto.getFuzzySearchDTOList()) {
voList = PaginationUtil.pageByFuzzyFilter(voList, fuzzySearchFieldDTO.getFieldValue(), Arrays.asList(fuzzySearchFieldDTO.getFieldName()));
}
}
// pagination
return PaginationUtil.pageBySubData(voList, dto);
}
@ApiOperation(value = "集群所有Kafka-User信息(不分页)")
@GetMapping(value = "clusters/{clusterPhyId}/kafka-users")
@ResponseBody
public Result<List<KafkaUserVO>> getKafkaUsers(@PathVariable Long clusterPhyId, @RequestParam(required = false) String searchKeyword) {
List<KafkaUserPO> poList = kafkaUserService.getKafkaUserByClusterIdFromDB(clusterPhyId, searchKeyword);
return Result.buildSuc(KafkaUserVOConverter.convert2KafkaUserVOList(clusterPhyId, poList));
}
@ApiOperation(value = "集群所有Kafka-User信息(分页)")
@PostMapping(value = "clusters/{clusterPhyId}/kafka-users")
@ResponseBody
public PaginationResult<KafkaUserVO> getKafkaUsers(@PathVariable Long clusterPhyId, @RequestBody @Validated PaginationBaseDTO dto) {
PaginationResult<KafkaUserPO> paginationResult = kafkaUserService.pagingKafkaUserFromDB(clusterPhyId, dto);
if (paginationResult.failed()) {
return PaginationResult.buildFailure(paginationResult, dto);
}
return PaginationResult.buildSuc(KafkaUserVOConverter.convert2KafkaUserVOList(clusterPhyId, paginationResult.getData().getBizData()), paginationResult);
}
}


@@ -0,0 +1,71 @@
package com.xiaojukeji.know.streaming.km.rest.api.v3.cluster;
import com.xiaojukeji.know.streaming.km.biz.cluster.ClusterTopicsManager;
import com.xiaojukeji.know.streaming.km.common.bean.dto.cluster.ClusterTopicsOverviewDTO;
import com.xiaojukeji.know.streaming.km.common.bean.dto.metrices.MetricsTopicDTO;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.PaginationResult;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
import com.xiaojukeji.know.streaming.km.common.bean.vo.cluster.res.ClusterPhyTopicsOverviewVO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.metadata.TopicMetadataVO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.metrics.line.MetricMultiLinesVO;
import com.xiaojukeji.know.streaming.km.common.constant.ApiPrefix;
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import com.xiaojukeji.know.streaming.km.common.converter.TopicVOConverter;
import com.xiaojukeji.know.streaming.km.common.utils.PaginationUtil;
import com.xiaojukeji.know.streaming.km.core.service.topic.TopicMetricService;
import com.xiaojukeji.know.streaming.km.core.service.topic.TopicService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.*;
import java.util.Arrays;
import java.util.List;
/**
* @author zengqiao
* @date 22/02/21
*/
@Api(tags = Constant.SWAGGER_API_TAG_PREFIX + "集群Topics-相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V3_PREFIX)
public class ClusterTopicsController {
@Autowired
private TopicService topicService;
@Autowired
private TopicMetricService topicMetricService;
@Autowired
private ClusterTopicsManager clusterTopicsManager;
@ApiOperation(value = "集群Topic元信息")
@GetMapping(value = "clusters/{clusterPhyId}/topics-metadata")
@ResponseBody
public Result<List<TopicMetadataVO>> getClusterPhyTopicsMetadata(@PathVariable Long clusterPhyId, @RequestParam(required = false) String searchKeyword) {
return Result.buildSuc(
PaginationUtil.pageByFuzzyFilter(
TopicVOConverter.convert2TopicMetadataVOList(topicService.listTopicsFromDB(clusterPhyId)),
searchKeyword,
Arrays.asList("topicName")
)
);
}
@ApiOperation(value = "集群Topics信息列表")
@PostMapping(value = "clusters/{clusterPhyId}/topics-overview")
@ResponseBody
public PaginationResult<ClusterPhyTopicsOverviewVO> getClusterPhyTopicsOverview(@PathVariable Long clusterPhyId,
@Validated @RequestBody ClusterTopicsOverviewDTO dto) {
return clusterTopicsManager.getClusterPhyTopicsOverview(clusterPhyId, dto);
}
@ApiOperation(value = "集群Topics指标信息")
@PostMapping(value = "clusters/{clusterPhyId}/topic-metrics")
@ResponseBody
public Result<List<MetricMultiLinesVO>> getClusterPhyMetrics(@PathVariable Long clusterPhyId, @Validated @RequestBody MetricsTopicDTO param) {
return topicMetricService.listTopicMetricsFromES(clusterPhyId, param);
}
}


@@ -0,0 +1,43 @@
package com.xiaojukeji.know.streaming.km.rest.api.v3.cluster;
import com.xiaojukeji.know.streaming.km.biz.cluster.MultiClusterPhyManager;
import com.xiaojukeji.know.streaming.km.common.bean.dto.cluster.MultiClusterDashboardDTO;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.PaginationResult;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
import com.xiaojukeji.know.streaming.km.common.bean.vo.cluster.ClusterPhysStateVO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.cluster.ClusterPhyDashboardVO;
import com.xiaojukeji.know.streaming.km.common.constant.ApiPrefix;
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.*;
/**
* @author zengqiao
* @date 22/02/21
*/
@Api(tags = Constant.SWAGGER_API_TAG_PREFIX + "集群多Phy-相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V3_PREFIX)
public class MultiClusterPhyController {
@Autowired
private MultiClusterPhyManager multiClusterPhyManager;
@ApiOperation(value = "多物理集群-大盘", notes = "")
@PostMapping(value = "physical-clusters/dashboard")
@ResponseBody
public PaginationResult<ClusterPhyDashboardVO> getClusterPhyBasic(@RequestBody @Validated MultiClusterDashboardDTO dto) {
return multiClusterPhyManager.getClusterPhysDashboard(dto);
}
@ApiOperation(value = "多物理集群-状态", notes = "")
@GetMapping(value = "physical-clusters/state")
@ResponseBody
public Result<ClusterPhysStateVO> getClusterPhysState() {
return Result.buildSuc(ConvertUtil.obj2Obj(multiClusterPhyManager.getClusterPhysState(), ClusterPhysStateVO.class));
}
}


@@ -0,0 +1,128 @@
package com.xiaojukeji.know.streaming.km.rest.api.v3.config;
import com.didiglobal.logi.security.util.HttpRequestUtil;
import com.xiaojukeji.know.streaming.km.biz.broker.BrokerConfigManager;
import com.xiaojukeji.know.streaming.km.biz.topic.TopicConfigManager;
import com.xiaojukeji.know.streaming.km.common.bean.dto.config.KafkaConfigModifyBrokerDTO;
import com.xiaojukeji.know.streaming.km.common.bean.dto.config.KafkaConfigModifyTopicDTO;
import com.xiaojukeji.know.streaming.km.common.bean.dto.pagination.PaginationMulPreciseFilterDTO;
import com.xiaojukeji.know.streaming.km.common.bean.entity.config.kafkaconfig.KafkaTopicDefaultConfig;
import com.xiaojukeji.know.streaming.km.common.bean.entity.config.kafkaconfig.KafkaConfigDetail;
import com.xiaojukeji.know.streaming.km.common.bean.entity.param.config.KafkaBrokerConfigModifyParam;
import com.xiaojukeji.know.streaming.km.common.bean.entity.param.config.KafkaTopicConfigParam;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.PaginationResult;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
import com.xiaojukeji.know.streaming.km.common.bean.vo.config.kafka.KafkaTopicDefaultConfigVO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.config.kafka.KafkaBrokerConfigVO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.config.kafka.KafkaTopicConfigVO;
import com.xiaojukeji.know.streaming.km.common.constant.ApiPrefix;
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
import com.xiaojukeji.know.streaming.km.common.utils.PaginationUtil;
import com.xiaojukeji.know.streaming.km.core.service.topic.TopicConfigService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.*;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* @author zengqiao
* @date 22/02/24
*/
@Api(tags = Constant.SWAGGER_API_TAG_PREFIX + "KafkaConfig-相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V3_PREFIX)
public class KafkaConfigController {
@Autowired
private TopicConfigService kafkaConfigService;
@Autowired
private TopicConfigManager topicConfigManager;
@Autowired
private BrokerConfigManager brokerConfigManager;
@ApiOperation(value = "Config-Broker配置查看", notes = "")
@PostMapping(value = "clusters/{clusterPhyId}/config-brokers/{brokerId}/configs")
@ResponseBody
public PaginationResult<KafkaBrokerConfigVO> getConfigBroker(@PathVariable Long clusterPhyId,
@PathVariable Integer brokerId,
@RequestBody PaginationMulPreciseFilterDTO dto) {
Result<List<KafkaBrokerConfigVO>> configResult = brokerConfigManager.getBrokerConfigDetail(clusterPhyId, brokerId);
if (configResult.failed()) {
return PaginationResult.buildFailure(configResult, dto);
}
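// fuzzy-filter by the "name" field, then apply the precise filters, then paginate the result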
return PaginationUtil.pageBySubData(
PaginationUtil.pageByPreciseFilter(
PaginationUtil.pageByFuzzyFilter(configResult.getData(), dto.getSearchKeywords(), Arrays.asList("name")),
dto.getPreciseFilterDTOList()
),
dto
);
}
@ApiOperation(value = "Config-Topic默认配置", notes = "")
@GetMapping(value = "clusters/{clusterPhyId}/config-topics/default")
@ResponseBody
public Result<List<KafkaTopicDefaultConfigVO>> getDefaultTopicConfig(@PathVariable Long clusterPhyId) {
Result<List<KafkaTopicDefaultConfig>> configResult = topicConfigManager.getDefaultTopicConfig(clusterPhyId);
if (configResult.failed()) {
return Result.buildFromIgnoreData(configResult);
}
return Result.buildSuc(ConvertUtil.list2List(configResult.getData(), KafkaTopicDefaultConfigVO.class));
}
@ApiOperation(value = "Config-Topic配置查看", notes = "")
@PostMapping(value = "clusters/{clusterPhyId}/config-topics/{topicName}/configs")
@ResponseBody
public PaginationResult<KafkaTopicConfigVO> getConfigTopic(@PathVariable Long clusterPhyId,
@PathVariable String topicName,
@RequestBody PaginationMulPreciseFilterDTO dto) {
Result<List<KafkaConfigDetail>> configResult = kafkaConfigService.getTopicConfigDetailFromKafka(clusterPhyId, topicName);
if (configResult.failed()) {
return PaginationResult.buildFailure(configResult, dto);
}
return PaginationUtil.pageBySubData(
PaginationUtil.pageByPreciseFilter(
PaginationUtil.pageByFuzzyFilter(ConvertUtil.list2List(configResult.getData(), KafkaTopicConfigVO.class), dto.getSearchKeywords(), Arrays.asList("name")),
dto.getPreciseFilterDTOList()
),
dto
);
}
@ApiOperation(value = "Config-Broker配置修改", notes = "")
@PutMapping(value = "config-brokers")
@ResponseBody
public Result<Void> modifyKafkaBrokerConfig(@Validated @RequestBody KafkaConfigModifyBrokerDTO dto) {
return brokerConfigManager.modifyBrokerConfig(
new KafkaBrokerConfigModifyParam(
dto.getClusterId(),
dto.getBrokerId(),
new HashMap<String, String>((Map) dto.getChangedProps()),
dto.getApplyAll()
),
HttpRequestUtil.getOperator()
);
}
@ApiOperation(value = "Config-Topic配置修改", notes = "")
@PutMapping(value = "config-topics")
@ResponseBody
public Result<Void> modifyKafkaTopicConfig(@Validated @RequestBody KafkaConfigModifyTopicDTO dto) {
return kafkaConfigService.modifyTopicConfig(
new KafkaTopicConfigParam(dto.getClusterId(), dto.getTopicName(), (Map) dto.getChangedProps()),
HttpRequestUtil.getOperator()
);
}
}


@@ -0,0 +1,27 @@
package com.xiaojukeji.know.streaming.km.rest.api.v3.config;
import com.xiaojukeji.know.streaming.km.common.bean.dto.pagination.PaginationBaseDTO;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.PaginationResult;
import com.xiaojukeji.know.streaming.km.common.bean.vo.config.kafka.KafkaInZKConfigChangedVO;
import com.xiaojukeji.know.streaming.km.common.constant.ApiPrefix;
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.web.bind.annotation.*;
/**
* @author zengqiao
* @date 22/02/24
*/
@Api(tags = Constant.SWAGGER_API_TAG_PREFIX + "KafkaConfigInZK-相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V3_PREFIX)
public class KafkaInZKConfigController {
@ApiOperation(value = "Kafka-ZK上的配置变更记录", notes = "")
@GetMapping(value = "clusters/{clusterPhyId}/in-zk/config-changed-history")
@ResponseBody
public PaginationResult<KafkaInZKConfigChangedVO> getKafkaInZKConfigChangedHistory(@PathVariable Long clusterPhyId,
PaginationBaseDTO dto) {
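// NOTE: ZK config-change history is not collected yet; this appears to be a placeholder that returns an empty successful page.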
return PaginationResult.buildSuc(dto);
}
}


@@ -0,0 +1,52 @@
package com.xiaojukeji.know.streaming.km.rest.api.v3.config;
import com.didiglobal.logi.security.util.HttpRequestUtil;
import com.xiaojukeji.know.streaming.km.common.bean.dto.config.platform.PlatformClusterConfigDTO;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
import com.xiaojukeji.know.streaming.km.common.bean.po.config.PlatformClusterConfigPO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.config.platform.PlatformClusterConfigVO;
import com.xiaojukeji.know.streaming.km.common.constant.ApiPrefix;
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
import com.xiaojukeji.know.streaming.km.core.service.config.PlatformClusterConfigService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.*;
import java.util.ArrayList;
import java.util.List;
/**
* @author zengqiao
* @date 22/02/24
*/
@Api(tags = Constant.SWAGGER_API_TAG_PREFIX + "PlatformClusterConfig-相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V3_PREFIX)
public class PlatformClusterConfigController {
@Autowired
private PlatformClusterConfigService platformClusterConfigService;
@ApiOperation(value = "平台集群配置-查看", notes = "")
@GetMapping(value = "platform-configs/clusters/{clusterId}/groups/{groupName}/configs")
@ResponseBody
public Result<List<PlatformClusterConfigVO>> getPlatformClusterConfig(@PathVariable Long clusterId, @PathVariable String groupName) {
return Result.buildSuc(ConvertUtil.list2List(
new ArrayList<>(platformClusterConfigService.getByClusterAndGroupWithoutDefault(clusterId, groupName).values()),
PlatformClusterConfigVO.class)
);
}
@ApiOperation(value = "平台集群配置-替换", notes = "")
@PutMapping(value = "platform-configs")
@ResponseBody
public Result<Void> putPlatformClusterConfig(@Validated @RequestBody List<PlatformClusterConfigDTO> dtoList) {
List<PlatformClusterConfigPO> poList = ConvertUtil.list2List(dtoList, PlatformClusterConfigPO.class);
poList.forEach(elem -> elem.setOperator(HttpRequestUtil.getOperator()));
return platformClusterConfigService.batchReplace(poList, HttpRequestUtil.getOperator());
}
}


@@ -0,0 +1,65 @@
package com.xiaojukeji.know.streaming.km.rest.api.v3.group;
import com.didiglobal.logi.security.util.HttpRequestUtil;
import com.xiaojukeji.know.streaming.km.biz.group.GroupManager;
import com.xiaojukeji.know.streaming.km.common.bean.dto.group.GroupOffsetResetDTO;
import com.xiaojukeji.know.streaming.km.common.bean.dto.group.GroupTopicConsumedDTO;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.PaginationResult;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
import com.xiaojukeji.know.streaming.km.common.bean.po.group.GroupMemberPO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.group.GroupTopicConsumedDetailVO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.metadata.GroupMetadataCombineExistVO;
import com.xiaojukeji.know.streaming.km.common.constant.ApiPrefix;
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import com.xiaojukeji.know.streaming.km.core.service.group.GroupService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.*;
/**
* @author zengqiao
* @date 22/02/22
*/
@Api(tags = Constant.SWAGGER_API_TAG_PREFIX + "Group-相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V3_PREFIX)
public class GroupController {
@Autowired
private GroupManager groupManager;
@Autowired
private GroupService groupService;
@ApiOperation(value = "重置组消费偏移", notes = "")
@PutMapping(value = "group-offsets")
@ResponseBody
public Result<Void> resetGroupOffsets(@Validated @RequestBody GroupOffsetResetDTO dto) throws Exception {
return groupManager.resetGroupOffsets(dto, HttpRequestUtil.getOperator());
}
@ApiOperation(value = "Group-Topic指标信息", notes = "")
@PostMapping(value = "clusters/{clusterId}/topics/{topicName}/groups/{groupName}/metric")
@ResponseBody
public PaginationResult<GroupTopicConsumedDetailVO> getTopicGroupMetric(@PathVariable Long clusterId,
@PathVariable String topicName,
@PathVariable String groupName,
@RequestBody GroupTopicConsumedDTO dto) throws Exception {
return groupManager.pagingGroupTopicConsumedMetrics(clusterId, topicName, groupName, dto.getLatestMetricNames(), dto);
}
@ApiOperation(value = "Group元信息", notes = "带是否存在信息")
@GetMapping(value = "clusters/{clusterPhyId}/groups/{groupName}/topics/{topicName}/metadata-combine-exist")
@ResponseBody
public Result<GroupMetadataCombineExistVO> getGroupMetadataCombineExist(@PathVariable Long clusterPhyId,
@PathVariable String groupName,
@PathVariable String topicName) {
GroupMemberPO po = groupService.getGroupFromDB(clusterPhyId, groupName, topicName);
if (po == null) {
return Result.buildSuc(new GroupMetadataCombineExistVO(clusterPhyId, groupName, topicName, false));
}
return Result.buildSuc(new GroupMetadataCombineExistVO(clusterPhyId, groupName, topicName, true));
}
}


@@ -0,0 +1,73 @@
package com.xiaojukeji.know.streaming.km.rest.api.v3.health;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
import com.xiaojukeji.know.streaming.km.common.bean.vo.health.HealthCheckConfigVO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.health.HealthScoreBaseResultVO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.health.HealthScoreResultDetailVO;
import com.xiaojukeji.know.streaming.km.common.constant.ApiPrefix;
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import com.xiaojukeji.know.streaming.km.common.converter.HealthScoreVOConverter;
import com.xiaojukeji.know.streaming.km.common.enums.config.ConfigGroupEnum;
import com.xiaojukeji.know.streaming.km.common.enums.health.HealthCheckDimensionEnum;
import com.xiaojukeji.know.streaming.km.core.service.health.checkresult.HealthCheckResultService;
import com.xiaojukeji.know.streaming.km.core.service.health.score.HealthScoreService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.ArrayList;
import java.util.List;
/**
* @author zengqiao
* @date 22/02/23
*/
@Api(tags = Constant.SWAGGER_API_TAG_PREFIX + "KafkaHealth-相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V3_PREFIX)
public class KafkaHealthController {
@Autowired
private HealthScoreService healthScoreService;
@Autowired
private HealthCheckResultService healthCheckResultService;
@ApiOperation(value = "集群-健康检查详情")
@GetMapping(value = "clusters/{clusterPhyId}/health-detail")
@ResponseBody
public Result<List<HealthScoreResultDetailVO>> getClusterHealthCheckResultDetail(@PathVariable Long clusterPhyId,
@RequestParam(required = false) Integer dimensionCode) {
HealthCheckDimensionEnum dimensionEnum = HealthCheckDimensionEnum.getByCode(dimensionCode);
if (dimensionEnum != null && !HealthCheckDimensionEnum.UNKNOWN.equals(dimensionEnum)) { // constant-first equals and null guard, since dimensionCode is optional
return Result.buildSuc(HealthScoreVOConverter.convert2HealthScoreResultDetailVOList(healthScoreService.getDimensionHealthScoreResult(clusterPhyId, dimensionEnum), false));
}
return Result.buildSuc(HealthScoreVOConverter.convert2HealthScoreResultDetailVOList(
healthScoreService.getClusterHealthScoreResult(clusterPhyId),
true
));
}
@ApiOperation(value = "具体资源-健康检查详情")
@GetMapping(value = "clusters/{clusterPhyId}/dimensions/{dimensionCode}/resources/{resName}/health-detail")
@ResponseBody
public Result<List<HealthScoreBaseResultVO>> getClusterResHealthCheckResult(@PathVariable Long clusterPhyId,
@PathVariable Integer dimensionCode,
@PathVariable String resName) {
return Result.buildSuc(HealthScoreVOConverter.convert2HealthScoreBaseResultVOList(
healthScoreService.getResHealthScoreResult(clusterPhyId, dimensionCode, resName)
));
}
@ApiOperation(value = "健康检查配置")
@GetMapping(value = "clusters/{clusterPhyId}/health-configs")
@ResponseBody
public Result<List<HealthCheckConfigVO>> getHealthCheckConfig(@PathVariable Long clusterPhyId) {
return Result.buildSuc(
HealthScoreVOConverter.convert2HealthCheckConfigVOList(
ConfigGroupEnum.HEALTH.name(),
new ArrayList<>(healthCheckResultService.getClusterHealthConfig(clusterPhyId).values())
));
}
}


@@ -0,0 +1,113 @@
package com.xiaojukeji.know.streaming.km.rest.api.v3.job;
import com.didiglobal.logi.security.util.HttpRequestUtil;
import com.xiaojukeji.know.streaming.km.common.bean.dto.job.JobPaginationDTO;
import com.xiaojukeji.know.streaming.km.common.bean.dto.job.JobDTO;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.PaginationResult;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
import com.xiaojukeji.know.streaming.km.common.bean.vo.job.*;
import com.xiaojukeji.know.streaming.km.common.bean.vo.job.sub.SubJobPartitionDetailVO;
import com.xiaojukeji.know.streaming.km.common.constant.ApiPrefix;
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import com.xiaojukeji.know.streaming.km.common.enums.job.JobTypeEnum;
import com.xiaojukeji.know.streaming.km.core.service.job.JobService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
/**
* @author zengqiao
* @date 22/02/23
*/
@Api(tags = Constant.SWAGGER_API_TAG_PREFIX + "Jobs-相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V3_PREFIX)
public class KSJobController {
@Autowired
private JobService jobService;
@ApiOperation(value = "任务类型", notes = "")
@GetMapping(value = "clusters/{clusterPhyId}/jobs/type-enums")
@ResponseBody
public Result<List<JobTypeVO>> types(@PathVariable Long clusterPhyId) {
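// Expose all JobTypeEnum values as (type, message) pairs for the front end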
return Result.buildSuc(Arrays.stream(JobTypeEnum.values())
.map(j -> new JobTypeVO(j.getType(), j.getMessage()))
.collect(Collectors.toList()));
}
@ApiOperation(value = "创建任务")
@PostMapping(value = "clusters/{clusterPhyId}/jobs")
@ResponseBody
public Result<Void> createJob(@PathVariable Long clusterPhyId, @RequestBody JobDTO jobDTO) {
return jobService.addTask(clusterPhyId, jobDTO, HttpRequestUtil.getOperator());
}
@ApiOperation(value = "删除任务")
@DeleteMapping(value = "clusters/{clusterPhyId}/jobs/{jobId}")
@ResponseBody
public Result<Void> deleteJobById(@PathVariable Long clusterPhyId, @PathVariable Long jobId) {
return jobService.deleteById(clusterPhyId, jobId, HttpRequestUtil.getOperator());
}
@ApiOperation(value = "修改任务")
@PutMapping(value = "clusters/{clusterPhyId}/jobs")
@ResponseBody
public Result<Void> modifyJobById(@PathVariable Long clusterPhyId, @RequestBody JobDTO jobDTO) {
return jobService.updateTask(clusterPhyId, jobDTO, HttpRequestUtil.getOperator());
}
@ApiOperation(value = "集群任务列表")
@PostMapping(value = "clusters/{clusterPhyId}/jobs-overview")
@ResponseBody
public PaginationResult<JobOverViewVO> pagingJobs(@PathVariable Long clusterPhyId, @RequestBody JobPaginationDTO dto) {
return jobService.pagingJobs(clusterPhyId, dto);
}
@ApiOperation(value = "集群任务状态")
@GetMapping(value = "clusters/{clusterPhyId}/jobs-state")
@ResponseBody
public Result<JobStateVO> state(@PathVariable Long clusterPhyId) {
return jobService.state(clusterPhyId);
}
@ApiOperation(value = "任务-详细信息", notes = "")
@GetMapping(value = "clusters/{clusterPhyId}/jobs/{jobId}/modify-detail")
@ResponseBody
public Result<JobModifyDetailVO> getJobModifyDetail(@PathVariable Long clusterPhyId, @PathVariable Long jobId) {
return jobService.getJobModifyDetail(clusterPhyId, jobId);
}
@ApiOperation(value = "任务-详细信息", notes = "")
@GetMapping(value = "clusters/{clusterPhyId}/jobs/{jobId}/detail")
@ResponseBody
public Result<JobDetailVO> getJobDetail(@PathVariable Long clusterPhyId, @PathVariable Long jobId) {
return jobService.getJobDetail(clusterPhyId, jobId);
}
@ApiOperation(value = "子任务-partition-详细信息", notes = "")
@PostMapping(value = "clusters/{clusterPhyId}/jobs/{jobId}/{topic}/partition-detail")
@ResponseBody
public Result<List<SubJobPartitionDetailVO>> getSubJobPartitionDetail(@PathVariable Long clusterPhyId, @PathVariable Long jobId, @PathVariable String topic) {
return jobService.getSubJobPartitionDetail(clusterPhyId, jobId, topic);
}
@ApiOperation(value = "任务-节点流量", notes = "")
@PostMapping(value = "clusters/{clusterPhyId}/jobs/{jobId}/node/traffic")
@ResponseBody
public Result<List<JobTrafficBrokerVO>> getJobNodeTraffic(@PathVariable Long clusterPhyId, @PathVariable Long jobId) {
return jobService.getJobNodeTraffic(clusterPhyId, jobId);
}
@ApiOperation(value = "任务-更新节点流量单位byte", notes = "")
@PostMapping(value = "clusters/{clusterPhyId}/jobs/{jobId}/traffic/{limit}")
@ResponseBody
public Result<Void> updateJobTrafficLimit(@PathVariable Long clusterPhyId, @PathVariable Long jobId, @PathVariable Long limit) {
return jobService.updateJobTrafficLimit(clusterPhyId, jobId, limit, HttpRequestUtil.getOperator());
}
}

View File

@@ -0,0 +1,39 @@
package com.xiaojukeji.know.streaming.km.rest.api.v3.kafkacontroller;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.xiaojukeji.know.streaming.km.common.bean.dto.pagination.PaginationBaseDTO;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.PaginationResult;
import com.xiaojukeji.know.streaming.km.common.bean.po.kafkacontrollr.KafkaControllerPO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.kafkacontroller.KafkaControllerVO;
import com.xiaojukeji.know.streaming.km.common.constant.ApiPrefix;
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
import com.xiaojukeji.know.streaming.km.core.service.kafkacontroller.KafkaControllerService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
/**
* @author zengqiao
* @date 22/02/24
*/
@Api(tags = Constant.SWAGGER_API_TAG_PREFIX + "KafkaController-相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V3_PREFIX)
public class KafkaController {
@Autowired
private KafkaControllerService kafkaControllerService;
@ApiOperation(value = "KafkaController变更历史")
@GetMapping(value = "clusters/{clusterPhyId}/controller-history")
@ResponseBody
public PaginationResult<KafkaControllerVO> getKafkaControllerHistory(@PathVariable Long clusterPhyId, PaginationBaseDTO dto) {
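// Page the controller change history from the DB and convert PO records to VOs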
IPage<KafkaControllerPO> iPage = kafkaControllerService.pagingControllerHistories(clusterPhyId, dto.getPageNo(), dto.getPageSize(), dto.getSearchKeywords());
return PaginationResult.buildSuc(
ConvertUtil.list2List(iPage.getRecords(), KafkaControllerVO.class),
iPage
);
}
}

View File

@@ -0,0 +1,60 @@
package com.xiaojukeji.know.streaming.km.rest.api.v3.kafkauser;
import com.didiglobal.logi.security.util.HttpRequestUtil;
import com.xiaojukeji.know.streaming.km.biz.kafkauser.KafkaUserManager;
import com.xiaojukeji.know.streaming.km.common.bean.dto.kafkauser.ClusterKafkaUserDTO;
import com.xiaojukeji.know.streaming.km.common.bean.dto.kafkauser.ClusterKafkaUserTokenDTO;
import com.xiaojukeji.know.streaming.km.common.bean.entity.param.kafkauser.KafkaUserParam;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
import com.xiaojukeji.know.streaming.km.common.bean.vo.kafkauser.KafkaUserTokenVO;
import com.xiaojukeji.know.streaming.km.common.constant.ApiPrefix;
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import com.xiaojukeji.know.streaming.km.core.service.kafkauser.KafkaUserService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.*;
/**
* @author zengqiao
* @date 22/02/23
*/
@Api(tags = Constant.SWAGGER_API_TAG_PREFIX + "KafkaUser-相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V3_PREFIX)
public class KafkaUserController {
@Autowired
private KafkaUserService kafkaUserService;
@Autowired
private KafkaUserManager kafkaUserManager;
@ApiOperation(value = "创建KafkaUser", notes = "")
@PostMapping(value ="kafka-users")
@ResponseBody
public Result<Void> createKafkaUser(@Validated @RequestBody ClusterKafkaUserTokenDTO dto) {
return kafkaUserManager.createKafkaUserWithTokenEncrypted(dto, HttpRequestUtil.getOperator());
}
@ApiOperation(value = "删除KafkaUser", notes = "")
@DeleteMapping(value ="kafka-users")
@ResponseBody
public Result<Void> deleteKafkaUser(@Validated @RequestBody ClusterKafkaUserDTO dto) {
return kafkaUserService.deleteKafkaUser(new KafkaUserParam(dto.getClusterId(), dto.getKafkaUser()), HttpRequestUtil.getOperator());
}
@ApiOperation(value = "修改KafkaUser密码", notes = "")
@PutMapping(value ="kafka-users/token")
@ResponseBody
public Result<Void> modifyKafkaUserToken(@Validated @RequestBody ClusterKafkaUserTokenDTO dto) {
return kafkaUserManager.modifyKafkaUserWithTokenEncrypted(dto, HttpRequestUtil.getOperator());
}
@ApiOperation(value = "查看KafkaUser密码", notes = "")
@GetMapping(value ="clusters/{clusterPhyId}/kafka-users/{kafkaUser}/token")
@ResponseBody
public Result<KafkaUserTokenVO> getKafkaUserToken(@PathVariable Long clusterPhyId, @PathVariable String kafkaUser) {
return kafkaUserManager.getKafkaUserTokenWithEncrypt(clusterPhyId, kafkaUser);
}
}

View File

@@ -0,0 +1,28 @@
package com.xiaojukeji.know.streaming.km.rest.api.v3.open;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
import com.xiaojukeji.know.streaming.km.common.constant.ApiPrefix;
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.RestController;
/**
* @author zengqiao
* @date 20/6/18
*/
@Api(tags = Constant.SWAGGER_API_TAG_PREFIX + "直接开放-应用探活-相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V3_OPEN_PREFIX)
public class HealthController {
@ApiOperation(value = "探活", notes = "")
@GetMapping(path = "health")
@ResponseBody
public Result<String> health() {
return Result.buildSuc();
}
}

View File

@@ -0,0 +1,58 @@
package com.xiaojukeji.know.streaming.km.rest.api.v3.reassign;
import com.xiaojukeji.know.streaming.km.biz.reassign.ReassignManager;
import com.xiaojukeji.know.streaming.km.common.bean.dto.reassign.ReassignTopicOverviewDTO;
import com.xiaojukeji.know.streaming.km.common.bean.dto.reassign.change.CreateChangeReplicasPlanDTO;
import com.xiaojukeji.know.streaming.km.common.bean.dto.reassign.move.CreateMoveReplicaPlanDTO;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
import com.xiaojukeji.know.streaming.km.common.bean.vo.reassign.ReassignTopicOverviewVO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.reassign.plan.ReassignPlanVO;
import com.xiaojukeji.know.streaming.km.common.constant.ApiPrefix;
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.*;
import javax.validation.Valid;
import java.util.List;
/**
* 原生迁移,无高级特性
* @author zengqiao
* @date 22/05/06
*/
@Validated
@Api(tags = Constant.SWAGGER_API_TAG_PREFIX + "迁移(Community)-相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V3_PREFIX)
public class CommunityReassignController {
@Autowired
private ReassignManager reassignManager;
/**************************************************** 获取迁移计划 ****************************************************/
@ApiOperation(value = "副本扩缩计划-生成", notes = "")
@PostMapping(value ="reassignment/replicas-change-plan")
@ResponseBody
public Result<ReassignPlanVO> createReplicasChangePlanJson(@RequestBody @Valid List<CreateChangeReplicasPlanDTO> dtoList) {
return reassignManager.createReplicaChangePlanJson(dtoList);
}
@ApiOperation(value = "副本迁移计划-生成", notes = "")
@PostMapping(value ="reassignment/replicas-move-plan")
@ResponseBody
public Result<ReassignPlanVO> createReplicasMovePlanJson(@RequestBody @Valid List<CreateMoveReplicaPlanDTO> dtoList) {
return reassignManager.createReassignmentPlanJson(dtoList);
}
/**************************************************** 查询信息 ****************************************************/
@ApiOperation(value = "迁移任务-Topic概览信息", notes = "")
@PostMapping(value ="reassignment/topics-overview")
@ResponseBody
public Result<List<ReassignTopicOverviewVO>> getReassignmentTopicsOverview(@RequestBody @Valid ReassignTopicOverviewDTO dto) {
return reassignManager.getReassignmentTopicsOverview(dto);
}
}

View File

@@ -0,0 +1,55 @@
package com.xiaojukeji.know.streaming.km.rest.api.v3.replica;
import com.xiaojukeji.know.streaming.km.common.bean.dto.metrices.MetricDTO;
import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.BaseMetrics;
import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.ReplicationMetrics;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
import com.xiaojukeji.know.streaming.km.common.bean.vo.metrics.point.MetricPointVO;
import com.xiaojukeji.know.streaming.km.common.constant.ApiPrefix;
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import com.xiaojukeji.know.streaming.km.core.service.replica.ReplicaMetricService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.List;
/**
* @author zengqiao
* @date 22/02/23
*/
@Api(tags = Constant.SWAGGER_API_TAG_PREFIX + "ReplicaMetrics-相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V3_PREFIX)
public class ReplicaMetricsController {
@Autowired
private ReplicaMetricService replicationMetricService;
@ApiOperation(value = "Replica指标-单个Replica")
@PostMapping(value = "clusters/{clusterPhyId}/brokers/{brokerId}/topics/{topicName}/partitions/{partitionId}/metric-points")
@ResponseBody
public Result<List<MetricPointVO>> getReplicaMetricPoints(@PathVariable Long clusterPhyId,
@PathVariable Integer brokerId,
@PathVariable String topicName,
@PathVariable Integer partitionId,
@RequestBody MetricDTO dto) {
return replicationMetricService.getMetricPointsFromES(clusterPhyId, brokerId, topicName, partitionId, dto);
}
@ApiOperation(value = "Replica指标-单个Replica")
@PostMapping(value = "clusters/{clusterPhyId}/brokers/{brokerId}/topics/{topicName}/partitions/{partitionId}/latest-metrics")
@ResponseBody
public Result<BaseMetrics> getReplicaMetricPoints(@PathVariable Long clusterPhyId,
@PathVariable Integer brokerId,
@PathVariable String topicName,
@PathVariable Integer partitionId,
@RequestBody List<String> metricsNames) {
Result<ReplicationMetrics> metricsResult = replicationMetricService.getLatestMetricsFromES(clusterPhyId, brokerId, topicName, partitionId, metricsNames);
if (metricsResult.failed()) {
return Result.buildFromIgnoreData(metricsResult);
}
return Result.buildSuc(metricsResult.getData());
}
}

View File

@@ -0,0 +1,39 @@
package com.xiaojukeji.know.streaming.km.rest.api.v3.self;
import com.xiaojukeji.know.streaming.km.biz.self.SelfManager;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
import com.xiaojukeji.know.streaming.km.common.bean.vo.self.SelfMetricsVO;
import com.xiaojukeji.know.streaming.km.common.constant.ApiPrefix;
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.Properties;
/**
* @author zengqiao
* @date 20/6/18
*/
@Api(tags = Constant.SWAGGER_API_TAG_PREFIX + "自身信息-相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V3_PREFIX)
public class SelfController {
@Autowired
private SelfManager selfManager;
@ApiOperation(value = "指标", notes = "")
@GetMapping(path = "self/metrics")
@ResponseBody
public Result<SelfMetricsVO> metrics() {
return selfManager.metrics();
}
@ApiOperation(value = "版本", notes = "")
@GetMapping(path = "self/version")
@ResponseBody
public Result<Properties> version() {
return selfManager.version();
}
}

View File

@@ -0,0 +1,86 @@
package com.xiaojukeji.know.streaming.km.rest.api.v3.topic;
import com.didiglobal.logi.security.util.HttpRequestUtil;
import com.xiaojukeji.know.streaming.km.biz.topic.OpTopicManager;
import com.xiaojukeji.know.streaming.km.common.bean.dto.topic.ClusterTopicDTO;
import com.xiaojukeji.know.streaming.km.common.bean.dto.topic.TopicCreateDTO;
import com.xiaojukeji.know.streaming.km.common.bean.dto.topic.TopicExpansionDTO;
import com.xiaojukeji.know.streaming.km.common.bean.entity.param.topic.TopicParam;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.ResultStatus;
import com.xiaojukeji.know.streaming.km.common.bean.entity.topic.Topic;
import com.xiaojukeji.know.streaming.km.common.bean.vo.metadata.TopicMetadataCombineExistVO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.metadata.TopicMetadataVO;
import com.xiaojukeji.know.streaming.km.common.constant.ApiPrefix;
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import com.xiaojukeji.know.streaming.km.common.constant.MsgConstant;
import com.xiaojukeji.know.streaming.km.common.converter.TopicVOConverter;
import com.xiaojukeji.know.streaming.km.core.service.topic.OpTopicService;
import com.xiaojukeji.know.streaming.km.core.service.topic.TopicService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.*;
/**
* @author zengqiao
* @date 22/02/22
*/
@Api(tags = Constant.SWAGGER_API_TAG_PREFIX + "Topic-相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V3_PREFIX)
public class TopicController {
@Autowired
private TopicService topicService;
@Autowired
private OpTopicManager opTopicManager;
@Autowired
private OpTopicService opTopicService;
@ApiOperation(value = "Topic创建", notes = "")
@PostMapping(value = "topics")
@ResponseBody
public Result<Void> createTopic(@Validated @RequestBody TopicCreateDTO dto) {
return opTopicManager.createTopic(dto, HttpRequestUtil.getOperator());
}
@ApiOperation(value = "Topic删除", notes = "")
@DeleteMapping(value ="topics")
@ResponseBody
public Result<Void> deleteTopics(@Validated @RequestBody ClusterTopicDTO dto) {
return opTopicService.deleteTopic(new TopicParam(dto.getClusterId(), dto.getTopicName()), HttpRequestUtil.getOperator());
}
@ApiOperation(value = "Topic扩分区", notes = "")
@PostMapping(value = "topics/expand-partitions")
@ResponseBody
public Result<Void> expandTopics(@Validated @RequestBody TopicExpansionDTO dto) {
return opTopicManager.expandTopic(dto, HttpRequestUtil.getOperator());
}
@ApiOperation(value = "Topic元信息", notes = "带是否存在信息")
@GetMapping(value = "clusters/{clusterPhyId}/topics/{topicName}/metadata-combine-exist")
@ResponseBody
public Result<TopicMetadataCombineExistVO> getTopicsMetadataCombineExist(@PathVariable Long clusterPhyId,
@PathVariable String topicName) {
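// A missing topic is not an error here; the returned VO carries an exist flag, unlike the /metadata endpoint below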
Topic topic = topicService.getTopic(clusterPhyId, topicName);
return Result.buildSuc(TopicVOConverter.convert2TopicMetadataCombineExistVO(topicName, topic));
}
@ApiOperation(value = "Topic元信息", notes = "")
@GetMapping(value = "clusters/{clusterPhyId}/topics/{topicName}/metadata")
@ResponseBody
public Result<TopicMetadataVO> getTopicsMetadata(@PathVariable Long clusterPhyId,
@PathVariable String topicName) {
Topic topic = topicService.getTopic(clusterPhyId, topicName);
if (topic == null) {
return Result.buildFromRSAndMsg(ResultStatus.NOT_EXIST, MsgConstant.getTopicNotExist(clusterPhyId, topicName));
}
return Result.buildSuc(TopicVOConverter.convert2TopicMetadataVO(topic));
}
}

View File

@@ -0,0 +1,142 @@
package com.xiaojukeji.know.streaming.km.rest.api.v3.topic;
import com.xiaojukeji.know.streaming.km.biz.topic.TopicStateManager;
import com.xiaojukeji.know.streaming.km.common.bean.dto.metrices.MetricDTO;
import com.xiaojukeji.know.streaming.km.common.bean.dto.pagination.PaginationBaseDTO;
import com.xiaojukeji.know.streaming.km.common.bean.dto.topic.TopicRecordDTO;
import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.BaseMetrics;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.PaginationResult;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
import com.xiaojukeji.know.streaming.km.common.bean.po.KafkaAclPO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.acl.AclBindingVO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.group.GroupTopicBasicVO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.metrics.point.MetricPointVO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.topic.TopicBrokersPartitionsSummaryVO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.topic.TopicStateVO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.topic.broker.TopicBrokerAllVO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.topic.TopicRecordVO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.topic.partition.TopicPartitionVO;
import com.xiaojukeji.know.streaming.km.common.constant.ApiPrefix;
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import com.xiaojukeji.know.streaming.km.common.converter.KafkaAclConverter;
import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
import com.xiaojukeji.know.streaming.km.common.utils.PaginationUtil;
import com.xiaojukeji.know.streaming.km.core.service.acl.KafkaAclService;
import com.xiaojukeji.know.streaming.km.core.service.group.GroupService;
import com.xiaojukeji.know.streaming.km.core.service.topic.TopicMetricService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.*;
import java.util.Arrays;
import java.util.List;
/**
* @author zengqiao
* @date 22/02/23
*/
@Api(tags = Constant.SWAGGER_API_TAG_PREFIX + "TopicState-相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V3_PREFIX)
public class TopicStateController {
@Autowired
private KafkaAclService kafkaAclService;
@Autowired
private TopicStateManager topicStateManager;
@Autowired
private TopicMetricService topicMetricService;
@Autowired
private GroupService groupService;
@ApiOperation(value = "Topic-State信息", notes = "")
@GetMapping(value = "clusters/{clusterPhyId}/topics/{topicName}/state")
@ResponseBody
public Result<TopicStateVO> getTopicState(@PathVariable Long clusterPhyId, @PathVariable String topicName) {
return topicStateManager.getTopicState(clusterPhyId, topicName);
}
@ApiOperation(value = "Topic-Broker信息", notes = "")
@GetMapping(value = "clusters/{clusterPhyId}/topics/{topicName}/brokers")
@ResponseBody
public Result<TopicBrokerAllVO> getTopicBrokers(@PathVariable Long clusterPhyId,
@PathVariable String topicName,
@RequestParam(required = false) String searchKeyword) throws Exception {
return Result.buildSuc(topicStateManager.getTopicBrokerAll(clusterPhyId, topicName, searchKeyword));
}
@ApiOperation(value = "Topic-Broker-Partition统计信息", notes = "")
@GetMapping(value = "clusters/{clusterPhyId}/topics/{topicName}/brokers-partitions-summary")
@ResponseBody
public Result<TopicBrokersPartitionsSummaryVO> getTopicBrokersPartitionsSummary(@PathVariable Long clusterPhyId,
@PathVariable String topicName) throws Exception {
return topicStateManager.getTopicBrokersPartitionsSummary(clusterPhyId, topicName);
}
@ApiOperation(value = "Topic-Partition信息", notes = "")
@PostMapping(value = "clusters/{clusterPhyId}/topics/{topicName}/partitions")
@ResponseBody
public Result<List<TopicPartitionVO>> getTopicPartitions(@PathVariable Long clusterPhyId,
@PathVariable String topicName,
@RequestBody List<String> metricsNames) throws Exception {
return topicStateManager.getTopicPartitions(clusterPhyId, topicName, metricsNames);
}
@ApiOperation(value = "Topic-Messages信息", notes = "")
@PostMapping(value = "clusters/{clusterPhyId}/topics/{topicName}/records")
@ResponseBody
public Result<List<TopicRecordVO>> getTopicMessages(@PathVariable Long clusterPhyId,
@PathVariable String topicName,
@Validated @RequestBody TopicRecordDTO dto) throws Exception {
return topicStateManager.getTopicMessages(clusterPhyId, topicName, dto);
}
@ApiOperation(value = "Topic-ACL信息", notes = "")
@GetMapping(value = "clusters/{clusterPhyId}/topics/{topicName}/acl-Bindings")
@ResponseBody
public PaginationResult<AclBindingVO> getTopicAclBindings(@PathVariable Long clusterPhyId,
@PathVariable String topicName,
PaginationBaseDTO dto) {
List<KafkaAclPO> poList = kafkaAclService.getTopicAclFromDB(clusterPhyId, topicName);
// 分页
return PaginationUtil.pageBySubData(
// 搜索
PaginationUtil.pageByFuzzyFilter(
KafkaAclConverter.convert2AclBindingVOList(poList),
dto.getSearchKeywords(),
Arrays.asList("kafkaUser")
),
dto
);
}
@ApiOperation(value = "Topic指标-单个Topic")
@PostMapping(value = "clusters/{clusterPhyId}/topics/{topicName}/metric-points")
@ResponseBody
public Result<List<MetricPointVO>> getTopicMetricPoints(@PathVariable Long clusterPhyId,
@PathVariable String topicName,
@RequestBody MetricDTO dto) {
return topicMetricService.getMetricPointsFromES(clusterPhyId, topicName, dto);
}
@ApiOperation(value = "Topic近期指标-单个Topic")
@PostMapping(value = "clusters/{clusterPhyId}/topics/{topicName}/latest-metrics")
@ResponseBody
public Result<BaseMetrics> getTopicLatestMetrics(@PathVariable Long clusterPhyId,
@PathVariable String topicName,
@RequestBody List<String> metricsNames) {
return Result.buildSuc(topicMetricService.getTopicLatestMetricsFromES(clusterPhyId, topicName, metricsNames));
}
@ApiOperation(value = "TopicGroups基本信息列表")
@GetMapping(value = "clusters/{clusterPhyId}/topics/{topicName}/groups-basic")
@ResponseBody
public Result<List<GroupTopicBasicVO>> getTopicGroupsBasic(@PathVariable Long clusterPhyId,
@PathVariable String topicName) {
return Result.buildSuc(ConvertUtil.list2List(groupService.listGroupByTopic(clusterPhyId, topicName), GroupTopicBasicVO.class));
}
}

View File

@@ -0,0 +1,88 @@
package com.xiaojukeji.know.streaming.km.rest.api.v3.util;
import com.xiaojukeji.know.streaming.km.common.bean.dto.util.ValidateKafkaDTO;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.ResultStatus;
import com.xiaojukeji.know.streaming.km.common.bean.entity.util.KafkaValidate;
import com.xiaojukeji.know.streaming.km.common.bean.vo.util.KafkaBSValidateVO;
import com.xiaojukeji.know.streaming.km.common.constant.ApiPrefix;
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
import com.xiaojukeji.know.streaming.km.core.service.cluster.ClusterValidateService;
import com.xiaojukeji.know.streaming.km.persistence.kafka.KafkaAdminZKClient;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import kafka.zk.KafkaZkClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.Properties;
/**
* @author zengqiao
* @date 22/02/22
*/
@Api(tags = Constant.SWAGGER_API_TAG_PREFIX + "Utils-相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V3_PREFIX)
public class UtilsController {
private static final Logger log = LoggerFactory.getLogger(UtilsController.class);
@Autowired
private KafkaAdminZKClient kafkaAdminZKClient;
@Autowired
private ClusterValidateService clusterValidateService;
@ApiOperation(value = "Kafka地址校验", notes = "")
@PostMapping(value = "utils/kafka-validator")
@ResponseBody
public Result<KafkaBSValidateVO> validateKafka(@RequestBody ValidateKafkaDTO dto) {
Result<KafkaValidate> rkv = this.clusterValidateService.checkKafkaLegal(
dto.getBootstrapServers(),
dto.getClientProperties() == null? new Properties(): dto.getClientProperties(),
dto.getZookeeper());
if (rkv.failed()) {
return Result.buildFromIgnoreData(rkv);
}
return Result.buildSuc(ConvertUtil.obj2Obj(rkv.getData(), KafkaBSValidateVO.class));
}
@ApiOperation(value = "查看ZK信息")
@GetMapping(value = "utils/zookeeper-data")
@ResponseBody
public Result<Object> getZKData(@RequestParam("clusterPhyId") Long clusterPhyId,
@RequestParam String cmd,
@RequestParam String path,
@RequestParam Integer version) throws Exception {
KafkaZkClient kafkaZkClient = kafkaAdminZKClient.getClient(clusterPhyId);
if (kafkaZkClient == null) {
return Result.buildFrom(ResultStatus.NOT_EXIST);
}
try {
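// cmd semantics: ls lists child nodes, get reads node data, del deletes the node at the given version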
switch (cmd) {
case "ls":
return Result.buildSuc(kafkaZkClient.currentZooKeeper().getChildren(path, false));
case "get":
return Result.buildSuc(new String(kafkaZkClient.currentZooKeeper().getData(path, null, null)));
case "del":
kafkaZkClient.currentZooKeeper().delete(path, version);
return Result.buildSuc();
default:
return Result.buildFromRSAndMsg(ResultStatus.PARAM_ILLEGAL, "cmd only support ls, get and del");
}
} catch (InterruptedException ie) {
Thread.currentThread().interrupt();
log.error("method=getZKData||clusterPhyId={}||cmd={}||path={}||errMsg=read failed.", clusterPhyId, cmd, path, ie);
return Result.buildFromRSAndMsg(ResultStatus.ZK_OPERATE_FAILED, ie.getMessage());
} catch (Exception e) {
log.error("method=getZKData||clusterPhyId={}||cmd={}||path={}||errMsg=read failed.", clusterPhyId, cmd, path, e);
return Result.buildFromRSAndMsg(ResultStatus.ZK_OPERATE_FAILED, e.getMessage());
}
}
}

View File

@@ -0,0 +1,73 @@
package com.xiaojukeji.know.streaming.km.rest.api.v3.version;
import com.didiglobal.logi.security.util.HttpRequestUtil;
import com.xiaojukeji.know.streaming.km.biz.version.VersionControlManager;
import com.xiaojukeji.know.streaming.km.common.bean.dto.metrices.UserMetricConfigDTO;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
import com.xiaojukeji.know.streaming.km.common.bean.vo.config.metric.UserMetricConfigVO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.version.VersionItemVO;
import com.xiaojukeji.know.streaming.km.common.constant.ApiPrefix;
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import javax.servlet.http.HttpServletRequest;
import java.util.List;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;
/**
* @author zengqiao
* @date 21/07/16
*/
@Api(tags = Constant.SWAGGER_API_TAG_PREFIX + "版本控制&兼容-相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V3_PREFIX)
public class VersionController {
@Autowired
private VersionControlManager versionControlManager;
@ApiOperation(value = "KS支持的kafka版本列表", notes = "")
@GetMapping(value = "support-kafka-versions")
@ResponseBody
public Result<SortedMap<String, Long>> listAllVersions() {
Result<Map<String, Long>> rm = versionControlManager.listAllVersions();
if (rm.failed()) {
return Result.buildFromIgnoreData(rm);
}
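// Wrap in a TreeMap so the supported versions are returned sorted by version string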
return Result.buildSuc(new TreeMap<>(rm.getData()));
}
@ApiOperation(value = "查询当前所有的兼容性(指标、前端操作)配置信息", notes = "")
@GetMapping(value = "kafka-versions-items")
@ResponseBody
public Result<Map<String, VersionItemVO>> listAllVersionItem(){
return versionControlManager.listAllVersionItem();
}
@ApiOperation(value = "集群的版本兼容项,查询当前集群版本下支持的指标或操作", notes = "")
@GetMapping(value = "clusters/{clusterId}/types/{type}/support-kafka-versions")
@ResponseBody
public Result<List<VersionItemVO>> listClusterVersionControlItem(@PathVariable Long clusterId, @PathVariable Integer type) {
return versionControlManager.listClusterVersionControlItem(clusterId, type);
}
@ApiOperation(value = "用户设置的指标显示项", notes = "")
@GetMapping(value = "clusters/{clusterId}/types/{type}/user-metric-config")
@ResponseBody
public Result<List<UserMetricConfigVO>> listUserMetricItem(@PathVariable Long clusterId, @PathVariable Integer type, HttpServletRequest request) {
return versionControlManager.listUserMetricItem(clusterId, type, HttpRequestUtil.getOperator(request));
}
@ApiOperation(value = "用户设置指标显示项", notes = "")
@PostMapping(value = "clusters/{clusterId}/types/{type}/user-metric-config")
@ResponseBody
public Result<Void> updateUserMetricItem(@PathVariable Long clusterId, @PathVariable Integer type,
@RequestBody UserMetricConfigDTO userMetricConfigDTO, HttpServletRequest request){
return versionControlManager.updateUserMetricItem(clusterId, type, userMetricConfigDTO, HttpRequestUtil.getOperator(request));
}
}

View File

@@ -0,0 +1,88 @@
package com.xiaojukeji.know.streaming.km.rest.config;
import com.baomidou.mybatisplus.annotation.DbType;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.core.config.GlobalConfig;
import com.baomidou.mybatisplus.extension.plugins.PaginationInterceptor;
import com.baomidou.mybatisplus.extension.spring.MybatisSqlSessionFactoryBean;
import org.apache.ibatis.session.SqlSessionFactory;
import org.mybatis.spring.SqlSessionTemplate;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.jdbc.DataSourceBuilder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import javax.sql.DataSource;
/**
* @author zengqiao
* @date 20/3/17
*/
@Configuration
@MapperScan("com.xiaojukeji.know.streaming.km.persistence.mysql")
public class DataSourceConfig {
@Bean(name = "dataSource")
@ConfigurationProperties(prefix = "spring.datasource.know-streaming")
@Primary
public DataSource dataSource() {
return DataSourceBuilder.create().build();
}
@Bean(name = "kmGlobalConfig")
public GlobalConfig globalConfig(){
GlobalConfig globalConfig=new GlobalConfig();
globalConfig.setBanner(false);
GlobalConfig.DbConfig dbConfig=new GlobalConfig.DbConfig();
dbConfig.setIdType(IdType.AUTO);
globalConfig.setDbConfig(dbConfig);
return globalConfig;
}
@Bean(name = "sqlSessionFactory")
@Primary
public SqlSessionFactory sqlSessionFactory(@Qualifier("dataSource") DataSource dataSource) throws Exception {
MybatisSqlSessionFactoryBean bean = new MybatisSqlSessionFactoryBean();
bean.setDataSource(dataSource);
bean.setMapperLocations(new PathMatchingResourcePatternResolver().getResources("classpath*:mybatis/*.xml"));
bean.setGlobalConfig(globalConfig());
// 输出sql
// MybatisConfiguration configuration = new MybatisConfiguration();
// configuration.setJdbcTypeForNull(JdbcType.NULL);
// configuration.setMapUnderscoreToCamelCase(true);
// configuration.setCacheEnabled(false);
// // 配置打印sql语句
// configuration.setLogImpl(StdOutImpl.class);
// bean.setConfiguration(configuration);
//添加分页插件,不加这个,分页不生效
bean.setPlugins(paginationInterceptor());
return bean.getObject();
}
@Bean(name = "transactionManager")
@Primary
public DataSourceTransactionManager transactionManager(@Qualifier("dataSource") DataSource dataSource) {
return new DataSourceTransactionManager(dataSource);
}
@Bean(name = "sqlSession")
@Primary
public SqlSessionTemplate sqlSessionTemplate(@Qualifier("sqlSessionFactory") SqlSessionFactory sqlSessionFactory) {
return new SqlSessionTemplate(sqlSessionFactory);
}
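// MyBatis-Plus pagination interceptor (MySQL dialect); registered on the SqlSessionFactory above so paging takes effect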
@Bean
public PaginationInterceptor paginationInterceptor() {
PaginationInterceptor page = new PaginationInterceptor();
page.setDbType(DbType.MYSQL);
return page;
}
}

View File

@@ -0,0 +1,20 @@
package com.xiaojukeji.know.streaming.km.rest.config;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.TaskScheduler;
import org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler;
@Configuration
public class ScheduledTaskConfig {
@Value(value = "${thread-pool.scheduled.thread-num:2}")
private Integer scheduledTaskThreadNum;
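// Backs all @Scheduled tasks; pool size comes from thread-pool.scheduled.thread-num (defaults to 2)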
@Bean
public TaskScheduler taskScheduler() {
ThreadPoolTaskScheduler taskScheduler = new ThreadPoolTaskScheduler();
taskScheduler.setPoolSize(scheduledTaskThreadNum);
return taskScheduler;
}
}

View File

@@ -0,0 +1,88 @@
package com.xiaojukeji.know.streaming.km.rest.config;
import com.didiglobal.logi.security.common.constant.Constants;
import com.google.common.base.Predicates;
import com.xiaojukeji.know.streaming.km.common.constant.ApiPrefix;
import com.xiaojukeji.know.streaming.km.common.utils.GitPropUtil;
import com.xiaojukeji.know.streaming.km.rest.interceptor.PermissionInterceptor;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.config.annotation.*;
import springfox.documentation.builders.*;
import springfox.documentation.service.ApiInfo;
import springfox.documentation.service.Contact;
import springfox.documentation.spi.DocumentationType;
import springfox.documentation.spring.web.plugins.Docket;
import springfox.documentation.swagger2.annotations.EnableSwagger2;
@Configuration
@EnableSwagger2
public class WebConfig implements WebMvcConfigurer {
@Autowired
private PermissionInterceptor permissionInterceptor;
private static final String FE_INDEX_PAGE_HTML = "layout/index";
@Override
public void addViewControllers(ViewControllerRegistry registry) {
// FE-首页
registry.addViewController("/").setViewName(FE_INDEX_PAGE_HTML);
// FE-系统管理:因为系统管理模块的uri和前端静态资源名字重复了,因此这里为了辨别,进行了比较详细的规则描述
registry.addViewController("/config").setViewName(FE_INDEX_PAGE_HTML);
registry.addViewController("/config/user").setViewName(FE_INDEX_PAGE_HTML);
registry.addViewController("/config/setting").setViewName(FE_INDEX_PAGE_HTML);
registry.addViewController("/config/operation-log").setViewName(FE_INDEX_PAGE_HTML);
// FE-多集群管理
registry.addViewController("/cluster").setViewName(FE_INDEX_PAGE_HTML);
registry.addViewController("/cluster/**").setViewName(FE_INDEX_PAGE_HTML);
// FE-登录
registry.addViewController("/login").setViewName(FE_INDEX_PAGE_HTML);
}
@Override
public void addInterceptors(InterceptorRegistry registry) {
// 会进行拦截的接口
registry.addInterceptor(permissionInterceptor).addPathPatterns(ApiPrefix.API_PREFIX + "**", Constants.API_PREFIX + "/**");
}
@Override
public void addResourceHandlers(ResourceHandlerRegistry registry) {
// COMMON
registry.addResourceHandler("/**").addResourceLocations("classpath:/templates/", "classpath:/static/");
// SWAGGER
registry.addResourceHandler("swagger-ui.html").addResourceLocations("classpath:/META-INF/resources/");
registry.addResourceHandler("/webjars/**").addResourceLocations("classpath:/META-INF/resources/webjars/");
}
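// Swagger Docket scanning both the KS-KM REST controllers and the embedded logi-security controllers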
@Bean
public Docket createRestApi() {
return new Docket(DocumentationType.SWAGGER_2)
.apiInfo(apiInfo())
.select()
.apis(Predicates.or(
RequestHandlerSelectors.basePackage("com.xiaojukeji.know.streaming.km.rest.api"),
RequestHandlerSelectors.basePackage("com.didiglobal.logi.security.controller")))
.paths(PathSelectors.any())
.build()
.enable(true);
}
private ApiInfo apiInfo() {
String version = GitPropUtil.getProps(GitPropUtil.VERSION_FIELD_NAME);
String commitId = GitPropUtil.getProps(GitPropUtil.COMMIT_ID_FIELD_NAME);
return new ApiInfoBuilder()
.title("KS-KM 接口文档")
.description("欢迎使用滴滴KS-KM")
.contact(new Contact("zengqiao", "", "zengqiao@didiglobal.com"))
.version(String.format("%s-%s", version == null? "": version, commitId == null? "": commitId))
.build();
}
}

View File

@@ -0,0 +1,47 @@
package com.xiaojukeji.know.streaming.km.rest.handler;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.ResultStatus;
import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.validation.FieldError;
import org.springframework.web.bind.MethodArgumentNotValidException;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.RestControllerAdvice;
import java.util.List;
import java.util.stream.Collectors;
@RestControllerAdvice
public class CustomGlobalExceptionHandler {
private static final Logger LOGGER = LoggerFactory.getLogger(CustomGlobalExceptionHandler.class);
/**
* 处理参数异常并返回
* @param me 异常
* @return
*/
@ExceptionHandler(MethodArgumentNotValidException.class)
public Result<Void> methodArgumentNotValidException(MethodArgumentNotValidException me) {
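// Collect every field validation message and join them into a single PARAM_ILLEGAL response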
List<FieldError> fieldErrorList = me.getBindingResult().getFieldErrors();
List<String> errorList = fieldErrorList.stream().map(elem -> elem.getDefaultMessage()).collect(Collectors.toList());
return Result.buildFromRSAndMsg(ResultStatus.PARAM_ILLEGAL, ConvertUtil.list2String(errorList, ","));
}
@ExceptionHandler(NullPointerException.class)
public Result<Void> handleNullPointerException(Exception e) {
LOGGER.error("method=handleNullPointerException||errMsg=exception", e);
return Result.buildFromRSAndMsg(ResultStatus.FAIL, "服务空指针异常");
}
@ExceptionHandler(Exception.class)
public Result<Void> handleException(Exception e) {
LOGGER.error("method=handleException||errMsg=exception", e);
return Result.buildFromRSAndMsg(ResultStatus.FAIL, e.getMessage());
}
}

View File

@@ -0,0 +1,147 @@
package com.xiaojukeji.know.streaming.km.rest.interceptor;
import com.didiglobal.logi.log.ILog;
import com.didiglobal.logi.log.LogFactory;
import com.didiglobal.logi.security.common.constant.Constants;
import com.didiglobal.logi.security.service.LoginService;
import com.xiaojukeji.know.streaming.km.account.KmAccountConfig;
import com.xiaojukeji.know.streaming.km.account.common.bizenum.LoginServiceNameEnum;
import com.xiaojukeji.know.streaming.km.account.login.trick.TrickJumpLoginService;
import com.xiaojukeji.know.streaming.km.common.component.HandleFactory;
import com.xiaojukeji.know.streaming.km.common.constant.ApiPrefix;
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.method.HandlerMethod;
import org.springframework.web.servlet.HandlerInterceptor;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import java.util.ArrayList;
import java.util.List;
import static com.didiglobal.logi.security.util.HttpRequestUtil.*;
/**
* 登陆拦截 && 权限校验
*/
@Component
public class PermissionInterceptor implements HandlerInterceptor {
private static final ILog LOGGER = LogFactory.getLog(PermissionInterceptor.class);
private static final String LOGIN_URL = Constants.ACCOUNT_LOGIN;
private static final String OPEN_URL_PREFIX = ApiPrefix.API_V3_OPEN_PREFIX;
@Autowired
private HandleFactory handleFactory;
@Autowired
private KmAccountConfig kmAccountConfig;
@Autowired
private TrickJumpLoginService trickJumpLoginService;
/**
* 拦截预处理
* @return boolean false:拦截, 不向下执行, true:放行
*/
@Override
public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler) throws Exception {
if (hasNoInterceptor(request)) {
return true;
}
// 检查是否免登录调用
if (trickJumpLoginService.isOpenTrickJumpLogin(request)) {
String userName = trickJumpLoginService.checkTrickJumpLogin(request);
if (userName != null) {
// 允许免登录调用接口
initLoginContext(request, response, userName, Constant.INVALID_CODE);
return true;
}
}
String classRequestMappingValue = null;
try {
classRequestMappingValue = getClassRequestMappingValue(handler);
} catch (Exception e) {
LOGGER.error(
"class=PermissionInterceptor||method=preHandle||uri={}||msg=parse class request-mapping failed",
request.getRequestURI(), e);
}
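// The login endpoint and the open (no-auth) API prefix are whitelisted and bypass the login check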
List<String> whiteMappingValues = new ArrayList<>();
whiteMappingValues.add(LOGIN_URL);
whiteMappingValues.add(OPEN_URL_PREFIX);
return this.getLoginService().interceptorCheck(request, response, classRequestMappingValue, whiteMappingValues);
}
/**************************************************** private method ****************************************************/
private LoginService getLoginService() {
LoginService loginService = handleFactory.getByClassNamePer(kmAccountConfig.getLoginServiceName(), LoginService.class);
if (loginService == null) {
LOGGER.error("method=getLoginService||specifiedLoginServiceName={}||msg=specified login service not exist and use default", kmAccountConfig.getLoginServiceName());
return handleFactory.getByClassNamePer(LoginServiceNameEnum.DEFAULT_LOGIN_NAME, LoginService.class);
}
return loginService;
}
/**
* 通过反射获取带有@RequestMapping的Controller
* @param handler 请求处理器
* @return @RequestMapping的value
*/
private String getClassRequestMappingValue(Object handler) {
RequestMapping requestMapping;
if (handler instanceof HandlerMethod) {
HandlerMethod hm = (HandlerMethod) handler;
requestMapping = hm.getMethod().getDeclaringClass().getAnnotation(RequestMapping.class);
} else if (handler instanceof org.springframework.web.servlet.mvc.Controller) {
org.springframework.web.servlet.mvc.Controller hm = (org.springframework.web.servlet.mvc.Controller) handler;
Class<? extends org.springframework.web.servlet.mvc.Controller> hmClass = hm.getClass();
requestMapping = hmClass.getAnnotation(RequestMapping.class);
} else {
requestMapping = handler.getClass().getAnnotation(RequestMapping.class);
}
if ((null == requestMapping) || requestMapping.value().length == 0) {
return null;
}
return requestMapping.value()[0];
}
/**
* 是否需要拦截
* just for test
*/
private boolean hasNoInterceptor(HttpServletRequest request) {
return Boolean.FALSE;
}
/**
* @see com.didiglobal.logi.security.service.impl.LoginServiceImpl@initLoginContext
* 由于initLoginContext是私有方法,因此当前拷贝了一份代码出来
*/
private void initLoginContext(HttpServletRequest request, HttpServletResponse response, String userName, Integer userId) {
HttpSession session = request.getSession(true);
session.setMaxInactiveInterval( COOKIE_OR_SESSION_MAX_AGE_UNIT_SEC );
session.setAttribute(USER, userName);
session.setAttribute(USER_ID, userId);
Cookie cookieUserName = new Cookie(USER, userName);
cookieUserName.setMaxAge(COOKIE_OR_SESSION_MAX_AGE_UNIT_SEC);
cookieUserName.setPath("/");
Cookie cookieUserId = new Cookie(USER_ID, userId.toString());
cookieUserId.setMaxAge(COOKIE_OR_SESSION_MAX_AGE_UNIT_SEC);
cookieUserId.setPath("/");
response.addCookie(cookieUserName);
response.addCookie(cookieUserId);
}
}

View File

@@ -0,0 +1,96 @@
server:
port: 8080 # 服务端口
tomcat:
accept-count: 1000
max-connections: 10000
spring:
application:
name: know-streaming
profiles:
active: dev
main:
allow-bean-definition-overriding: true
jackson:
time-zone: GMT+8
datasource:
know-streaming: # know-streaming 自身数据库的配置
jdbc-url: jdbc:mariadb://127.0.0.1:3306/know_streaming?useUnicode=true&characterEncoding=utf8&jdbcCompliantTruncation=true&allowMultiQueries=true&useSSL=false&alwaysAutoGeneratedKeys=true&serverTimezone=GMT%2B8&allowPublicKeyRetrieval=true
username: root
password: 123456
driver-class-name: org.mariadb.jdbc.Driver
maximum-pool-size: 20
idle-timeout: 30000
connection-test-query: SELECT 1
logi-job: # know-streaming 依赖的 logi-job 模块的数据库的配置,默认与 know-streaming 的数据库配置保持一致即可
jdbc-url: jdbc:mariadb://127.0.0.1:3306/know_streaming?useUnicode=true&characterEncoding=utf8&jdbcCompliantTruncation=true&allowMultiQueries=true&useSSL=false&alwaysAutoGeneratedKeys=true&serverTimezone=GMT%2B8&allowPublicKeyRetrieval=true
username: root
password: 123456
driver-class-name: org.mariadb.jdbc.Driver
max-lifetime: 60000
init-sql: true
init-thread-num: 20
max-thread-num: 50
log-expire: 3 # 日志保存天数,以天为单位
app-name: know-streaming
claim-strategy: com.didiglobal.logi.job.core.consensual.RandomConsensual
logi-security: # know-streaming 依赖的 logi-security 模块的数据库的配置,默认与 know-streaming 的数据库配置保持一致即可
jdbc-url: jdbc:mariadb://127.0.0.1:3306/know_streaming?useUnicode=true&characterEncoding=utf8&jdbcCompliantTruncation=true&allowMultiQueries=true&useSSL=false&alwaysAutoGeneratedKeys=true&serverTimezone=GMT%2B8&allowPublicKeyRetrieval=true
username: root
password: 123456
driver-class-name: org.mariadb.jdbc.Driver
app-name: know-streaming
resource-extend-bean-name: myResourceExtendImpl
logging:
config: classpath:logback-spring.xml
# 线程池大小相关配置
thread-pool:
scheduled:
thread-num: 2 # @Scheduled任务的线程池大小默认是一个
collector: # 采集模块的配置
future-util: # 采集模块线程池配置
num: 3 # 线程池个数
thread-num: 64 # 每个线程池核心线程数
queue-size: 10000 # 每个线程池队列大小
select-suitable-enable: true # 任务是否自动选择合适的线程池,非主要,可不修改
suitable-queue-size: 1000 # 线程池理想的队列大小,非主要,可不修改
task: # 任务模块的配置
heaven: # 采集任务配置
thread-num: 20 # 采集任务线程池核心线程数
queue-size: 1000 # 采集任务线程池队列大小
# 客户端池大小相关配置
client-pool:
kafka-consumer:
min-idle-client-num: 2 # 最小空闲客户端数
max-idle-client-num: 20 # 最大空闲客户端数
max-total-client-num: 20 # 最大客户端数
borrow-timeout-unit-ms: 5000 # 租借超时时间,单位毫秒
# es客户端服务地址
es.client.address: 127.0.0.1:8061
# 普罗米修斯指标导出相关配置
management:
endpoints:
web:
base-path: /metrics
exposure:
include: '*'
health:
elasticsearch:
enabled: false
metrics:
export:
prometheus:
descriptions: true
enabled: true
tags:
application: know-streaming

View File

@@ -0,0 +1,63 @@
<assembly>
<id>assembly</id>
<formats>
<format>tar</format>
<format>zip</format>
</formats>
<fileSets>
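<!-- bin scripts, config files, the packaged jar under lib, and an empty logs directory -->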
<fileSet>
<directory>src/main/resources/bin</directory>
<outputDirectory>bin</outputDirectory>
<includes>
<include>control.sh</include>
<include>start.bat</include>
</includes>
<fileMode>0755</fileMode>
</fileSet>
<fileSet>
<directory>src/main/resources</directory>
<outputDirectory>config</outputDirectory>
<includes>
<include>*.properties</include>
<include>*.xml</include>
<include>*.yml</include>
<include>env/dev/*</include>
<include>env/qa/*</include>
<include>env/uat/*</include>
<include>env/prod/*</include>
</includes>
</fileSet>
<fileSet>
<directory>target</directory>
<outputDirectory>lib</outputDirectory>
<includes>
<!--
<include>*release*.jar</include>
-->
<include>kafka-manager-web*.jar</include>
</includes>
<excludes>
<exclude>*sources.jar</exclude>
</excludes>
</fileSet>
<fileSet>
<directory>src/main/resources</directory>
<outputDirectory>logs</outputDirectory>
<fileMode>0755</fileMode>
<excludes>
<exclude>**/*</exclude>
</excludes>
</fileSet>
<!-- <fileSet>
<directory>${project.build.directory}/asciidoc</directory>
<outputDirectory>docs</outputDirectory>
<includes>
<include>md/*</include>
<include>html/*</include>
<include>pdf/*</include>
</includes>
<fileMode>0755</fileMode>
</fileSet>-->
</fileSets>
</assembly>

View File

@@ -0,0 +1,236 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="10 seconds">
<contextName>logback</contextName>
<property name="log.path" value="./logs" />
<!-- 彩色日志 -->
<!-- 彩色日志依赖的渲染类 -->
<conversionRule conversionWord="clr" converterClass="org.springframework.boot.logging.logback.ColorConverter" />
<conversionRule conversionWord="wex" converterClass="org.springframework.boot.logging.logback.WhitespaceThrowableProxyConverter" />
<conversionRule conversionWord="wEx" converterClass="org.springframework.boot.logging.logback.ExtendedWhitespaceThrowableProxyConverter" />
<!-- 彩色日志格式 -->
<property name="CONSOLE_LOG_PATTERN" value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(---){faint} %clr([%15.15t]){faint} %clr(%-40.40logger{39}){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>
<!--输出到控制台-->
<appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender">
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<level>info</level>
</filter>
<encoder>
<Pattern>${CONSOLE_LOG_PATTERN}</Pattern>
<charset>UTF-8</charset>
</encoder>
</appender>
<!--输出到文件-->
<!-- 时间滚动输出 level为 DEBUG 日志 -->
<appender name="DEBUG_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/log_debug.log</file>
<!--日志文件输出格式-->
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
<charset>UTF-8</charset> <!-- 设置字符集 -->
</encoder>
<!-- 日志记录器的滚动策略,按日期,按大小记录 -->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!-- 日志归档 -->
<fileNamePattern>${log.path}/log_debug_%d{yyyy-MM-dd}.%i.log</fileNamePattern>
<timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
<maxFileSize>100MB</maxFileSize>
</timeBasedFileNamingAndTriggeringPolicy>
<!--日志文件保留天数-->
<maxHistory>2</maxHistory>
<totalSizeCap>5GB</totalSizeCap>
</rollingPolicy>
<!-- 此日志文件只记录debug级别的 -->
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>debug</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<!-- 时间滚动输出 level为 INFO 日志 -->
<appender name="INFO_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<!-- 正在记录的日志文件的路径及文件名 -->
<file>${log.path}/log_info.log</file>
<!--日志文件输出格式-->
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
<charset>UTF-8</charset>
</encoder>
<!-- 日志记录器的滚动策略,按日期,按大小记录 -->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!-- 每天日志归档路径以及格式 -->
<fileNamePattern>${log.path}/log_info_%d{yyyy-MM-dd}.%i.log</fileNamePattern>
<timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
<maxFileSize>100MB</maxFileSize>
</timeBasedFileNamingAndTriggeringPolicy>
<!--日志文件保留天数-->
<maxHistory>7</maxHistory>
</rollingPolicy>
<!-- 此日志文件只记录info级别的 -->
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>info</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<!-- 时间滚动输出 level为 WARN 日志 -->
<appender name="WARN_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<!-- 正在记录的日志文件的路径及文件名 -->
<file>${log.path}/log_warn.log</file>
<!--日志文件输出格式-->
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
<charset>UTF-8</charset> <!-- 此处设置字符集 -->
</encoder>
<!-- 日志记录器的滚动策略,按日期,按大小记录 -->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${log.path}/log_warn_%d{yyyy-MM-dd}.%i.log</fileNamePattern>
<timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
<maxFileSize>100MB</maxFileSize>
</timeBasedFileNamingAndTriggeringPolicy>
<!--日志文件保留天数-->
<maxHistory>7</maxHistory>
</rollingPolicy>
<!-- 此日志文件只记录warn级别的 -->
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>warn</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<!-- 时间滚动输出 level为 ERROR 日志 -->
<appender name="ERROR_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<!-- 正在记录的日志文件的路径及文件名 -->
<file>${log.path}/log_error.log</file>
<!--日志文件输出格式-->
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
<charset>UTF-8</charset> <!-- 此处设置字符集 -->
</encoder>
<!-- 日志记录器的滚动策略,按日期,按大小记录 -->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${log.path}/log_error_%d{yyyy-MM-dd}.%i.log</fileNamePattern>
<timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
<maxFileSize>100MB</maxFileSize>
</timeBasedFileNamingAndTriggeringPolicy>
<!--日志文件保留天数-->
<maxHistory>7</maxHistory>
</rollingPolicy>
<!-- 此日志文件只记录ERROR级别的 -->
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>ERROR</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<!-- Metrics信息收集日志 -->
<appender name="ES_LOGGER" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/es/es.log</file>
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
<charset>UTF-8</charset>
</encoder>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${log.path}/es/es_%d{yyyy-MM-dd}.%i.log</fileNamePattern>
<timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
<maxFileSize>100MB</maxFileSize>
</timeBasedFileNamingAndTriggeringPolicy>
<maxHistory>3</maxHistory>
</rollingPolicy>
</appender>
<!-- Metrics信息收集日志 -->
<appender name="METRIC_LOGGER" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/metric/metrics.log</file>
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
<charset>UTF-8</charset>
</encoder>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${log.path}/metric/metrics_%d{yyyy-MM-dd}.%i.log</fileNamePattern>
<timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
<maxFileSize>100MB</maxFileSize>
</timeBasedFileNamingAndTriggeringPolicy>
<maxHistory>3</maxHistory>
</rollingPolicy>
</appender>
<!-- Task信息收集日志 -->
<appender name="TASK_LOGGER" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/task/task.log</file>
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
<charset>UTF-8</charset>
</encoder>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${log.path}/task/task_%d{yyyy-MM-dd}.%i.log</fileNamePattern>
<timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
<maxFileSize>100MB</maxFileSize>
</timeBasedFileNamingAndTriggeringPolicy>
<maxHistory>3</maxHistory>
</rollingPolicy>
</appender>
<!-- logi-job log, time-based rolling output -->
<appender name="logIJobLogger" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/logIJob/logIJob.log</file>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<FileNamePattern>${log.path}/logIJob/logIJob.log.%d{yyyy-MM-dd}</FileNamePattern>
<maxHistory>${maxHistory}</maxHistory>
</rollingPolicy>
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
<charset>UTF-8</charset>
</encoder>
</appender>
<logger name="ES_LOGGER" level="ERROR" additivity="false">
<appender-ref ref="ES_LOGGER"/>
</logger>
<logger name="METRIC_LOGGER" level="ERROR" additivity="false">
<appender-ref ref="METRIC_LOGGER"/>
</logger>
<logger name="TASK_LOGGER" level="ERROR" additivity="false">
<appender-ref ref="TASK_LOGGER"/>
</logger>
<logger name="com.didiglobal.logi.job" level="ERROR" additivity="false">
<appender-ref ref="logIJobLogger" />
</logger>
<logger name="org.apache.zookeeper" level="WARN" additivity="false" />
<logger name="org.apache.ibatis" level="INFO" additivity="false" />
<logger name="org.mybatis.spring" level="INFO" additivity="false" />
<logger name="com.github.miemiedev.mybatis.paginator" level="INFO" additivity="false" />
<root level="debug">
<appender-ref ref="CONSOLE" />
<appender-ref ref="DEBUG_FILE" />
<appender-ref ref="INFO_FILE" />
<appender-ref ref="WARN_FILE" />
<appender-ref ref="ERROR_FILE" />
<!--<appender-ref ref="METRICS_LOG" />-->
</root>
<!-- Production profile: output to files -->
<!--<springProfile name="pro">-->
<!--<root level="info">-->
<!--<appender-ref ref="CONSOLE" />-->
<!--<appender-ref ref="DEBUG_FILE" />-->
<!--<appender-ref ref="INFO_FILE" />-->
<!--<appender-ref ref="ERROR_FILE" />-->
<!--<appender-ref ref="WARN_FILE" />-->
<!--</root>-->
<!--</springProfile>-->
</configuration>
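For context, a minimal sketch (an illustration only, not part of this commit; the class name and messages are hypothetical) of how application code could write to the named ES_LOGGER, METRIC_LOGGER and TASK_LOGGER loggers declared above via the SLF4J API. Since those loggers are configured at level ERROR with additivity="false", only error() calls reach the es/, metric/ and task/ log files:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Illustration only: loggers are resolved by the names used in the <logger name="..."> elements,
// so records written here are routed to ${log.path}/es/es.log, metric/metrics.log and task/task.log.
public class NamedLoggerUsageExample {
    private static final Logger ES_LOGGER     = LoggerFactory.getLogger("ES_LOGGER");
    private static final Logger METRIC_LOGGER = LoggerFactory.getLogger("METRIC_LOGGER");
    private static final Logger TASK_LOGGER   = LoggerFactory.getLogger("TASK_LOGGER");

    public void reportFailures() {
        // Only ERROR-level records pass the logger level configured above.
        ES_LOGGER.error("es query failed, dsl={}", "...");
        METRIC_LOGGER.error("metric flush failed, metric={}", "BytesIn");
        TASK_LOGGER.error("task execution failed, task={}", "cluster-metric-collect");
    }
}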

View File

@@ -0,0 +1,46 @@
package com.xiaojukeji.know.streaming.km;
import com.xiaojukeji.know.streaming.km.rest.KnowStreaming;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.web.server.LocalServerPort;
import org.springframework.http.HttpHeaders;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;
/**
 * @author d06679
 * @date 2019/4/11
 *
 * A random port must be used so that unit test runs do not fail because the port is already in use
 */
@ActiveProfiles("test")
@ExtendWith(SpringExtension.class)
@ContextConfiguration(classes = KnowStreaming.class)
@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT)
public class KnowStreamApplicationTest {
protected HttpHeaders headers;
@LocalServerPort
private Integer port;
@BeforeEach
public void setUp() {
// Obtain the port that the Spring Boot server is listening on
// port = applicationContext.getWebServer().getPort();
System.out.println(String.format("port is : [%d]", port));
//
// headers = new HttpHeaders();
// headers.add("X-SSO-USER", "zengqiao");
}
@Test
public void test() {
Assertions.assertNotNull(port);
}
}
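
As a usage note, a minimal sketch (assumptions: spring-boot-starter-test's TestRestTemplate is auto-configured for the random-port web environment, and "/api/v3/health" is a hypothetical endpoint used purely for illustration) of how a subclass could call the embedded server on the random port:

package com.xiaojukeji.know.streaming.km;

import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.web.client.TestRestTemplate;
import org.springframework.http.ResponseEntity;

// Hypothetical example: with webEnvironment = RANDOM_PORT, Spring Boot auto-configures a
// TestRestTemplate that already targets http://localhost:<random port>.
public class RandomPortUsageExampleTest extends KnowStreamApplicationTest {
    @Autowired
    private TestRestTemplate restTemplate;

    @Test
    public void serverRespondsOnRandomPort() {
        // "/api/v3/health" is an assumed endpoint, shown only to illustrate the call pattern.
        ResponseEntity<String> response = restTemplate.getForEntity("/api/v3/health", String.class);
        Assertions.assertNotNull(response);
    }
}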

View File

@@ -0,0 +1,62 @@
package com.xiaojukeji.know.streaming.km.core;
import com.xiaojukeji.know.streaming.km.KnowStreamApplicationTest;
import com.xiaojukeji.know.streaming.km.common.bean.dto.metrices.MetricsTopicDTO;
import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.TopicMetrics;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.PaginationResult;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
import com.xiaojukeji.know.streaming.km.common.bean.entity.search.SearchFuzzy;
import com.xiaojukeji.know.streaming.km.common.bean.entity.search.SearchPage;
import com.xiaojukeji.know.streaming.km.common.bean.entity.search.SearchSort;
import com.xiaojukeji.know.streaming.km.common.bean.vo.metrics.line.MetricMultiLinesVO;
import com.xiaojukeji.know.streaming.km.core.service.topic.TopicMetricService;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.ArrayList;
import java.util.List;
public class TopicMetricServiceTest extends KnowStreamApplicationTest {
@Autowired
private TopicMetricService topicMetricService;
@Test
public void listTopicMetricsFromESTest(){
Long clusterId = 1L;
Long endTime = System.currentTimeMillis();
Long startTime = endTime - 3600 * 1000;
MetricsTopicDTO dto = new MetricsTopicDTO();
dto.setStartTime(startTime);
dto.setEndTime(endTime);
dto.setTopNu(0);
List<String> metricName = new ArrayList<>();
metricName.add("LogSize");
dto.setMetricsNames(metricName);
List<String> topicName = new ArrayList<>();
topicName.add("__consumer_offsets");
dto.setTopics(topicName);
Result<List<MetricMultiLinesVO>> ret = topicMetricService.listTopicMetricsFromES(clusterId, dto);
assert ret.successful();
}
@Test
public void pagingTopicWithLatestMetricsFromESTest(){
Long clusterId = 2L;
List<String> metricNameList = new ArrayList<>();
SearchSort sort = new SearchSort();
sort.setQueryName("LogSize");
SearchFuzzy fuzzy = new SearchFuzzy();
SearchPage page = new SearchPage();
PaginationResult<TopicMetrics> result = topicMetricService.pagingTopicWithLatestMetricsFromES(
clusterId, metricNameList, sort, fuzzy, null, null, page);
assert result.successful();
}
}

View File

@@ -0,0 +1,94 @@
package com.xiaojukeji.know.streaming.km.persistence.es;
import com.xiaojukeji.know.streaming.km.KnowStreamApplicationTest;
import com.xiaojukeji.know.streaming.km.common.bean.entity.search.SearchTerm;
import com.xiaojukeji.know.streaming.km.common.bean.entity.search.SearchRange;
import com.xiaojukeji.know.streaming.km.common.bean.entity.search.SearchSort;
import com.xiaojukeji.know.streaming.km.common.bean.vo.metrics.point.MetricPointVO;
import com.xiaojukeji.know.streaming.km.persistence.es.dao.BrokerMetricESDAO;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
public class BrokerMetricESDAOTest extends KnowStreamApplicationTest {
@Autowired
private BrokerMetricESDAO brokerMetriceESDAO;
@Test
public void buildSortDslTest(){
SearchSort sort = new SearchSort("age", true);
SearchSort def = new SearchSort("timestamp", true);
String sortDsl = brokerMetriceESDAO.buildSortDsl(sort, def);
System.out.println(sortDsl);
}
@Test
public void buildRangeDslTest(){
SearchRange sort = new SearchRange("age", 1232321f, 45345345345f);
String sortDsl = brokerMetriceESDAO.buildRangeDsl(sort);
System.out.println(sortDsl);
}
@Test
public void buildMatchDslTest(){
List<SearchTerm> matches = new ArrayList<>();
matches.add(new SearchTerm("abc", "3"));
matches.add(new SearchTerm("dce", "345"));
String matchDsl = brokerMetriceESDAO.buildMatchDsl(matches);
System.out.println(matchDsl);
}
@Test
public void getBrokerMetricsPointTest(){
Long clusterId = 2L;
Integer brokerId = 1;
List<String> metrics = Arrays.asList("BytesIn", "BytesIn_min_5");
Long endTime = System.currentTimeMillis();
Long startTime = endTime - 4 * 60 * 60 * 1000;
Map<String/*metric*/, MetricPointVO> metricPointVOS = brokerMetriceESDAO.getBrokerMetricsPoint(
clusterId, brokerId, metrics, "avg", startTime, endTime);
assert null != metricPointVOS;
}
@Test
public void listBrokerMetricsByBrokerIdsTest(){
Long clusterId = 123L;
List<String> metrics = Arrays.asList("BytesInPerSec_min_1", "BytesInPerSec_min_15");
List<Long> brokerIds = Arrays.asList(1L);
Long endTime = System.currentTimeMillis();
Long startTime = endTime - 4 * 60 * 60 * 1000;
brokerMetriceESDAO.listBrokerMetricsByBrokerIds(clusterId, metrics, "avg", brokerIds, startTime, endTime);
}
@Test
public void listBrokerMetricsByTopTest(){
Long clusterId = 123L;
List<String> metrics = Arrays.asList("BytesInPerSec_min_1", "BytesInPerSec_min_15");
Long endTime = System.currentTimeMillis();
Long startTime = endTime - 4 * 60 * 60 * 1000;
brokerMetriceESDAO.listBrokerMetricsByTop(clusterId, new ArrayList<>(), metrics, "avg", 5, startTime, endTime);
}
@Test
public void getTopBrokerIdsTest(){
Long clusterId = 123L;
List<String> metrics = Arrays.asList("BytesInPerSec_min_1", "BytesInPerSec_min_15");
Long endTime = System.currentTimeMillis();
Long startTime = endTime - 4 * 60 * 60 * 1000;
brokerMetriceESDAO.getTopNBrokerIds(clusterId, metrics, "avg", 5, startTime, endTime);
}
}

View File

@@ -0,0 +1,50 @@
package com.xiaojukeji.know.streaming.km.persistence.es;
import com.xiaojukeji.know.streaming.km.KnowStreamApplicationTest;
import com.xiaojukeji.know.streaming.km.common.bean.entity.search.SearchTerm;
import com.xiaojukeji.know.streaming.km.common.bean.entity.search.SearchPage;
import com.xiaojukeji.know.streaming.km.common.bean.entity.search.SearchRange;
import com.xiaojukeji.know.streaming.km.common.bean.entity.search.SearchSort;
import com.xiaojukeji.know.streaming.km.persistence.es.dao.ClusterMetricESDAO;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
public class ClusterMetricESDAOTest extends KnowStreamApplicationTest {
@Autowired
private ClusterMetricESDAO clusterMetricESDAO;
@Test
public void listClusterMetricsByClusterIdsTest(){
List<String> metrics = Arrays.asList("BytesIn_min_1", "BytesOut_min_1");
List<Long> clusterIds = Arrays.asList(123L);
Long endTime = System.currentTimeMillis();
Long startTime = endTime - 4 * 60 * 60 * 1000;
clusterMetricESDAO.listClusterMetricsByClusterIds(metrics, "avg", clusterIds, startTime, endTime);
}
@Test
public void pagingClusterWithLatestMetricsTest(){
List<Long> clusterIds = new ArrayList<>();
List<String> metricList = new ArrayList<>();
List<SearchTerm> searchMatches = new ArrayList<>();
SearchTerm match = new SearchTerm("Zookeepers", "3");
match.setMetric(true);
searchMatches.add(match);
SearchSort sort = new SearchSort("Replicas", true);
sort.setMetric(true);
SearchRange range = new SearchRange("Brokers", 1, 100);
range.setMetric(true);
SearchPage page = new SearchPage();
// clusterMetricESDAO.pagingClusterWithLatestMetrics(searchMatches, sort, range);
}
}

View File

@@ -0,0 +1,92 @@
package com.xiaojukeji.know.streaming.km.persistence.es;
import com.google.common.collect.Table;
import com.xiaojukeji.know.streaming.km.KnowStreamApplicationTest;
import com.xiaojukeji.know.streaming.km.common.bean.entity.group.GroupTopic;
import com.xiaojukeji.know.streaming.km.common.bean.entity.search.SearchTerm;
import com.xiaojukeji.know.streaming.km.common.bean.entity.topic.TopicPartitionKS;
import com.xiaojukeji.know.streaming.km.common.bean.po.metrice.GroupMetricPO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.metrics.point.MetricPointVO;
import com.xiaojukeji.know.streaming.km.common.enums.AggTypeEnum;
import com.xiaojukeji.know.streaming.km.persistence.es.dao.GroupMetricESDAO;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.util.CollectionUtils;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Set;
public class GroupMetricESDAOTest extends KnowStreamApplicationTest {
@Autowired
private GroupMetricESDAO groupMetricESDAO;
@Test
public void listLatestMetricsAggByGroupTopicTest(){
Long clusterPhyId = 2L;
List<GroupTopic> groupTopicList = new ArrayList<>();
groupTopicList.add(new GroupTopic("g-know-streaming-123456", "know-streaming-test-251"));
groupTopicList.add(new GroupTopic("test_group", "know-streaming-test-251"));
List<String> metrics = Arrays.asList("OffsetConsumed", "Lag");
AggTypeEnum aggType = AggTypeEnum.AVG;
List<GroupMetricPO> groupMetricPOS = groupMetricESDAO.listLatestMetricsAggByGroupTopic(clusterPhyId, groupTopicList, metrics, aggType);
assert !CollectionUtils.isEmpty(groupMetricPOS);
}
@Test
public void listGroupTopicPartitionsTest(){
Long clusterId = 2L;
String groupName = "g-know-streaming-123456";
Long endTime = System.currentTimeMillis();
Long startTime = endTime - 24 * 3600 * 1000;
Set<TopicPartitionKS> topicPartitionKS = groupMetricESDAO.listGroupTopicPartitions(clusterId, groupName, startTime, endTime);
assert null != topicPartitionKS;
}
@Test
public void listPartitionLatestMetricsTest(){
Long clusterId = 2L;
String groupName = "test_group_20220421";
String topicName = "know-streaming-test-251";
List<GroupMetricPO> groupMetricPOS = groupMetricESDAO.listPartitionLatestMetrics(clusterId, groupName, topicName, null);
assert !CollectionUtils.isEmpty(groupMetricPOS);
}
@Test
public void countMetricValueTest(){
Long clusterId = 3L;
String groupName = "test_group";
SearchTerm searchTerm = new SearchTerm("HealthCheckTotal", "1", false);
searchTerm.setMetric(true);
Long endTime = System.currentTimeMillis();
Long startTime = endTime - 24 * 3600 * 1000;
Integer i = groupMetricESDAO.countMetricValue(clusterId, groupName, searchTerm, startTime, endTime);
assert null != i;
}
@Test
public void listGroupMetricsTest(){
Long clusterId = 2L;
String groupName = "g-know-streaming-123456";
Long endTime = System.currentTimeMillis();
Long startTime = endTime - 24 * 3600 * 1000;
List<TopicPartitionKS> topicPartitionKS = new ArrayList<>();
topicPartitionKS.add(new TopicPartitionKS("know-streaming-test-251", 4));
List<String> metrics = new ArrayList<>();
metrics.add("OffsetConsumed");
Table<String/*metric*/, String/*topic&partition*/, List<MetricPointVO>> multiLinesVOS = groupMetricESDAO.listGroupMetrics(
clusterId, groupName, topicPartitionKS, metrics, "avg", startTime, endTime);
assert null != multiLinesVOS;
}
}

View File

@@ -0,0 +1,27 @@
package com.xiaojukeji.know.streaming.km.persistence.es;
import com.xiaojukeji.know.streaming.km.KnowStreamApplicationTest;
import com.xiaojukeji.know.streaming.km.common.bean.po.metrice.PartitionMetricPO;
import com.xiaojukeji.know.streaming.km.persistence.es.dao.PartitionMetricESDAO;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.ArrayList;
import java.util.List;
public class PartitionMetricESDAOTest extends KnowStreamApplicationTest {
@Autowired
private PartitionMetricESDAO partitionMetricESDAO;
@Test
public void listPartitionLatestMetricsByTopicTest(){
Long clusterPhyId = 2L;
String topic = "__consumer_offsets";
List<PartitionMetricPO> partitionMetricPOS = partitionMetricESDAO.listPartitionLatestMetricsByTopic(
clusterPhyId, topic, new ArrayList<>());
assert null != partitionMetricPOS;
}
}

View File

@@ -0,0 +1,27 @@
package com.xiaojukeji.know.streaming.km.persistence.es;
import com.xiaojukeji.know.streaming.km.KnowStreamApplicationTest;
import com.xiaojukeji.know.streaming.km.common.bean.po.metrice.ReplicationMetricPO;
import com.xiaojukeji.know.streaming.km.persistence.es.dao.ReplicationMetricESDAO;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.ArrayList;
public class ReplicationMetricESDAOTest extends KnowStreamApplicationTest {
@Autowired
private ReplicationMetricESDAO replicationMetricESDAO;
@Test
public void getReplicationLatestMetricsTest(){
Long clusterPhyId = 2L;
Integer brokerId = 1;
String topic = "know-streaming-test-251";
Integer partitionId = 1;
ReplicationMetricPO replicationMetricPO = replicationMetricESDAO.getReplicationLatestMetrics(
clusterPhyId, brokerId, topic, partitionId, new ArrayList<>());
assert null != replicationMetricPO;
}
}

View File

@@ -0,0 +1,130 @@
package com.xiaojukeji.know.streaming.km.persistence.es;
import com.google.common.collect.Table;
import com.xiaojukeji.know.streaming.km.KnowStreamApplicationTest;
import com.xiaojukeji.know.streaming.km.common.bean.entity.search.SearchFuzzy;
import com.xiaojukeji.know.streaming.km.common.bean.entity.search.SearchTerm;
import com.xiaojukeji.know.streaming.km.common.bean.entity.search.SearchSort;
import com.xiaojukeji.know.streaming.km.common.bean.po.metrice.TopicMetricPO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.metrics.point.MetricPointVO;
import com.xiaojukeji.know.streaming.km.persistence.es.dao.TopicMetricESDAO;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.util.CollectionUtils;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
public class TopicMetricESDAOTest extends KnowStreamApplicationTest {
@Autowired
private TopicMetricESDAO topicMetricESDAO;
@Test
public void listTopicMaxMinMetricsTest(){
Long clusterId = 2L;
String topic = "know-streaming-test-251";
String topic1 = "topic_test01";
Long endTime = System.currentTimeMillis();
Long startTime = endTime - 4 * 60 * 60 * 1000;
List<TopicMetricPO> ret = topicMetricESDAO.listTopicMaxMinMetrics(
clusterId, Arrays.asList(topic, topic1), "BytesIn", false, startTime, endTime);
assert null != ret;
}
@Test
public void getTopicsAggsMetricsValueTest(){
Long clusterId = 2L;
String topic = "know-streaming-test-251";
String topic1 = "topic_test01";
List<String> metrics = Arrays.asList("BytesIn", "BytesIn_min_5");
Long endTime = System.currentTimeMillis();
Long startTime = endTime - 4 * 60 * 60 * 1000;
Table<String/*topics*/, String/*metric*/, MetricPointVO> ret = topicMetricESDAO.getTopicsAggsMetricsValue(
clusterId, Arrays.asList(topic, topic1), metrics, "max", startTime, endTime);
assert null != ret;
}
@Test
public void listTopicWithLatestMetricsTest(){
Long clusterId = 2L;
SearchSort sort = new SearchSort("LogSize", true);
sort.setMetric(true);
SearchFuzzy fuzzy = new SearchFuzzy("topic", "know");
List<SearchTerm> terms = new ArrayList<>();
List<TopicMetricPO> topicMetricPOS = topicMetricESDAO.listTopicWithLatestMetrics(clusterId, sort, fuzzy, null, terms);
assert !CollectionUtils.isEmpty(topicMetricPOS);
}
@Test
public void getTopicLatestMetricByBrokerIdTest(){
Long clusterId = 2L;
String topic = "know-streaming-test-251";
Integer brokerId = 1;
TopicMetricPO topicMetricPO = topicMetricESDAO.getTopicLatestMetricByBrokerId(clusterId, topic, brokerId, new ArrayList<>());
assert null != topicMetricPO;
}
@Test
public void getTopicLatestMetricTest(){
Long clusterId = 2L;
String topic = "know-streaming-test-251";
TopicMetricPO topicMetricPO = topicMetricESDAO.getTopicLatestMetric(clusterId, topic, new ArrayList<>());
assert null != topicMetricPO;
}
@Test
public void listTopicLatestMetricTest(){
Long clusterId = 2L;
String topic = "know-streaming-test-251";
String topic1 = "know-streaming-123";
String topic2 = "1209test";
List<String> metrics = Arrays.asList("BytesIn", "BytesIn_min_5");
List<TopicMetricPO> topicMetricPO = topicMetricESDAO.listTopicLatestMetric(clusterId, Arrays.asList(topic,topic1,topic2), metrics);
assert null != topicMetricPO;
}
@Test
public void listTopicMetricsByTopicsTest(){
Long clusterId = 2L;
List<String> metrics = Arrays.asList("BytesIn", "BytesIn_min_5");
List<String> topics = Arrays.asList("QAtest_1_13", "__consumer_offsets");
Long endTime = System.currentTimeMillis();
Long startTime = endTime - 4 * 60 * 60 * 1000;
topicMetricESDAO.listTopicMetricsByTopics(clusterId, metrics, "avg", topics, startTime, endTime);
}
@Test
public void countMetricValueOccurrencesTest(){
Long clusterPhyId = 2L;
String topic = "__consumer_offsets";
String metricName = "HealthCheckPassed";
Float metricValue = 2f;
boolean equalMetricValue = true;
SearchTerm searchMatch = new SearchTerm(metricName, metricValue.toString(), equalMetricValue);
searchMatch.setMetric(true);
Long endTime = System.currentTimeMillis();
Long startTime = endTime - 4 * 60 * 60 * 1000;
Integer i = topicMetricESDAO.countMetricValue(clusterPhyId, topic, searchMatch, startTime, endTime);
assert null != i;
}
}