Merge 3.3.0 branch

zengqiao
2023-02-24 17:13:50 +08:00
616 changed files with 32894 additions and 8421 deletions

View File

@@ -18,7 +18,9 @@
<log4j2.version>2.16.0</log4j2.version>
<springboot.version>2.3.7.RELEASE</springboot.version>
- <spring.version>5.3.18</spring.version>
+ <spring.version>5.3.19</spring.version>
<maven.test.skip>false</maven.test.skip>
</properties>
<dependencies>
@@ -64,6 +66,12 @@
<version>${project.parent.version}</version>
</dependency>
+ <dependency>
+ <groupId>com.xiaojukeji.kafka</groupId>
+ <artifactId>km-ha</artifactId>
+ <version>${project.parent.version}</version>
+ </dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
@@ -111,10 +119,6 @@
<scope>test</scope>
</dependency>
- <dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- </dependency>
<dependency>
<groupId>io.dropwizard.metrics</groupId>
<artifactId>metrics-core</artifactId>
@@ -143,6 +147,26 @@
<artifactId>spring-boot-actuator-autoconfigure</artifactId>
<version>${springboot.version}</version>
</dependency>
+ <!--testcontainers-->
+ <dependency>
+ <groupId>org.testcontainers</groupId>
+ <artifactId>kafka</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.testcontainers</groupId>
+ <artifactId>mysql</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.testcontainers</groupId>
+ <artifactId>elasticsearch</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <!--testcontainers end-->
</dependencies>
<build>
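The hunk above adds test-scoped Testcontainers modules for Kafka, MySQL, and Elasticsearch. As a hedged illustration only (not taken from this commit; the image tag and test name are assumptions), this is how such a module is typically used in a JUnit 5 test:

```java
// Minimal sketch, assuming the mysql Testcontainers module added above.
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.testcontainers.containers.MySQLContainer;
import org.testcontainers.junit.jupiter.Container;
import org.testcontainers.junit.jupiter.Testcontainers;

@Testcontainers
class MySqlContainerSmokeTest {

    // Throwaway MySQL instance, started before the tests and removed afterwards
    @Container
    private static final MySQLContainer<?> MYSQL = new MySQLContainer<>("mysql:5.7");

    @Test
    void jdbcUrlIsAvailable() {
        // The JDBC URL points at the container's dynamically mapped host port
        Assertions.assertTrue(MYSQL.getJdbcUrl().startsWith("jdbc:mysql://"));
    }
}
```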

View File

@@ -0,0 +1,133 @@
package com.xiaojukeji.know.streaming.km.rest.api.v3.cluster;
import com.xiaojukeji.know.streaming.km.biz.cluster.ClusterConnectorsManager;
import com.xiaojukeji.know.streaming.km.common.bean.dto.cluster.ClusterConnectorsOverviewDTO;
import com.xiaojukeji.know.streaming.km.common.bean.dto.metrices.connect.MetricsConnectClustersDTO;
import com.xiaojukeji.know.streaming.km.common.bean.dto.metrices.connect.MetricsConnectorsDTO;
import com.xiaojukeji.know.streaming.km.common.bean.dto.pagination.PaginationBaseDTO;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.PaginationResult;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
import com.xiaojukeji.know.streaming.km.common.bean.vo.cluster.connect.ConnectClusterBasicCombineExistVO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.cluster.connect.ConnectClusterBasicVO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.cluster.connector.ClusterWorkerOverviewVO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.cluster.connector.ConnectorBasicVO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.cluster.connector.ClusterConnectorOverviewVO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.cluster.connect.ConnectStateVO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.metrics.line.MetricMultiLinesVO;
import com.xiaojukeji.know.streaming.km.common.constant.ApiPrefix;
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import com.xiaojukeji.know.streaming.km.common.converter.ConnectConverter;
import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
import com.xiaojukeji.know.streaming.km.core.service.connect.cluster.ConnectClusterMetricService;
import com.xiaojukeji.know.streaming.km.core.service.connect.cluster.ConnectClusterService;
import com.xiaojukeji.know.streaming.km.core.service.connect.connector.ConnectorMetricService;
import com.xiaojukeji.know.streaming.km.core.service.connect.connector.ConnectorService;
import com.xiaojukeji.know.streaming.km.core.service.connect.worker.WorkerService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.*;
import java.util.List;
/**
* @author zengqiao
* @date 22/10/27
*/
@Api(tags = Constant.SWAGGER_API_TAG_PREFIX + "Cluster-Connects APIs (REST)")
@RestController
@RequestMapping(ApiPrefix.API_V3_PREFIX) // API_V3_PREFIX is used here instead of API_V3_CONNECT_PREFIX because these endpoints belong to the Kafka cluster pages
public class ClusterConnectsController {
@Autowired
private ConnectorService connectorService;
@Autowired
private ConnectorMetricService connectorMetricService;
@Autowired
private ConnectClusterService connectClusterService;
@Autowired
private ConnectClusterMetricService connectClusterMetricService;
@Autowired
private WorkerService workerService;
@Autowired
private ClusterConnectorsManager clusterConnectorsManager;
/**************************************************** connect method ****************************************************/
@ApiOperation(value = "Connect集群基本信息", notes = "")
@GetMapping(value = "kafka-clusters/{clusterPhyId}/connect-clusters/{connectClusterName}/basic-combine-exist")
@ResponseBody
public Result<ConnectClusterBasicCombineExistVO> getBasicCombineExist(@PathVariable Long clusterPhyId,
@PathVariable String connectClusterName) {
return Result.buildSuc(ConnectConverter.convert2ConnectClusterBasicCombineExistVO(
connectClusterService.getByName(clusterPhyId, connectClusterName))
);
}
@ApiOperation(value = "Connect集群基本信息列表", notes = "")
@GetMapping(value = "kafka-clusters/{clusterPhyId}/connect-clusters-basic")
@ResponseBody
public Result<List<ConnectClusterBasicVO>> getClusterConnectClustersBasic(@PathVariable Long clusterPhyId) {
return Result.buildSuc(ConvertUtil.list2List(connectClusterService.listByKafkaCluster(clusterPhyId), ConnectClusterBasicVO.class));
}
@ApiOperation(value = "Connect集群指标信息")
@PostMapping(value = "kafka-clusters/{clusterPhyId}/connect-cluster-metrics")
@ResponseBody
public Result<List<MetricMultiLinesVO>> getConnectClusterMetrics(@PathVariable Long clusterPhyId,
@Validated @RequestBody MetricsConnectClustersDTO dto) {
return connectClusterMetricService.listConnectClusterMetricsFromES(clusterPhyId, dto);
}
@ApiOperation(value = "集群Connectors状态", notes = "")
@GetMapping(value = "kafka-clusters/{clusterPhyId}/connect-state")
@ResponseBody
public Result<ConnectStateVO> getClusterConnectorsState(@PathVariable Long clusterPhyId) {
return Result.buildSuc(clusterConnectorsManager.getClusterConnectorsState(clusterPhyId));
}
/**************************************************** connector method ****************************************************/
@ApiOperation(value = "Connectors基本信息", notes = "")
@GetMapping(value = "clusters/{clusterPhyId}/connectors-basic")
@ResponseBody
public Result<List<ConnectorBasicVO>> getClusterConnectorsBasic(@PathVariable Long clusterPhyId) {
return Result.buildSuc(
ConnectConverter.convert2BasicVOList(
connectClusterService.listByKafkaCluster(clusterPhyId),
connectorService.listByKafkaClusterIdFromDB(clusterPhyId)
)
);
}
@ApiOperation(value = "Connectors概览列表", notes = "")
@PostMapping(value = "clusters/{clusterPhyId}/connectors-overview")
@ResponseBody
public PaginationResult<ClusterConnectorOverviewVO> getClusterConnectorsOverview(@PathVariable Long clusterPhyId,
@Validated @RequestBody ClusterConnectorsOverviewDTO dto) {
return clusterConnectorsManager.getClusterConnectorsOverview(clusterPhyId, dto);
}
@ApiOperation(value = "集群Connectors指标信息")
@PostMapping(value = "clusters/{clusterPhyId}/connectors-metrics")
@ResponseBody
public Result<List<MetricMultiLinesVO>> getClusterPhyMetrics(@PathVariable Long clusterPhyId,
@Validated @RequestBody MetricsConnectorsDTO dto) {
return connectorMetricService.listConnectClusterMetricsFromES(clusterPhyId, dto);
}
/**************************************************** worker method ****************************************************/
@ApiOperation(value = "Worker overview list", notes = "")
@GetMapping(value = "clusters/{clusterPhyId}/workers-overview")
@ResponseBody
public PaginationResult<ClusterWorkerOverviewVO> getClusterWorkersOverview(@PathVariable Long clusterPhyId, PaginationBaseDTO dto) {
return workerService.pageWorkByKafkaClusterPhy(clusterPhyId, dto);
}
}
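For orientation, a hedged sketch of calling one of the new endpoints with the JDK HTTP client. Host, port, and the /ks-km/api/v3 prefix are assumptions; the real prefix is whatever ApiPrefix.API_V3_PREFIX resolves to:

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class ConnectStateProbe {
    public static void main(String[] args) throws Exception {
        long clusterPhyId = 1L; // hypothetical physical cluster id
        // GET the aggregated connect state of one Kafka cluster
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8080/ks-km/api/v3/kafka-clusters/"
                        + clusterPhyId + "/connect-state"))
                .GET()
                .build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.body()); // JSON-wrapped ConnectStateVO
    }
}
```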

View File

@@ -2,21 +2,20 @@ package com.xiaojukeji.know.streaming.km.rest.api.v3.cluster;
import com.xiaojukeji.know.streaming.km.biz.group.GroupManager;
import com.xiaojukeji.know.streaming.km.common.bean.dto.cluster.ClusterGroupSummaryDTO;
import com.xiaojukeji.know.streaming.km.common.bean.dto.cluster.ClusterGroupsOverviewDTO;
import com.xiaojukeji.know.streaming.km.common.bean.dto.metrices.MetricGroupPartitionDTO;
import com.xiaojukeji.know.streaming.km.common.bean.dto.pagination.PaginationBaseDTO;
import com.xiaojukeji.know.streaming.km.common.bean.dto.pagination.field.PaginationFuzzySearchFieldDTO;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.PaginationResult;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
import com.xiaojukeji.know.streaming.km.common.bean.entity.topic.TopicPartitionKS;
import com.xiaojukeji.know.streaming.km.common.bean.vo.group.GroupOverviewVO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.group.GroupTopicBasicVO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.metrics.line.MetricMultiLinesVO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.group.GroupTopicOverviewVO;
import com.xiaojukeji.know.streaming.km.common.constant.ApiPrefix;
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import com.xiaojukeji.know.streaming.km.common.utils.Tuple;
import com.xiaojukeji.know.streaming.km.common.utils.ValidateUtils;
import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
import com.xiaojukeji.know.streaming.km.core.service.group.GroupMetricService;
import com.xiaojukeji.know.streaming.km.core.service.group.GroupService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
@@ -37,24 +36,17 @@ public class ClusterGroupsController {
@Autowired
private GroupManager groupManager;
@Autowired
private GroupService groupService;
@Autowired
private GroupMetricService groupMetricService;
- @Deprecated
- @ApiOperation(value = "Cluster Groups list", notes = "deprecated, to be removed in the next version")
- @PostMapping(value = "clusters/{clusterPhyId}/groups-overview")
+ @ApiOperation(value = "Cluster Groups list")
+ @GetMapping(value = "clusters/{clusterPhyId}/groups-basic")
@ResponseBody
- public PaginationResult<GroupTopicOverviewVO> getClusterPhyGroupsOverview(@PathVariable Long clusterPhyId,
- @RequestBody ClusterGroupsOverviewDTO dto) {
- Tuple<String, String> searchKeyTuple = this.getSearchKeyWords(dto);
- return groupManager.pagingGroupMembers(
- clusterPhyId,
- dto.getTopicName(),
- dto.getGroupName(),
- searchKeyTuple.getV1(),
- searchKeyTuple.getV2(),
- dto
- );
+ public Result<List<GroupTopicBasicVO>> getGroupsBasic(@PathVariable Long clusterPhyId) {
+ return Result.buildSuc(ConvertUtil.list2List(groupService.listGroupByCluster(clusterPhyId), GroupTopicBasicVO.class));
}
@ApiOperation(value = "集群Groups信息列表")
@@ -90,24 +82,4 @@ public class ClusterGroupsController {
}
/**************************************************** private method ****************************************************/
- @Deprecated
- private Tuple<String, String> getSearchKeyWords(ClusterGroupsOverviewDTO dto) {
- if (ValidateUtils.isEmptyList(dto.getFuzzySearchDTOList())) {
- return new Tuple<>("", "");
- }
- String searchTopicName = "";
- String searchGroupName = "";
- for (PaginationFuzzySearchFieldDTO searchFieldDTO: dto.getFuzzySearchDTOList()) {
- if (searchFieldDTO.getFieldName().equals("topicName")) {
- searchTopicName = searchFieldDTO.getFieldValue();
- }
- if (searchFieldDTO.getFieldName().equals("groupName")) {
- searchGroupName = searchFieldDTO.getFieldValue();
- }
- }
- return new Tuple<>(searchTopicName, searchGroupName);
- }
}

View File

@@ -0,0 +1,82 @@
package com.xiaojukeji.know.streaming.km.rest.api.v3.cluster;
import com.xiaojukeji.know.streaming.km.biz.connect.mm2.MirrorMakerManager;
import com.xiaojukeji.know.streaming.km.common.bean.dto.cluster.ClusterMirrorMakersOverviewDTO;
import com.xiaojukeji.know.streaming.km.common.bean.dto.metrices.mm2.MetricsMirrorMakersDTO;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.PaginationResult;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
import com.xiaojukeji.know.streaming.km.common.bean.vo.cluster.mm2.ClusterMirrorMakerOverviewVO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.cluster.mm2.MirrorMakerBasicVO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.cluster.mm2.MirrorMakerStateVO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.metrics.line.MetricMultiLinesVO;
import com.xiaojukeji.know.streaming.km.common.constant.ApiPrefix;
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import com.xiaojukeji.know.streaming.km.common.constant.connect.KafkaConnectConstant;
import com.xiaojukeji.know.streaming.km.common.converter.ConnectConverter;
import com.xiaojukeji.know.streaming.km.core.service.connect.cluster.ConnectClusterService;
import com.xiaojukeji.know.streaming.km.core.service.connect.connector.ConnectorService;
import com.xiaojukeji.know.streaming.km.core.service.connect.mm2.MirrorMakerMetricService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.*;
import java.util.List;
import java.util.stream.Collectors;
/**
* @author zengqiao
* @date 22/12/12
*/
@Api(tags = Constant.SWAGGER_API_TAG_PREFIX + "Cluster-MM2s APIs (REST)")
@RestController
@RequestMapping(ApiPrefix.API_V3_PREFIX) // API_V3_PREFIX is used here instead of API_V3_CONNECT_PREFIX because these endpoints belong to the Kafka cluster pages
public class ClusterMirrorMakersController {
@Autowired
private MirrorMakerMetricService mirrorMakerMetricService;
@Autowired
private MirrorMakerManager mirrorMakerManager;
@Autowired
private ConnectClusterService connectClusterService;
@Autowired
private ConnectorService connectorService;
@ApiOperation(value = "集群MM2状态", notes = "")
@GetMapping(value = "kafka-clusters/{clusterPhyId}/mirror-makers-state")
@ResponseBody
public Result<MirrorMakerStateVO> getClusterMM2State(@PathVariable Long clusterPhyId) {
return mirrorMakerManager.getMirrorMakerStateVO(clusterPhyId);
}
@ApiOperation(value = "集群MM2基本信息", notes = "")
@GetMapping(value = "clusters/{clusterPhyId}/mirror-makers-basic")
@ResponseBody
public Result<List<MirrorMakerBasicVO>> getClusterMirrorMakersBasic(@PathVariable Long clusterPhyId) {
return Result.buildSuc(
ConnectConverter.convert2MirrorMakerBasicVOList(
connectClusterService.listByKafkaCluster(clusterPhyId),
connectorService.listByKafkaClusterIdFromDB(clusterPhyId).stream().filter(elem -> elem.getConnectorClassName().equals(KafkaConnectConstant.MIRROR_MAKER_SOURCE_CONNECTOR_TYPE)).collect(Collectors.toList())
)
);
}
@ApiOperation(value = "集群MM2概览列表", notes = "")
@PostMapping(value = "clusters/{clusterPhyId}/mirror-makers-overview")
@ResponseBody
public PaginationResult<ClusterMirrorMakerOverviewVO> getClusterMirrorMakersOverview(@PathVariable Long clusterPhyId,
@Validated @RequestBody ClusterMirrorMakersOverviewDTO dto) {
return mirrorMakerManager.getClusterMirrorMakersOverview(clusterPhyId, dto);
}
@ApiOperation(value = "集群MM2指标信息")
@PostMapping(value = "clusters/{clusterPhyId}/mirror-makers-metrics")
@ResponseBody
public Result<List<MetricMultiLinesVO>> getClusterMirrorMakersMetrics(@PathVariable Long clusterPhyId,
@Validated @RequestBody MetricsMirrorMakersDTO dto) {
return mirrorMakerMetricService.listMirrorMakerClusterMetricsFromES(clusterPhyId, dto);
}
}

View File

@@ -4,6 +4,7 @@ import com.xiaojukeji.know.streaming.km.biz.cluster.MultiClusterPhyManager;
import com.xiaojukeji.know.streaming.km.common.bean.dto.cluster.MultiClusterDashboardDTO;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.PaginationResult;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
import com.xiaojukeji.know.streaming.km.common.bean.vo.cluster.ClusterPhyBaseVO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.cluster.ClusterPhysHealthStateVO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.cluster.ClusterPhysStateVO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.cluster.ClusterPhyDashboardVO;
@@ -37,10 +38,17 @@ public class MultiClusterPhyController {
@ApiOperation(value = "多物理集群-大盘", notes = "")
@PostMapping(value = "physical-clusters/dashboard")
@ResponseBody
public PaginationResult<ClusterPhyDashboardVO> getClusterPhyBasic(@RequestBody @Validated MultiClusterDashboardDTO dto) {
public PaginationResult<ClusterPhyDashboardVO> getClusterPhyDashboard(@RequestBody @Validated MultiClusterDashboardDTO dto) {
return multiClusterPhyManager.getClusterPhysDashboard(dto);
}
@ApiOperation(value = "多物理集群-基本信息", notes = "")
@GetMapping(value = "physical-clusters/basic")
@ResponseBody
public Result<List<ClusterPhyBaseVO>> getClusterPhyBasic() {
return multiClusterPhyManager.getClusterPhysBasic();
}
@ApiOperation(value = "多物理集群-状态", notes = "")
@GetMapping(value = "physical-clusters/state")
@ResponseBody

View File

@@ -0,0 +1,42 @@
package com.xiaojukeji.know.streaming.km.rest.api.v3.connect;
import com.didiglobal.logi.security.util.HttpRequestUtil;
import com.xiaojukeji.know.streaming.km.common.bean.dto.connect.cluster.ConnectClusterDTO;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
import com.xiaojukeji.know.streaming.km.common.constant.ApiPrefix;
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import com.xiaojukeji.know.streaming.km.core.service.connect.cluster.ConnectClusterService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.*;
import java.util.List;
/**
* @author zengqiao
* @date 22/10/17
*/
@Api(tags = Constant.SWAGGER_API_TAG_PREFIX + "Connect-Cluster APIs (REST)")
@RestController
@RequestMapping(ApiPrefix.API_V3_CONNECT_PREFIX)
public class KafkaConnectClusterController {
@Autowired
private ConnectClusterService connectClusterService;
@ApiOperation(value = "删除Connect集群")
@DeleteMapping(value = "connect-clusters")
@ResponseBody
public Result<Void> deleteConnectCluster(@RequestParam("connectClusterId") Long connectClusterId) {
return connectClusterService.deleteInDB(connectClusterId, HttpRequestUtil.getOperator());
}
@ApiOperation(value = "修改Connect集群", notes = "")
@PutMapping(value = "batch-connect-clusters")
@ResponseBody
public Result<Void> batchModifyConnectCluster(@Validated @RequestBody List<ConnectClusterDTO> dtoList) {
return connectClusterService.batchModifyInDB(dtoList, HttpRequestUtil.getOperator());
}
}

View File

@@ -0,0 +1,56 @@
package com.xiaojukeji.know.streaming.km.rest.api.v3.connect;
import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.config.ConnectConfigInfos;
import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.plugin.ConnectPluginBasic;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
import com.xiaojukeji.know.streaming.km.common.bean.vo.connect.plugin.ConnectConfigInfosVO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.connect.plugin.ConnectPluginBasicVO;
import com.xiaojukeji.know.streaming.km.common.constant.ApiPrefix;
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
import com.xiaojukeji.know.streaming.km.core.service.connect.plugin.PluginService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.List;
/**
* @author zengqiao
* @date 22/10/17
*/
@Api(tags = Constant.SWAGGER_API_TAG_PREFIX + "Connect-Plugin APIs (REST)")
@RestController
@RequestMapping(ApiPrefix.API_V3_CONNECT_PREFIX)
public class KafkaConnectPluginController {
@Autowired
private PluginService pluginService;
@ApiOperation(value = "Connect集群插件", notes = "")
@GetMapping(value = "clusters/{connectClusterId}/connector-plugins")
@ResponseBody
public Result<List<ConnectPluginBasicVO>> getConnectorPlugins(@PathVariable Long connectClusterId) {
Result<List<ConnectPluginBasic>> listResult = pluginService.listPluginsFromCluster(connectClusterId);
if (listResult.failed()) {
return Result.buildFromIgnoreData(listResult);
}
listResult.getData().forEach(elem -> elem.setHelpDocLink("https://www.confluent.io/hub/"));
return Result.buildSuc(ConvertUtil.list2List(listResult.getData(), ConnectPluginBasicVO.class));
}
@ApiOperation(value = "Connect插件配置", notes = "")
@GetMapping(value = "clusters/{connectClusterId}/connector-plugins/{pluginName}/config")
@ResponseBody
public Result<ConnectConfigInfosVO> getPluginConfig(@PathVariable Long connectClusterId, @PathVariable String pluginName) {
Result<ConnectConfigInfos> infosResult = pluginService.getConfig(connectClusterId, pluginName);
if (infosResult.failed()) {
return Result.buildFromIgnoreData(infosResult);
}
return Result.buildSuc(ConvertUtil.obj2Obj(infosResult.getData(), ConnectConfigInfosVO.class));
}
}

View File

@@ -0,0 +1,90 @@
package com.xiaojukeji.know.streaming.km.rest.api.v3.connect;
import com.didiglobal.logi.security.util.HttpRequestUtil;
import com.xiaojukeji.know.streaming.km.biz.connect.connector.ConnectorManager;
import com.xiaojukeji.know.streaming.km.common.bean.dto.connect.connector.ConnectorActionDTO;
import com.xiaojukeji.know.streaming.km.common.bean.dto.connect.connector.ConnectorCreateDTO;
import com.xiaojukeji.know.streaming.km.common.bean.dto.connect.connector.ConnectorDeleteDTO;
import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.config.ConnectConfigInfos;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.ResultStatus;
import com.xiaojukeji.know.streaming.km.common.bean.vo.connect.plugin.ConnectConfigInfosVO;
import com.xiaojukeji.know.streaming.km.common.constant.ApiPrefix;
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import com.xiaojukeji.know.streaming.km.common.enums.connect.ConnectActionEnum;
import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
import com.xiaojukeji.know.streaming.km.core.service.connect.connector.ConnectorService;
import com.xiaojukeji.know.streaming.km.core.service.connect.plugin.PluginService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.*;
/**
* @author zengqiao
* @date 22/10/17
*/
@Api(tags = Constant.SWAGGER_API_TAG_PREFIX + "Connect-Connector APIs (REST)")
@RestController
@RequestMapping(ApiPrefix.API_V3_CONNECT_PREFIX)
public class KafkaConnectorController {
@Autowired
private ConnectorService connectorService;
@Autowired
private ConnectorManager connectorManager;
@Autowired
private PluginService pluginService;
@ApiOperation(value = "创建Connector", notes = "")
@PostMapping(value = "connectors")
@ResponseBody
public Result<Void> createConnector(@Validated @RequestBody ConnectorCreateDTO dto) {
return connectorManager.createConnector(dto, HttpRequestUtil.getOperator());
}
@ApiOperation(value = "删除Connector", notes = "")
@DeleteMapping(value ="connectors")
@ResponseBody
public Result<Void> deleteConnectors(@Validated @RequestBody ConnectorDeleteDTO dto) {
return connectorService.deleteConnector(dto.getConnectClusterId(), dto.getConnectorName(), HttpRequestUtil.getOperator());
}
@ApiOperation(value = "操作Connector", notes = "")
@PutMapping(value ="connectors")
@ResponseBody
public Result<Void> operateConnectors(@Validated @RequestBody ConnectorActionDTO dto) {
if (ConnectActionEnum.RESTART.getValue().equals(dto.getAction())) {
return connectorService.restartConnector(dto.getConnectClusterId(), dto.getConnectorName(), HttpRequestUtil.getOperator());
} else if (ConnectActionEnum.STOP.getValue().equals(dto.getAction())) {
return connectorService.stopConnector(dto.getConnectClusterId(), dto.getConnectorName(), HttpRequestUtil.getOperator());
} else if (ConnectActionEnum.RESUME.getValue().equals(dto.getAction())) {
return connectorService.resumeConnector(dto.getConnectClusterId(), dto.getConnectorName(), HttpRequestUtil.getOperator());
}
return Result.buildFailure(ResultStatus.PARAM_ILLEGAL);
}
@ApiOperation(value = "修改Connector配置", notes = "")
@PutMapping(value ="connectors-config")
@ResponseBody
public Result<Void> modifyConnectors(@Validated @RequestBody ConnectorCreateDTO dto) {
return connectorManager.updateConnectorConfig(dto.getConnectClusterId(), dto.getConnectorName(), dto.getConfigs(), HttpRequestUtil.getOperator());
}
@ApiOperation(value = "校验Connector配置", notes = "")
@PutMapping(value ="connectors-config/validate")
@ResponseBody
public Result<ConnectConfigInfosVO> validateConnectors(@Validated @RequestBody ConnectorCreateDTO dto) {
Result<ConnectConfigInfos> infoResult = pluginService.validateConfig(dto.getConnectClusterId(), dto.getConfigs());
if (infoResult.failed()) {
return Result.buildFromIgnoreData(infoResult);
}
return Result.buildSuc(ConvertUtil.obj2Obj(infoResult.getData(), ConnectConfigInfosVO.class));
}
}
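operateConnectors above dispatches on ConnectActionEnum.RESTART/STOP/RESUME via getValue(). The enum itself is not part of this diff; a plausible shape, with guessed string values, would be:

```java
// Hypothetical reconstruction of ConnectActionEnum, for illustration only;
// the string values "restart"/"stop"/"resume" are assumptions.
public enum ConnectActionEnum {
    RESTART("restart"),
    STOP("stop"),
    RESUME("resume");

    private final String value;

    ConnectActionEnum(String value) {
        this.value = value;
    }

    // Compared against the raw action string carried by ConnectorActionDTO
    public String getValue() {
        return value;
    }
}
```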

View File

@@ -0,0 +1,98 @@
package com.xiaojukeji.know.streaming.km.rest.api.v3.connect;
import com.xiaojukeji.know.streaming.km.biz.connect.connector.ConnectorManager;
import com.xiaojukeji.know.streaming.km.biz.connect.connector.WorkerConnectorManager;
import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.connector.KSConnectorInfo;
import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.connect.ConnectorMetrics;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
import com.xiaojukeji.know.streaming.km.common.bean.vo.cluster.connector.ConnectorBasicCombineExistVO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.connect.connector.ConnectorStateVO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.connect.task.KCTaskOverviewVO;
import com.xiaojukeji.know.streaming.km.common.constant.ApiPrefix;
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import com.xiaojukeji.know.streaming.km.common.converter.ConnectConverter;
import com.xiaojukeji.know.streaming.km.core.service.connect.cluster.ConnectClusterService;
import com.xiaojukeji.know.streaming.km.core.service.connect.connector.ConnectorMetricService;
import com.xiaojukeji.know.streaming.km.core.service.connect.connector.ConnectorService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.List;
import java.util.Properties;
/**
* @author zengqiao
* @date 22/10/17
*/
@Api(tags = Constant.SWAGGER_API_TAG_PREFIX + "Connect-Connector state APIs (REST)")
@RestController
@RequestMapping(ApiPrefix.API_V3_CONNECT_PREFIX)
public class KafkaConnectorStateController {
@Autowired
private ConnectorService connectorService;
@Autowired
private ConnectorMetricService connectorMetricService;
@Autowired
private WorkerConnectorManager workerConnectorManager;
@Autowired
private ConnectorManager connectorManager;
@Autowired
private ConnectClusterService connectClusterService;
@ApiOperation(value = "Connectors基本信息", notes = "")
@GetMapping(value = "clusters/{connectClusterId}/connectors/{connectorName}/basic-combine-exist")
@ResponseBody
public Result<ConnectorBasicCombineExistVO> getConnectorBasicCombineExist(@PathVariable Long connectClusterId, @PathVariable String connectorName) {
return Result.buildSuc(
ConnectConverter.convert2BasicVO(
connectClusterService.getById(connectClusterId),
connectorService.getConnectorFromDB(connectClusterId, connectorName)
)
);
}
@ApiOperation(value = "Connector配置", notes = "")
@GetMapping(value = "clusters/{connectClusterId}/connectors/{connectorName}/config")
@ResponseBody
public Result<Properties> getConnectorConfig(@PathVariable Long connectClusterId, @PathVariable String connectorName) {
Result<KSConnectorInfo> connectorResult = connectorService.getConnectorInfoFromCluster(connectClusterId, connectorName);
if (connectorResult.failed()) {
return Result.buildFromIgnoreData(connectorResult);
}
Properties props = new Properties();
props.putAll(connectorResult.getData().getConfig());
return Result.buildSuc(props);
}
@ApiOperation(value = "获取Connector的Task列表", notes = "")
@GetMapping(value = "clusters/{connectClusterId}/connectors/{connectorName}/tasks")
@ResponseBody
public Result<List<KCTaskOverviewVO>> getConnectorTasks(@PathVariable Long connectClusterId, @PathVariable String connectorName) {
return workerConnectorManager.getTaskOverview(connectClusterId, connectorName);
}
@ApiOperation(value = "Connector近期指标")
@PostMapping(value = "clusters/{connectClusterId}/connectors/{connectorName}/latest-metrics")
@ResponseBody
public Result<ConnectorMetrics> getConnectorLatestMetrics(@PathVariable Long connectClusterId,
@PathVariable String connectorName,
@RequestBody List<String> metricsNames) {
return connectorMetricService.getLatestMetricsFromES(connectClusterId, connectorName, metricsNames);
}
@ApiOperation(value = "获取Connector的状态", notes = "")
@GetMapping(value = "clusters/{connectClusterId}/connectors/{connectorName}/state")
@ResponseBody
public Result<ConnectorStateVO> getConnectorStateVO(@PathVariable Long connectClusterId, @PathVariable String connectorName) {
return connectorManager.getConnectorStateVO(connectClusterId, connectorName);
}
}

View File

@@ -0,0 +1,33 @@
package com.xiaojukeji.know.streaming.km.rest.api.v3.connect;
import com.xiaojukeji.know.streaming.km.common.bean.dto.connect.task.TaskActionDTO;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
import com.xiaojukeji.know.streaming.km.common.constant.ApiPrefix;
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import com.xiaojukeji.know.streaming.km.core.service.connect.worker.WorkerConnectorService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.*;
/**
* @author zengqiao
* @date 22/10/17
*/
@Api(tags = Constant.SWAGGER_API_TAG_PREFIX + "Connect-Task APIs (REST)")
@RestController
@RequestMapping(ApiPrefix.API_V3_CONNECT_PREFIX)
public class KafkaTaskController {
@Autowired
private WorkerConnectorService workerConnectorService;
@ApiOperation(value = "操作Task", notes = "")
@PutMapping(value ="tasks")
@ResponseBody
public Result<Void> actionTask(@Validated @RequestBody TaskActionDTO dto) {
return workerConnectorService.actionTask(dto);
}
}

View File

@@ -0,0 +1,76 @@
package com.xiaojukeji.know.streaming.km.rest.api.v3.connect.mm2;
import com.didiglobal.logi.security.util.HttpRequestUtil;
import com.xiaojukeji.know.streaming.km.biz.connect.mm2.MirrorMakerManager;
import com.xiaojukeji.know.streaming.km.common.bean.dto.connect.mm2.MirrorMaker2ActionDTO;
import com.xiaojukeji.know.streaming.km.common.bean.dto.connect.mm2.MirrorMakerCreateDTO;
import com.xiaojukeji.know.streaming.km.common.bean.dto.connect.mm2.MirrorMaker2DeleteDTO;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.ResultStatus;
import com.xiaojukeji.know.streaming.km.common.bean.vo.connect.plugin.ConnectConfigInfosVO;
import com.xiaojukeji.know.streaming.km.common.constant.ApiPrefix;
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import com.xiaojukeji.know.streaming.km.common.enums.connect.ConnectActionEnum;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.*;
import java.util.List;
/**
* @author zengqiao
* @date 22/12/12
*/
@Api(tags = Constant.SWAGGER_API_TAG_PREFIX + "MM2 APIs (REST)")
@RestController
@RequestMapping(ApiPrefix.API_V3_MM2_PREFIX)
public class KafkaMirrorMakerController {
@Autowired
private MirrorMakerManager mirrorMakerManager;
@ApiOperation(value = "创建MM2", notes = "")
@PostMapping(value = "mirror-makers")
@ResponseBody
public Result<Void> createMM2(@Validated @RequestBody MirrorMakerCreateDTO dto) {
return mirrorMakerManager.createMirrorMaker(dto, HttpRequestUtil.getOperator());
}
@ApiOperation(value = "删除MM2", notes = "")
@DeleteMapping(value ="mirror-makers")
@ResponseBody
public Result<Void> deleteMM2(@Validated @RequestBody MirrorMaker2DeleteDTO dto) {
return mirrorMakerManager.deleteMirrorMaker(dto.getConnectClusterId(), dto.getConnectorName(), HttpRequestUtil.getOperator());
}
@ApiOperation(value = "操作MM2", notes = "")
@PutMapping(value ="mirror-makers")
@ResponseBody
public Result<Void> operateMM2s(@Validated @RequestBody MirrorMaker2ActionDTO dto) {
if (ConnectActionEnum.RESTART.getValue().equals(dto.getAction())) {
return mirrorMakerManager.restartMirrorMaker(dto.getConnectClusterId(), dto.getConnectorName(), HttpRequestUtil.getOperator());
} else if (ConnectActionEnum.STOP.getValue().equals(dto.getAction())) {
return mirrorMakerManager.stopMirrorMaker(dto.getConnectClusterId(), dto.getConnectorName(), HttpRequestUtil.getOperator());
} else if (ConnectActionEnum.RESUME.getValue().equals(dto.getAction())) {
return mirrorMakerManager.resumeMirrorMaker(dto.getConnectClusterId(), dto.getConnectorName(), HttpRequestUtil.getOperator());
}
return Result.buildFailure(ResultStatus.PARAM_ILLEGAL);
}
@ApiOperation(value = "MM2配置修改", notes = "")
@PutMapping(value ="mirror-makers-config")
@ResponseBody
public Result<Void> modifyMM2s(@Validated @RequestBody MirrorMakerCreateDTO dto) {
return mirrorMakerManager.modifyMirrorMakerConfig(dto, HttpRequestUtil.getOperator());
}
@ApiOperation(value = "校验MM2配置", notes = "")
@PutMapping(value ="mirror-makers-config/validate")
@ResponseBody
public Result<List<ConnectConfigInfosVO>> validateConnectors(@Validated @RequestBody MirrorMakerCreateDTO dto) {
return mirrorMakerManager.validateConnectors(dto);
}
}

View File

@@ -0,0 +1,68 @@
package com.xiaojukeji.know.streaming.km.rest.api.v3.connect.mm2;
import com.didiglobal.logi.security.util.HttpRequestUtil;
import com.xiaojukeji.know.streaming.km.biz.connect.mm2.MirrorMakerManager;
import com.xiaojukeji.know.streaming.km.common.bean.dto.connect.mm2.MirrorMakerCreateDTO;
import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.mm2.MirrorMakerMetrics;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
import com.xiaojukeji.know.streaming.km.common.bean.vo.cluster.mm2.MirrorMakerBaseStateVO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.connect.task.KCTaskOverviewVO;
import com.xiaojukeji.know.streaming.km.common.constant.ApiPrefix;
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import com.xiaojukeji.know.streaming.km.core.service.connect.mm2.MirrorMakerMetricService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.*;
import java.util.List;
import java.util.Map;
import java.util.Properties;
/**
* @author zengqiao
* @date 22/12/12
*/
@Api(tags = Constant.SWAGGER_API_TAG_PREFIX + "MM2 state APIs (REST)")
@RestController
@RequestMapping(ApiPrefix.API_V3_MM2_PREFIX)
public class KafkaMirrorMakerStateController {
@Autowired
private MirrorMakerManager mirrorMakerManager;
@Autowired
private MirrorMakerMetricService mirrorMakerMetricService;
@ApiOperation(value = "获取mm2任务的状态", notes = "")
@GetMapping(value = "clusters/{connectClusterId}/connectors/{connectorName}/state")
@ResponseBody
public Result<MirrorMakerBaseStateVO> getMirrorMakerStateVO(@PathVariable Long connectClusterId, @PathVariable String connectorName) {
return mirrorMakerManager.getMirrorMakerState(connectClusterId, connectorName);
}
@ApiOperation(value = "获取MM2的Task列表", notes = "")
@GetMapping(value = "clusters/{connectClusterId}/connectors/{connectorName}/tasks")
@ResponseBody
public Result<Map<String, List<KCTaskOverviewVO>>> getConnectorTasks(@PathVariable Long connectClusterId, @PathVariable String connectorName) {
return mirrorMakerManager.getTaskOverview(connectClusterId, connectorName);
}
@ApiOperation(value = "MM2配置", notes = "")
@GetMapping(value ="clusters/{connectClusterId}/connectors/{connectorName}/config")
@ResponseBody
public Result<List<Properties>> getMM2Configs(@PathVariable Long connectClusterId, @PathVariable String connectorName) {
return mirrorMakerManager.getMM2Configs(connectClusterId, connectorName);
}
@ApiOperation(value = "Connector近期指标")
@PostMapping(value = "clusters/{connectClusterId}/connectors/{mirrorMakerName}/latest-metrics")
@ResponseBody
public Result<MirrorMakerMetrics> getMirrorMakerLatestMetrics(@PathVariable Long connectClusterId,
@PathVariable String mirrorMakerName,
@RequestBody List<String> metricsNames) {
return mirrorMakerMetricService.getLatestMetricsFromES(connectClusterId, mirrorMakerName, metricsNames);
}
}

View File

@@ -0,0 +1,37 @@
package com.xiaojukeji.know.streaming.km.rest.api.v3.enterprise.mirror;
import com.xiaojukeji.know.streaming.km.common.bean.entity.cluster.ClusterPhy;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
import com.xiaojukeji.know.streaming.km.common.bean.vo.cluster.ClusterPhyBaseVO;
import com.xiaojukeji.know.streaming.km.common.constant.ApiPrefix;
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
import com.xiaojukeji.know.streaming.km.core.service.cluster.ClusterPhyService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.List;
import java.util.stream.Collectors;
/**
* @author zengqiao
* @date 22/12/12
*/
@Api(tags = Constant.SWAGGER_API_TAG_PREFIX + "Mirror-Cluster APIs (REST)")
@RestController
@RequestMapping(ApiPrefix.API_V3_HA_MIRROR_PREFIX)
public class MirrorClusterController {
@Autowired
private ClusterPhyService clusterPhyService;
@ApiOperation(value = "集群列表(支持Mirror)", notes = "")
@GetMapping(value = "physical-clusters/basic")
@ResponseBody
public Result<List<ClusterPhyBaseVO>> listClusters() {
List<ClusterPhy> clusterPhyList = clusterPhyService.listAllClusters().stream().filter(item -> item.getKafkaVersion().contains("2.5.0-d-")).collect(Collectors.toList());
return Result.buildSuc(ConvertUtil.list2List(clusterPhyList, ClusterPhyBaseVO.class));
}
}

View File

@@ -0,0 +1,51 @@
package com.xiaojukeji.know.streaming.km.rest.api.v3.enterprise.mirror;
import com.xiaojukeji.know.streaming.km.common.bean.dto.ha.mirror.MirrorTopicCreateDTO;
import com.xiaojukeji.know.streaming.km.common.bean.dto.ha.mirror.MirrorTopicDeleteDTO;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
import com.xiaojukeji.know.streaming.km.common.bean.vo.ha.mirror.TopicMirrorInfoVO;
import com.xiaojukeji.know.streaming.km.common.constant.ApiPrefix;
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import com.xiaojukeji.know.streaming.km.ha.mirror.service.MirrorTopicService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.*;
import java.util.List;
/**
* @author zengqiao
* @date 22/12/12
*/
@Api(tags = Constant.SWAGGER_API_TAG_PREFIX + "Mirror-Topic APIs (REST)")
@RestController
@RequestMapping(ApiPrefix.API_V3_HA_MIRROR_PREFIX)
public class MirrorTopicController {
@Autowired
private MirrorTopicService mirrorTopicService;
@ApiOperation(value = "批量创建Topic镜像", notes = "")
@PostMapping(value = "topics")
@ResponseBody
public Result<Void> batchCreateMirrorTopic(@Validated @RequestBody List<MirrorTopicCreateDTO> dtoList) {
return mirrorTopicService.batchCreateMirrorTopic(dtoList);
}
@ApiOperation(value = "批量删除Topic镜像", notes = "")
@DeleteMapping(value = "topics")
@ResponseBody
public Result<Void> batchDeleteMirrorTopic(@Validated @RequestBody List<MirrorTopicDeleteDTO> dtoList) {
return mirrorTopicService.batchDeleteMirrorTopic(dtoList);
}
@ApiOperation(value = "Topic镜像信息", notes = "")
@GetMapping(value = "clusters/{clusterPhyId}/topics/{topicName}/mirror-info")
@ResponseBody
public Result<List<TopicMirrorInfoVO>> getTopicsMirrorInfo(@PathVariable Long clusterPhyId,
@PathVariable String topicName) {
return mirrorTopicService.getTopicsMirrorInfo(clusterPhyId, topicName);
}
}

View File

@@ -17,6 +17,7 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
@@ -40,11 +41,35 @@ public class KafkaHealthController {
@RequestParam(required = false) Integer dimensionCode) {
HealthCheckDimensionEnum dimensionEnum = HealthCheckDimensionEnum.getByCode(dimensionCode);
if (!dimensionEnum.equals(HealthCheckDimensionEnum.UNKNOWN)) {
- return Result.buildSuc(HealthScoreVOConverter.convert2HealthScoreResultDetailVOList(healthStateService.getDimensionHealthResult(clusterPhyId, dimensionEnum)));
+ return Result.buildSuc(
+ HealthScoreVOConverter.convert2HealthScoreResultDetailVOList(
+ healthStateService.getDimensionHealthResult(clusterPhyId, Collections.singletonList(dimensionCode))
+ )
+ );
}
return Result.buildSuc(
HealthScoreVOConverter.convert2HealthScoreResultDetailVOList(
healthStateService.getAllDimensionHealthResult(clusterPhyId)
)
);
}
@ApiOperation(value = "集群-健康检查详情")
@PostMapping(value = "clusters/{clusterPhyId}/health-detail")
@ResponseBody
public Result<List<HealthScoreResultDetailVO>> getClusterHealthCheckResultDetail(@PathVariable Long clusterPhyId,
@RequestBody List<Integer> dimensionCodeList) {
if (dimensionCodeList.isEmpty()) {
return Result.buildSuc(
HealthScoreVOConverter.convert2HealthScoreResultDetailVOList(
healthStateService.getAllDimensionHealthResult(clusterPhyId)
)
);
}
return Result.buildSuc(HealthScoreVOConverter.convert2HealthScoreResultDetailVOList(
- healthStateService.getClusterHealthResult(clusterPhyId)
+ healthStateService.getDimensionHealthResult(clusterPhyId, dimensionCodeList)
));
}
@@ -55,7 +80,7 @@ public class KafkaHealthController {
@PathVariable Integer dimensionCode,
@PathVariable String resName) {
return Result.buildSuc(HealthScoreVOConverter.convert2HealthScoreBaseResultVOList(
- healthStateService.getResHealthResult(clusterPhyId, dimensionCode, resName)
+ healthStateService.getResHealthResult(clusterPhyId, clusterPhyId, dimensionCode, resName)
));
}
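A hedged sketch of exercising the new health-detail endpoint. Host, port, prefix, and the dimension codes in the body are assumptions; per the controller above, an empty list falls back to all dimensions:

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class HealthDetailProbe {
    public static void main(String[] args) throws Exception {
        // POST a JSON list of dimension codes; [0, 1] is purely illustrative
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8080/ks-km/api/v3/clusters/1/health-detail"))
                .header("Content-Type", "application/json")
                .POST(HttpRequest.BodyPublishers.ofString("[0, 1]"))
                .build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.body()); // list of HealthScoreResultDetailVO
    }
}
```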

View File

@@ -35,7 +35,7 @@ public class ReplicaMetricsController {
@PathVariable String topicName,
@PathVariable Integer partitionId,
@RequestBody MetricDTO dto) {
- return replicationMetricService.getMetricPointsFromES(clusterPhyId, brokerId, topicName, partitionId, dto);
+ return Result.buildSuc();
}
@ApiOperation(value = "Replica指标-单个Replica")

View File

@@ -35,7 +35,19 @@ public class VersionController {
@GetMapping(value = "support-kafka-versions")
@ResponseBody
public Result<SortedMap<String, Long>> listAllVersions() {
- Result<Map<String, Long>> rm = versionControlManager.listAllVersions();
+ Result<Map<String, Long>> rm = versionControlManager.listAllKafkaVersions();
if (rm.failed()) {
return Result.buildFromIgnoreData(rm);
}
return Result.buildSuc(new TreeMap<>(rm.getData()));
}
@ApiOperation(value = "支持的kafka-Connect版本列表", notes = "")
@GetMapping(value = "support-kafka-connect-versions")
@ResponseBody
public Result<SortedMap<String, Long>> listAllConnectVersions() {
Result<Map<String, Long>> rm = versionControlManager.listAllKafkaVersions();
if (rm.failed()) {
return Result.buildFromIgnoreData(rm);
}
@@ -54,7 +66,7 @@ public class VersionController {
@GetMapping(value = "clusters/{clusterId}/types/{type}/support-kafka-versions")
@ResponseBody
public Result<List<VersionItemVO>> listClusterVersionControlItem(@PathVariable Long clusterId, @PathVariable Integer type) {
- return versionControlManager.listClusterVersionControlItem(clusterId, type);
+ return versionControlManager.listKafkaClusterVersionControlItem(clusterId, type);
}
@ApiOperation(value = "用户设置的指标显示项", notes = "")

View File

@@ -10,8 +10,6 @@ import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.ZookeeperMetr
import com.xiaojukeji.know.streaming.km.core.service.zookeeper.ZookeeperMetricService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
- import org.slf4j.Logger;
- import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
@@ -26,8 +24,6 @@ import java.util.List;
@RestController
@RequestMapping(ApiPrefix.API_V3_PREFIX)
public class ZookeeperMetricsController {
- private static final Logger LOGGER = LoggerFactory.getLogger(ZookeeperMetricsController.class);
@Autowired
private ZookeeperMetricService zookeeperMetricService;

View File

@@ -0,0 +1,35 @@
package com.xiaojukeji.know.streaming.km.rest.config;
import com.baomidou.mybatisplus.extension.spring.MybatisSqlSessionFactoryBean;
import org.apache.ibatis.session.SqlSessionFactory;
import org.mybatis.spring.SqlSessionTemplate;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import javax.sql.DataSource;
/**
* @author wyb
* @date 2022/12/5
*/
@Configuration("myLogiSecurityDataSourceConfig")
@MapperScan(basePackages = "com.didiglobal.logi.security.dao.mapper",sqlSessionFactoryRef = "logiSecuritySqlSessionFactory")
public class LogiSecurityDataSourceConfig {
@Bean("logiSecuritySqlSessionFactory")
public SqlSessionFactory logiSecuritySqlSessionFactory(
@Qualifier("logiSecurityDataSource") DataSource dataSource) throws Exception {
MybatisSqlSessionFactoryBean bean = new MybatisSqlSessionFactoryBean();
bean.setDataSource(dataSource);
bean.getObject().getConfiguration().setMapUnderscoreToCamelCase(true);
return bean.getObject();
}
@Bean("logiSecuritySqlSessionTemplate")
public SqlSessionTemplate logiSecuritySqlSessionTemplate(
@Qualifier("logiSecuritySqlSessionFactory") SqlSessionFactory sqlSessionFactory) {
return new SqlSessionTemplate(sqlSessionFactory);
}
}
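The factory above depends on a @Qualifier("logiSecurityDataSource") bean that this diff does not show. A minimal sketch of how such a bean could be declared; the property keys are illustrative, not the project's actual configuration names:

```java
import javax.sql.DataSource;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.jdbc.DataSourceBuilder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class LogiSecurityDataSourceDeclaration {
    @Bean("logiSecurityDataSource")
    public DataSource logiSecurityDataSource(
            @Value("${spring.logi-security.jdbc-url}") String jdbcUrl,    // assumed key
            @Value("${spring.logi-security.username}") String username,   // assumed key
            @Value("${spring.logi-security.password}") String password) { // assumed key
        // DataSourceBuilder picks up HikariCP, Spring Boot's default pool
        return DataSourceBuilder.create()
                .url(jdbcUrl)
                .username(username)
                .password(password)
                .build();
    }
}
```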

View File

@@ -63,7 +63,7 @@ public class PermissionInterceptor implements HandlerInterceptor {
classRequestMappingValue = getClassRequestMappingValue(handler);
} catch (Exception e) {
LOGGER.error(
"class=PermissionInterceptor||method=preHandle||uri={}||msg=parse class request-mapping failed",
"method=preHandle||uri={}||msg=parse class request-mapping failed",
request.getRequestURI(), e);
}

View File

@@ -31,8 +31,9 @@ spring:
init-sql: true
init-thread-num: 20
max-thread-num: 50
log-expire: 3 # number of days to keep logs
app-name: know-streaming
enable: true # true enables job tasks, false disables them. A KS deployment can run two sets of services, one handling front-end requests and one running jobs; this flag controls that split
claim-strategy: com.didiglobal.logi.job.core.consensual.RandomConsensual
logi-security: # database config for the logi-security module that know-streaming depends on; keeping it identical to the know-streaming database config is fine by default
jdbc-url: jdbc:mariadb://127.0.0.1:3306/know_streaming?useUnicode=true&characterEncoding=utf8&jdbcCompliantTruncation=true&allowMultiQueries=true&useSSL=false&alwaysAutoGeneratedKeys=true&serverTimezone=GMT%2B8&allowPublicKeyRetrieval=true
@@ -50,7 +51,6 @@ logging:
thread-pool:
scheduled:
thread-num: 2 # thread pool size for @Scheduled tasks; the default is one
collector: # collector module config
future-util: # collector module thread pool config
num: 3 # number of thread pools
@@ -58,7 +58,6 @@ thread-pool:
queue-size: 10000 # queue size of each thread pool
select-suitable-enable: true # whether tasks automatically pick a suitable pool; minor, usually left unchanged
suitable-queue-size: 1000 # ideal pool queue size; minor, usually left unchanged
task: # task module config
metrics: # metrics collection task config
thread-num: 18 # core threads of the metrics collection pool
@@ -69,6 +68,10 @@ thread-pool:
common: # config for the remaining tasks
thread-num: 15 # core threads of the remaining-task pool
queue-size: 150 # queue size of the remaining-task pool
+ es:
+ search: # ES query thread pool
+ thread-num: 10 # pool size
+ queue-size: 5000 # queue size
# client pool size config
@@ -78,16 +81,19 @@ client-pool:
max-idle-client-num: 20 # max idle clients
max-total-client-num: 20 # max total clients
borrow-timeout-unit-ms: 5000 # borrow timeout, in milliseconds
kafka-admin:
client-cnt: 1 # number of KafkaAdminClient instances created per Kafka cluster
# ES client config
es:
client:
address: 127.0.0.1:8091,127.0.0.1:8061,127.0.0.1:8061
pass: # ES credentials, written as username:password if the cluster has them; otherwise leave empty
- client-cnt: 10 # number of ES clients created
+ client-cnt: 2 # number of ES clients created
io-thread-cnt: 2
max-retry-cnt: 5
index:
expire: 15 # index retention in days; indices older than 15 days are deleted by KS
# cluster auto-balance related config
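A hedged sketch of how the new thread-pool.es.search settings might be bound; the actual wiring inside KnowStreaming is not part of this diff:

```java
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class EsSearchThreadPoolConfig {
    @Bean
    public ThreadPoolExecutor esSearchExecutor(
            @Value("${thread-pool.es.search.thread-num:10}") int threadNum,
            @Value("${thread-pool.es.search.queue-size:5000}") int queueSize) {
        // Bounded queue mirrors the queue-size setting added in this commit
        return new ThreadPoolExecutor(threadNum, threadNum,
                0L, TimeUnit.MILLISECONDS, new ArrayBlockingQueue<>(queueSize));
    }
}
```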

View File

@@ -1,63 +0,0 @@
<assembly>
<id>assembly</id>
<formats>
<format>tar</format>
<format>zip</format>
</formats>
<fileSets>
<fileSet>
<directory>src/main/resources/bin</directory>
<outputDirectory>bin</outputDirectory>
<includes>
<include>control.sh</include>
<include>start.bat</include>
</includes>
<fileMode>0755</fileMode>
</fileSet>
<fileSet>
<directory>src/main/resources</directory>
<outputDirectory>config</outputDirectory>
<includes>
<include>*.properties</include>
<include>*.xml</include>
<include>*.yml</include>
<include>env/dev/*</include>
<include>env/qa/*</include>
<include>env/uat/*</include>
<include>env/prod/*</include>
</includes>
</fileSet>
<fileSet>
<directory>target</directory>
<outputDirectory>lib</outputDirectory>
<includes>
<!--
<include>*release*.jar</include>
-->
<include>kafka-manager-web*.jar</include>
</includes>
<excludes>
<exclude>*sources.jar</exclude>
</excludes>
</fileSet>
<fileSet>
<directory>src/main/resources</directory>
<outputDirectory>logs</outputDirectory>
<fileMode>0755</fileMode>
<excludes>
<exclude>**/*</exclude>
</excludes>
</fileSet>
<!-- <fileSet>
<directory>${project.build.directory}/asciidoc</directory>
<outputDirectory>docs</outputDirectory>
<includes>
<include>md/*</include>
<include>html/*</include>
<include>pdf/*</include>
</includes>
<fileMode>0755</fileMode>
</fileSet>-->
</fileSets>
</assembly>

View File

@@ -30,7 +30,7 @@
<file>${log.path}/log_debug.log</file>
<!-- log file output pattern -->
<encoder>
- <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
+ <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level class=%logger{50}||%msg%n</pattern>
<charset>UTF-8</charset> <!-- character set -->
</encoder>
<!-- rolling policy: roll by date and by size -->
@@ -58,7 +58,7 @@
<file>${log.path}/log_info.log</file>
<!-- log file output pattern -->
<encoder>
- <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
+ <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level class=%logger{50}||%msg%n</pattern>
<charset>UTF-8</charset>
</encoder>
<!-- rolling policy: roll by date and by size -->
@@ -85,7 +85,7 @@
<file>${log.path}/log_warn.log</file>
<!-- log file output pattern -->
<encoder>
- <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
+ <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level class=%logger{50}||%msg%n</pattern>
<charset>UTF-8</charset> <!-- character set -->
</encoder>
<!-- rolling policy: roll by date and by size -->
@@ -112,7 +112,7 @@
<file>${log.path}/log_error.log</file>
<!-- log file output pattern -->
<encoder>
- <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
+ <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level class=%logger{50}||%msg%n</pattern>
<charset>UTF-8</charset> <!-- character set -->
</encoder>
<!-- rolling policy: roll by date and by size -->
@@ -136,7 +136,7 @@
<appender name="ES_LOGGER" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/es/es.log</file>
<encoder>
- <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
+ <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level class=%logger{50}||%msg%n</pattern>
<charset>UTF-8</charset>
</encoder>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
@@ -149,14 +149,14 @@
</appender>
<!-- collected metrics log -->
- <appender name="METRIC_LOGGER" class="ch.qos.logback.core.rolling.RollingFileAppender">
- <file>${log.path}/metric/metrics.log</file>
+ <appender name="METRIC_COLLECTED_LOGGER" class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <file>${log.path}/metric/metric_collected.log</file>
<encoder>
- <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
+ <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level class=%logger{50}||%msg%n</pattern>
<charset>UTF-8</charset>
</encoder>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
- <fileNamePattern>${log.path}/metric/metrics_%d{yyyy-MM-dd}.%i.log</fileNamePattern>
+ <fileNamePattern>${log.path}/metric/metric_collected_%d{yyyy-MM-dd}.%i.log</fileNamePattern>
<timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
<maxFileSize>100MB</maxFileSize>
</timeBasedFileNamingAndTriggeringPolicy>
@@ -168,7 +168,7 @@
<appender name="TASK_LOGGER" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/task/task.log</file>
<encoder>
- <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
+ <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level class=%logger{50}||%msg%n</pattern>
<charset>UTF-8</charset>
</encoder>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
@@ -193,12 +193,13 @@
</encoder>
</appender>
<logger name="ES_LOGGER" level="ERROR" additivity="false">
<logger name="ES_LOGGER" level="WARN" additivity="false">
<appender-ref ref="ES_LOGGER"/>
</logger>
<logger name="METRIC_LOGGER" level="ERROR" additivity="false">
<appender-ref ref="METRIC_LOGGER"/>
<!-- 采集到的metrics数据, level调整为DEBUG后便可打印出来 -->
<logger name="METRIC_COLLECTED_LOGGER" level="INFO" additivity="false">
<appender-ref ref="METRIC_COLLECTED_LOGGER"/>
</logger>
<logger name="TASK_LOGGER" level="ERROR" additivity="false">
@@ -209,6 +210,9 @@
<appender-ref ref="logIJobLogger" />
</logger>
<!-- ILog contains a logger that prints duplicate error logs; its purpose is unclear, so it is turned off for now -->
<logger name="errorLogger" level="OFF"> </logger>
<logger name="org.apache.zookeeper" level="WARN" additivity="false" />
<logger name="org.apache.ibatis" level="INFO" additivity="false" />
<logger name="org.mybatis.spring" level="INFO" additivity="false" />

View File

@@ -1,13 +1,10 @@
package com.xiaojukeji.know.streaming.km;
import com.xiaojukeji.know.streaming.km.rest.KnowStreaming;
- import org.junit.jupiter.api.Assertions;
- import org.junit.jupiter.api.BeforeEach;
- import org.junit.jupiter.api.Test;
+ import com.xiaojukeji.know.streaming.test.KMTestEnvService;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.web.server.LocalServerPort;
- import org.springframework.http.HttpHeaders;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;
@@ -15,32 +12,19 @@ import org.springframework.test.context.junit.jupiter.SpringExtension;
/**
* @author d06679
* @date 2019/4/11
- *
+ <p>
* A random port must be used so that unit tests running in parallel do not run into port conflicts
*/
@ActiveProfiles("test")
@ExtendWith(SpringExtension.class)
@ContextConfiguration(classes = KnowStreaming.class)
@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT)
- public class KnowStreamApplicationTest {
- protected HttpHeaders headers;
+ public class KnowStreamApplicationTest extends KMTestEnvService {
@LocalServerPort
private Integer port;
- @BeforeEach
- public void setUp() {
- // get the port that the Spring Boot server listens on
- // port = applicationContext.getWebServer().getPort();
- System.out.println( String.format("port is : [%d]", port));
- //
- // headers = new HttpHeaders();
- // headers.add("X-SSO-USER", "zengqiao");
- }
- @Test
- public void test() {
- Assertions.assertNotNull(port);
- }
+ // @Test
+ // public void test() {
+ //     Assertions.assertNotNull(port);
+ // }
}

View File

@@ -18,12 +18,13 @@ import java.util.List;
public class TopicMetricServiceTest extends KnowStreamApplicationTest {
+ Long clusterId = 1L;
@Autowired
private TopicMetricService topicMetricService;
@Test
public void listTopicMetricsFromESTest(){
Long clusterId = 1l;
Long endTime = System.currentTimeMillis();
Long startTime = endTime - 3600 * 1000;
@@ -47,7 +48,6 @@ public class TopicMetricServiceTest extends KnowStreamApplicationTest {
@Test
public void pagingTopicWithLatestMetricsFromESTest(){
- Long clusterId = 2l;
List<String> metricNameList = new ArrayList<>();
SearchSort sort = new SearchSort();
sort.setQueryName("LogSize");

View File

@@ -0,0 +1,55 @@
package com.xiaojukeji.know.streaming.km.core.service.cluster;
import com.xiaojukeji.know.streaming.km.KnowStreamApplicationTest;
import com.xiaojukeji.know.streaming.km.common.bean.dto.cluster.ClusterPhyAddDTO;
import com.xiaojukeji.know.streaming.km.common.bean.entity.cluster.ClusterPhy;
import com.xiaojukeji.know.streaming.km.common.bean.entity.config.JmxConfig;
import com.xiaojukeji.know.streaming.km.common.converter.ClusterConverter;
import com.xiaojukeji.know.streaming.km.common.enums.version.VersionEnum;
import com.xiaojukeji.know.streaming.km.common.exception.AdminOperateException;
import com.xiaojukeji.know.streaming.km.common.exception.DuplicateException;
import com.xiaojukeji.know.streaming.km.common.exception.ParamErrorException;
import lombok.extern.slf4j.Slf4j;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.List;
import java.util.Properties;
@Slf4j
public class ClusterPhyServiceTest extends KnowStreamApplicationTest {
@Autowired
private ClusterPhyService clusterPhyService;
@Test
@Order(Integer.MIN_VALUE)
void addClusterPhyTest() {
try {
Properties properties = new Properties();
JmxConfig jmxConfig = new JmxConfig();
jmxConfig.setOpenSSL(false);
ClusterPhyAddDTO dto = new ClusterPhyAddDTO();
dto.setName("test");
dto.setDescription("");
dto.setKafkaVersion(VersionEnum.V_2_5_1.getVersion());
dto.setJmxProperties(jmxConfig);
dto.setClientProperties(properties);
dto.setZookeeper(zookeeperUrl());
dto.setBootstrapServers(bootstrapServers());
Assertions.assertEquals(1,
clusterPhyService.addClusterPhy(ClusterConverter.convert2ClusterPhyPO(dto), "root"));
} catch (ParamErrorException | DuplicateException | AdminOperateException e) {
throw new RuntimeException(e);
}
}
@Test
void listAllClustersTest() {
List<ClusterPhy> clusterPhies = clusterPhyService.listAllClusters();
Assertions.assertNotNull(clusterPhies);
log.info("集群列表:{}", clusterPhies);
}
}

View File

@@ -6,6 +6,7 @@ import com.xiaojukeji.know.streaming.km.common.bean.entity.search.SearchRange;
import com.xiaojukeji.know.streaming.km.common.bean.entity.search.SearchSort;
import com.xiaojukeji.know.streaming.km.common.bean.vo.metrics.point.MetricPointVO;
import com.xiaojukeji.know.streaming.km.persistence.es.dao.BrokerMetricESDAO;
import lombok.extern.slf4j.Slf4j;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
@@ -14,8 +15,11 @@ import java.util.Arrays;
import java.util.List;
import java.util.Map;
@Slf4j
public class BrokerMetricESDAOTest extends KnowStreamApplicationTest {
Long clusterId = 1L;
@Autowired
private BrokerMetricESDAO brokerMetriceESDAO;
@@ -25,7 +29,7 @@ public class BrokerMetricESDAOTest extends KnowStreamApplicationTest {
SearchSort def = new SearchSort("timestamp", true);
String sortDsl = brokerMetriceESDAO.buildSortDsl(sort, def);
System.out.println(sortDsl);
log.info(sortDsl);
}
@Test
@@ -33,7 +37,7 @@ public class BrokerMetricESDAOTest extends KnowStreamApplicationTest {
SearchRange sort = new SearchRange("age", 1232321f, 45345345345f);
String sortDsl = brokerMetriceESDAO.buildRangeDsl(sort);
System.out.println(sortDsl);
log.info(sortDsl);
}
@Test
@@ -44,12 +48,11 @@ public class BrokerMetricESDAOTest extends KnowStreamApplicationTest {
String matchDsl = brokerMetriceESDAO.buildMatchDsl(matches);
System.out.println(matchDsl);
log.info(matchDsl);
}
@Test
public void getBrokerMetricsPointTest(){
Long clusterId = 2L;
Integer brokerId = 1;
List<String> metrics = Arrays.asList("BytesIn", "BytesIn_min_5");
Long endTime = System.currentTimeMillis();
@@ -63,7 +66,6 @@ public class BrokerMetricESDAOTest extends KnowStreamApplicationTest {
@Test
public void listBrokerMetricesByBrokerIdsTest(){
Long clusterId = 123L;
List<String> metrics = Arrays.asList("BytesInPerSec_min_1", "BytesInPerSec_min_15");
List<Long> brokerIds = Arrays.asList(1L);
Long endTime = System.currentTimeMillis();
@@ -74,7 +76,6 @@ public class BrokerMetricESDAOTest extends KnowStreamApplicationTest {
@Test
public void listBrokerMetricsByTopTest(){
Long clusterId = 123L;
List<String> metrics = Arrays.asList("BytesInPerSec_min_1", "BytesInPerSec_min_15");
Long endTime = System.currentTimeMillis();
Long startTime = endTime - 4 * 60 * 60 * 1000;
@@ -84,7 +85,6 @@ public class BrokerMetricESDAOTest extends KnowStreamApplicationTest {
@Test
public void getTopBrokerIdsTest(){
Long clusterId = 123L;
List<String> metrics = Arrays.asList("BytesInPerSec_min_1", "BytesInPerSec_min_15");
Long endTime = System.currentTimeMillis();
Long startTime = endTime - 4 * 60 * 60 * 1000;

View File

@@ -15,6 +15,8 @@ import java.util.*;
public class ClusterMetricESDAOTest extends KnowStreamApplicationTest {
Long clusterId = 1L;
@Autowired
private ClusterMetricESDAO clusterMetricESDAO;
@@ -34,7 +36,6 @@ public class ClusterMetricESDAOTest extends KnowStreamApplicationTest {
*/
@Test
public void getClusterMetricsPointTest(){
Long clusterId = 1L;
List<String> metrics = Arrays.asList(
"Connections", "BytesIn_min_15", "PartitionURP",
"HealthScore_Topics", "EventQueueSize", "ActiveControllerCount",
@@ -67,7 +68,6 @@ public class ClusterMetricESDAOTest extends KnowStreamApplicationTest {
*/
@Test
public void getClusterLatestMetricsTest(){
Long clusterId = 1L;
List<String> metrics = Collections.emptyList();
ClusterMetricPO clusterLatestMetrics = clusterMetricESDAO.getClusterLatestMetrics(clusterId, metrics);

View File

@@ -20,12 +20,13 @@ import java.util.Set;
public class GroupMetricESDAOTest extends KnowStreamApplicationTest {
Long clusterId = 1L;
@Autowired
private GroupMetricESDAO groupMetricESDAO;
@Test
public void listLatestMetricsAggByGroupTopicTest(){
Long clusterPhyId = 2L;
List<GroupTopic> groupTopicList = new ArrayList<>();
groupTopicList.add(new GroupTopic("g-know-streaming-123456", "know-streaming-test-251"));
groupTopicList.add(new GroupTopic("test_group", "know-streaming-test-251"));
@@ -33,14 +34,13 @@ public class GroupMetricESDAOTest extends KnowStreamApplicationTest {
List<String> metrics = Arrays.asList("OffsetConsumed", "Lag");
AggTypeEnum aggType = AggTypeEnum.AVG;
List<GroupMetricPO> groupMetricPOS = groupMetricESDAO.listLatestMetricsAggByGroupTopic(clusterPhyId, groupTopicList, metrics, aggType);
List<GroupMetricPO> groupMetricPOS = groupMetricESDAO.listLatestMetricsAggByGroupTopic(clusterId, groupTopicList, metrics, aggType);
assert !CollectionUtils.isEmpty(groupMetricPOS);
}
@Test
public void listGroupTopicPartitionsTest(){
Long clusterId = 2L;
String groupName = "g-know-streaming-123456";
Long endTime = System.currentTimeMillis();
Long startTime = endTime - 24 * 3600 * 1000;
@@ -51,17 +51,15 @@ public class GroupMetricESDAOTest extends KnowStreamApplicationTest {
@Test
public void listPartitionLatestMetricsTest(){
Long clusterId = 2L;
String groupName = "test_group_20220421";
String topicName = "know-streaming-test-251";
List<GroupMetricPO> groupMetricPOS = groupMetricESDAO.listPartitionLatestMetrics(clusterId, groupName, topicName, null);
assert !CollectionUtils.isEmpty(groupMetricPOS);
assert CollectionUtils.isEmpty(groupMetricPOS);
}
@Test
public void countMetricValueTest(){
Long clusterId = 3L;
String groupName = "test_group";
SearchTerm searchTerm = new SearchTerm("HealthCheckTotal", "1", false);
@@ -75,7 +73,6 @@ public class GroupMetricESDAOTest extends KnowStreamApplicationTest {
@Test
public void listGroupMetricsTest(){
Long clusterId = 2L;
String groupName = "g-know-streaming-123456";
Long endTime = System.currentTimeMillis();
Long startTime = endTime - 24 * 3600 * 1000;

View File

@@ -11,16 +11,17 @@ import java.util.List;
public class PartitionMetricESDAOTest extends KnowStreamApplicationTest {
Long clusterId = 1L;
@Autowired
private PartitionMetricESDAO partitionMetricESDAO;
@Test
public void listPartitionLatestMetricsByTopicTest(){
Long clusterPhyId = 2L;
String topic = "__consumer_offsets";
List<PartitionMetricPO> partitionMetricPOS = partitionMetricESDAO.listPartitionLatestMetricsByTopic(
clusterPhyId, topic, new ArrayList<>());
clusterId, topic, new ArrayList<>());
assert null != partitionMetricPOS;
}

View File

@@ -1,48 +0,0 @@
package com.xiaojukeji.know.streaming.km.persistence.es;
import com.xiaojukeji.know.streaming.km.KnowStreamApplicationTest;
import com.xiaojukeji.know.streaming.km.common.bean.po.metrice.ReplicationMetricPO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.metrics.point.MetricPointVO;
import com.xiaojukeji.know.streaming.km.persistence.es.dao.ReplicationMetricESDAO;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Map;
public class ReplicationMetricESDAOTest extends KnowStreamApplicationTest {
@Autowired
private ReplicationMetricESDAO replicationMetricESDAO;
@Test
public void getReplicationLatestMetricsTest(){
Long clusterPhyId = 2L;
Integer brokerId = 1;
String topic = "know-streaming-test-251";
Integer partitionId = 1;
ReplicationMetricPO replicationMetricPO = replicationMetricESDAO.getReplicationLatestMetrics(
clusterPhyId, brokerId, topic, partitionId, new ArrayList<>());
assert null != replicationMetricPO;
}
/**
* Test:
* fetch, for each metric of cluster clusterPhyId, the aggregated (avg, max) value of the given partitionId within the time range [startTime, endTime]
*/
@Test
public void getReplicationMetricsPointTest(){
Long clusterPhyId = 2L;
Integer brokerId = 1;
String topic = "know-streaming-test-251";
Integer partitionId = 1;
Long endTime = System.currentTimeMillis();
Long startTime = endTime - 4 * 60 * 60 * 1000;
Map<String, MetricPointVO> metricPointVOMap = replicationMetricESDAO.getReplicationMetricsPoint(
clusterPhyId, topic, brokerId, partitionId, Collections.emptyList(), "avg", startTime, endTime);
assert null != metricPointVOMap;
}
}

View File

@@ -8,22 +8,25 @@ import com.xiaojukeji.know.streaming.km.common.bean.entity.search.SearchSort;
import com.xiaojukeji.know.streaming.km.common.bean.po.metrice.TopicMetricPO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.metrics.point.MetricPointVO;
import com.xiaojukeji.know.streaming.km.persistence.es.dao.TopicMetricESDAO;
import lombok.extern.slf4j.Slf4j;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.util.CollectionUtils;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
@Slf4j
public class TopicMetricESDAOTest extends KnowStreamApplicationTest {
Long clusterId = 1L;
@Autowired
private TopicMetricESDAO topicMetricESDAO;
@Test
public void listTopicMaxMinMetricsTest(){
Long clusterId = 2L;
String topic = "know-streaming-test-251";
String topic1 = "topic_test01";
Long endTime = System.currentTimeMillis();
@@ -36,7 +39,6 @@ public class TopicMetricESDAOTest extends KnowStreamApplicationTest {
@Test
public void getTopicsAggsMetricsValueTest(){
Long clusterId = 2L;
List<String> topicList = Arrays.asList("know-streaming-test-251", "topic_test01");
List<String> metrics = Arrays.asList(
"Messages", "BytesIn_min_15", "BytesRejected",
@@ -56,7 +58,6 @@ public class TopicMetricESDAOTest extends KnowStreamApplicationTest {
@Test
public void listTopicWithLatestMetricsTest(){
Long clusterId = 2L;
SearchSort sort = new SearchSort("LogSize", true);
sort.setMetric(true);
@@ -65,12 +66,11 @@ public class TopicMetricESDAOTest extends KnowStreamApplicationTest {
List<TopicMetricPO> topicMetricPOS = topicMetricESDAO.listTopicWithLatestMetrics(clusterId, sort, fuzzy, null, terms);
assert !CollectionUtils.isEmpty(topicMetricPOS);
log.info("{}", topicMetricPOS);
}
@Test
public void getTopicLatestMetricByBrokerIdTest(){
Long clusterId = 2L;
String topic = "know-streaming-test-251";
Integer brokerId = 1;
@@ -81,7 +81,6 @@ public class TopicMetricESDAOTest extends KnowStreamApplicationTest {
@Test
public void getTopicLatestMetricTest(){
Long clusterId = 2L;
String topic = "know-streaming-test-251";
TopicMetricPO topicMetricPO = topicMetricESDAO.getTopicLatestMetric(clusterId, topic, new ArrayList<>());
@@ -91,7 +90,6 @@ public class TopicMetricESDAOTest extends KnowStreamApplicationTest {
@Test
public void listTopicLatestMetricTest(){
Long clusterId = 2L;
String topic = "know-streaming-test-251";
String topic1 = "know-streaming-123";
String topic2 = "1209test";
@@ -112,7 +110,6 @@ public class TopicMetricESDAOTest extends KnowStreamApplicationTest {
@Test
public void listBrokerMetricsByTopicsTest(){
Long clusterId = 2L;
List<String> metrics = Arrays.asList(
"Messages", "BytesIn_min_15", "BytesRejected",
"PartitionURP", "HealthCheckTotal", "ReplicationCount",
@@ -125,12 +122,13 @@ public class TopicMetricESDAOTest extends KnowStreamApplicationTest {
Long endTime = System.currentTimeMillis();
Long startTime = endTime - 4 * 60 * 60 * 1000;
topicMetricESDAO.listTopicMetricsByTopics(clusterId, metrics, "avg", topics, startTime, endTime);
Table<String, String, List<MetricPointVO>> list =
topicMetricESDAO.listTopicMetricsByTopics(clusterId, metrics, "avg", topics, startTime, endTime);
Assertions.assertNotNull(list);
}
@Test
public void countMetricValueOccurrencesTest(){
Long clusterPhyId = 2L;
String topic = "__consumer_offsets";
String metricName = "HealthCheckPassed";
Float metricValue = 2f;
@@ -142,7 +140,7 @@ public class TopicMetricESDAOTest extends KnowStreamApplicationTest {
Long endTime = System.currentTimeMillis();
Long startTime = endTime - 4 * 60 * 60 * 1000;
Integer i = topicMetricESDAO.countMetricValue(clusterPhyId, topic, searchMatch, startTime, endTime);
Integer i = topicMetricESDAO.countMetricValue(clusterId, topic, searchMatch, startTime, endTime);
assert null != i;
}

View File

@@ -0,0 +1,82 @@
package com.xiaojukeji.know.streaming.test;
import com.xiaojukeji.know.streaming.test.container.es.ESTestContainer;
import com.xiaojukeji.know.streaming.test.container.kafka.KafkaTestContainer;
import com.xiaojukeji.know.streaming.test.container.mysql.MySQLTestContainer;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.springframework.test.context.DynamicPropertyRegistry;
import org.springframework.test.context.DynamicPropertySource;
public abstract class KMTestEnvService {
private static final boolean useES = true;
private static final boolean useMysql = true;
private static final boolean useKafka = true;
private static MySQLTestContainer mySQLTestContainer;
private static ESTestContainer esTestContainer;
private static KafkaTestContainer kafkaTestContainer;
@BeforeAll
static void init() {
if (useMysql) {
mySQLTestContainer = new MySQLTestContainer();
mySQLTestContainer.init();
}
if (useES) {
esTestContainer = new ESTestContainer();
esTestContainer.init();
}
if (useKafka) {
kafkaTestContainer = new KafkaTestContainer();
kafkaTestContainer.init();
}
}
@DynamicPropertySource
static void setUp(DynamicPropertyRegistry registry) {
registry.add("spring.datasource.know-streaming.jdbc-url", mySQLTestContainer.jdbcUrl());
registry.add("spring.datasource.know-streaming.username", mySQLTestContainer.jdbcUsername());
registry.add("spring.datasource.know-streaming.password", mySQLTestContainer.jdbcPassword());
registry.add("spring.logi-job.jdbc-url", mySQLTestContainer.jdbcUrl());
registry.add("spring.logi-job.username", mySQLTestContainer.jdbcUsername());
registry.add("spring.logi-job.password", mySQLTestContainer.jdbcPassword());
registry.add("spring.logi-security.jdbc-url", mySQLTestContainer.jdbcUrl());
registry.add("spring.logi-security.username", mySQLTestContainer.jdbcUsername());
registry.add("spring.logi-security.password", mySQLTestContainer.jdbcPassword());
registry.add("es.client.address", esTestContainer.esUrl());
}
@AfterAll
static void destroy() {
if (mySQLTestContainer != null) {
mySQLTestContainer.cleanup();
}
if (esTestContainer != null) {
esTestContainer.cleanup();
}
if (kafkaTestContainer != null) {
kafkaTestContainer.cleanup();
}
}
protected String bootstrapServers() {
return kafkaTestContainer.getBootstrapServers();
}
protected String zookeeperUrl() {
return kafkaTestContainer.getZKUrl();
}
}
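
A minimal sketch (hypothetical class name, not part of this commit) of how a test consumes this base class: init() starts the containers once per test class, and the properties registered in setUp() are resolved lazily when the Spring context loads.

package com.xiaojukeji.know.streaming.test;

import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;

// Hypothetical illustration: a test extending KMTestEnvService inherits the
// MySQL/ES/Kafka containers plus the dynamically registered datasource/ES properties.
public class ContainerWiringSketchTest extends KMTestEnvService {

    @Test
    void containersAreReachable() {
        // bootstrapServers() and zookeeperUrl() delegate to the running Kafka container
        Assertions.assertNotNull(bootstrapServers());
        Assertions.assertNotNull(zookeeperUrl());
    }
}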

View File

@@ -0,0 +1,7 @@
package com.xiaojukeji.know.streaming.test.container;
public abstract class BaseTestContainer {
public abstract void init();
public abstract void cleanup();
}

View File

@@ -0,0 +1,34 @@
package com.xiaojukeji.know.streaming.test.container.es;
import com.xiaojukeji.know.streaming.test.container.BaseTestContainer;
import org.jetbrains.annotations.NotNull;
import org.testcontainers.elasticsearch.ElasticsearchContainer;
import org.testcontainers.lifecycle.Startables;
import org.testcontainers.utility.DockerImageName;
import java.util.function.Supplier;
public class ESTestContainer extends BaseTestContainer {
// ES container
private static final ElasticsearchContainer ES_CONTAINER = new ElasticsearchContainer(
DockerImageName.parse("docker.io/library/elasticsearch:7.6.2").asCompatibleSubstituteFor("docker.elastic.co/elasticsearch/elasticsearch")
)
.withEnv("TZ", "Asia/Shanghai")
.withEnv("ES_JAVA_OPTS", "-Xms512m -Xmx512m")
.withEnv("discovery.type", "single-node");
@NotNull
public Supplier<Object> esUrl() {
return () -> ES_CONTAINER.getHost() + ":" + ES_CONTAINER.getMappedPort(9200);
}
@Override
public void init() {
Startables.deepStart(ES_CONTAINER).join();
}
@Override
public void cleanup() {
}
}
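
A hedged smoke-check sketch for the address esUrl() supplies (hypothetical ESSmokeCheck class; plain JDK HTTP, no extra client assumed):

package com.xiaojukeji.know.streaming.test.container.es;

import java.net.HttpURLConnection;
import java.net.URL;

// Hypothetical illustration only: the "host:mappedPort" string from esUrl().get()
// should answer with HTTP 200 once the single-node cluster is up.
public class ESSmokeCheck {
    public static void main(String[] args) throws Exception {
        ESTestContainer es = new ESTestContainer();
        es.init(); // blocks until Elasticsearch has started
        String address = String.valueOf(es.esUrl().get());
        HttpURLConnection conn = (HttpURLConnection) new URL("http://" + address).openConnection();
        System.out.println("ES answered with HTTP " + conn.getResponseCode());
    }
}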

View File

@@ -0,0 +1,31 @@
package com.xiaojukeji.know.streaming.test.container.kafka;
import com.xiaojukeji.know.streaming.test.container.BaseTestContainer;
import org.testcontainers.containers.KafkaContainer;
import org.testcontainers.lifecycle.Startables;
import org.testcontainers.utility.DockerImageName;
public class KafkaTestContainer extends BaseTestContainer {
// Kafka container
private static final KafkaContainer KAFKA_CONTAINER = new KafkaContainer(
DockerImageName.parse("confluentinc/cp-kafka:7.3.1")
).withEnv("TZ", "Asia/Shanghai");
@Override
public void init() {
Startables.deepStart(KAFKA_CONTAINER).join();
}
@Override
public void cleanup() {
}
public String getBootstrapServers() {
return KAFKA_CONTAINER.getBootstrapServers();
}
public String getZKUrl() {
return String.format("%s:%d", KAFKA_CONTAINER.getHost(), KAFKA_CONTAINER.getMappedPort(2181));
}
}
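
A hedged usage sketch for getBootstrapServers(), using the kafka-clients AdminClient that is already a project dependency (hypothetical KafkaSmokeCheck class):

package com.xiaojukeji.know.streaming.test.container.kafka;

import java.util.Properties;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;

// Hypothetical illustration only: list topics against the containerized broker.
public class KafkaSmokeCheck {
    public static void main(String[] args) throws Exception {
        KafkaTestContainer kafka = new KafkaTestContainer();
        kafka.init(); // blocks until the broker has started

        Properties props = new Properties();
        props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, kafka.getBootstrapServers());
        try (AdminClient admin = AdminClient.create(props)) {
            // a fresh container is expected to report no user topics
            System.out.println(admin.listTopics().names().get());
        }
    }
}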

View File

@@ -0,0 +1,212 @@
package com.xiaojukeji.know.streaming.test.container.mysql;
import com.didiglobal.logi.log.ILog;
import com.didiglobal.logi.log.LogFactory;
import com.xiaojukeji.know.streaming.km.common.utils.Tuple;
import org.jetbrains.annotations.NotNull;
import org.testcontainers.containers.ContainerLaunchException;
import org.testcontainers.containers.JdbcDatabaseContainer;
import org.testcontainers.containers.MySQLContainer;
import org.testcontainers.delegate.DatabaseDelegate;
import org.testcontainers.ext.ScriptUtils;
import org.testcontainers.jdbc.JdbcDatabaseDelegate;
import org.testcontainers.utility.DockerImageName;
import javax.script.ScriptException;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
/**
* @author richardnorth
* @see org.testcontainers.containers.MySQLContainer
*/
public class KSMySQLContainer<SELF extends KSMySQLContainer<SELF>> extends JdbcDatabaseContainer<SELF> {
private static final ILog LOGGER = LogFactory.getLog(KSMySQLContainer.class);
public static final String NAME = "mysql";
private static final DockerImageName DEFAULT_IMAGE_NAME = DockerImageName.parse("mysql");
@Deprecated
public static final String DEFAULT_TAG = "5.7.34";
@Deprecated
public static final String IMAGE = DEFAULT_IMAGE_NAME.getUnversionedPart();
static final String DEFAULT_USER = "test";
static final String DEFAULT_PASSWORD = "test";
private static final String MY_CNF_CONFIG_OVERRIDE_PARAM_NAME = "TC_MY_CNF";
public static final Integer MYSQL_PORT = 3306;
private String databaseName = "test";
private String username = DEFAULT_USER;
private String password = DEFAULT_PASSWORD;
private static final String MYSQL_ROOT_USER = "root";
private List<Tuple<String, String>> initScriptPathAndContentList = new ArrayList<>();
/**
* @deprecated use {@link MySQLContainer(DockerImageName)} instead
*/
@Deprecated
public KSMySQLContainer() {
this(DEFAULT_IMAGE_NAME.withTag(DEFAULT_TAG));
}
public KSMySQLContainer(String dockerImageName) {
this(DockerImageName.parse(dockerImageName));
}
public KSMySQLContainer(final DockerImageName dockerImageName) {
super(dockerImageName);
dockerImageName.assertCompatibleWith(DEFAULT_IMAGE_NAME);
addExposedPort(MYSQL_PORT);
}
/**
* @return the ports on which to check if the container is ready
* @deprecated use {@link #getLivenessCheckPortNumbers()} instead
*/
@NotNull
@Override
@Deprecated
protected Set<Integer> getLivenessCheckPorts() {
return super.getLivenessCheckPorts();
}
@Override
protected void configure() {
optionallyMapResourceParameterAsVolume(
MY_CNF_CONFIG_OVERRIDE_PARAM_NAME,
"/etc/mysql/conf.d",
"mysql-default-conf"
);
addEnv("MYSQL_DATABASE", databaseName);
if (!MYSQL_ROOT_USER.equalsIgnoreCase(username)) {
addEnv("MYSQL_USER", username);
}
if (password != null && !password.isEmpty()) {
addEnv("MYSQL_PASSWORD", password);
addEnv("MYSQL_ROOT_PASSWORD", password);
} else if (MYSQL_ROOT_USER.equalsIgnoreCase(username)) {
addEnv("MYSQL_ALLOW_EMPTY_PASSWORD", "yes");
} else {
throw new ContainerLaunchException("Empty password can be used only with the root user");
}
setStartupAttempts(3);
}
@Override
public String getDriverClassName() {
// KS modification: both branches of the original try/catch returned the same
// class name, so the redundant Class.forName probe is dropped
return "org.mariadb.jdbc.Driver";
}
@Override
public String getJdbcUrl() {
String additionalUrlParams = constructUrlParameters("?", "&");
// KS modification: build a MariaDB-scheme JDBC URL
return "jdbc:mariadb://" + getHost() + ":" + getMappedPort(MYSQL_PORT) + "/" + databaseName + additionalUrlParams;
}
@Override
protected String constructUrlForConnection(String queryString) {
String url = super.constructUrlForConnection(queryString);
if (!url.contains("useSSL=")) {
String separator = url.contains("?") ? "&" : "?";
url = url + separator + "useSSL=false";
}
if (!url.contains("allowPublicKeyRetrieval=")) {
url = url + "&allowPublicKeyRetrieval=true";
}
return url;
}
@Override
public String getDatabaseName() {
return databaseName;
}
@Override
public String getUsername() {
return username;
}
@Override
public String getPassword() {
return password;
}
@Override
public String getTestQueryString() {
return "SELECT 1";
}
public SELF withConfigurationOverride(String s) {
parameters.put(MY_CNF_CONFIG_OVERRIDE_PARAM_NAME, s);
return self();
}
@Override
public SELF withDatabaseName(final String databaseName) {
this.databaseName = databaseName;
return self();
}
@Override
public SELF withUsername(final String username) {
this.username = username;
return self();
}
@Override
public SELF withPassword(final String password) {
this.password = password;
return self();
}
public SELF addInitScriptPathAndContent(String initScriptPath, String initScriptContent) {
initScriptPathAndContentList.add(new Tuple<>(initScriptPath, initScriptContent));
return self();
}
// KS modification: make the JDBC delegate accessible for the init-script handling below
@Override
public DatabaseDelegate getDatabaseDelegate() {
return new JdbcDatabaseDelegate(this, "");
}
@Override
protected void runInitScriptIfRequired() {
if (initScriptPathAndContentList.isEmpty()) {
return;
}
for (Tuple<String, String> elem: initScriptPathAndContentList) {
try {
ScriptUtils.executeDatabaseScript(this.getDatabaseDelegate(), elem.getV1(), elem.getV2());
} catch (ScriptException e) {
LOGGER.error("Error while executing init script: {}", elem.getV1(), e);
throw new ScriptUtils.UncategorizedScriptException("Error while executing init script: " + elem.getV1(), e);
}
}
}
}
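
A hedged construction sketch for the container above (values illustrative), showing the chained addInitScriptPathAndContent(...) API this class adds over the stock MySQLContainer:

// Hypothetical standalone usage; database name and schema are illustrative only.
KSMySQLContainer<?> mysql = new KSMySQLContainer<>("mysql:5.7")
        .withDatabaseName("demo")
        .withUsername("test")
        .withPassword("test")
        .addInitScriptPathAndContent("demo-ddl.sql", "CREATE TABLE demo_t (id INT PRIMARY KEY);");
mysql.start();                    // runInitScriptIfRequired() replays the registered scripts
String url = mysql.getJdbcUrl();  // jdbc:mariadb://<host>:<mappedPort>/demo?...
mysql.stop();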

View File

@@ -0,0 +1,65 @@
package com.xiaojukeji.know.streaming.test.container.mysql;
import com.xiaojukeji.know.streaming.km.persistence.utils.LoadSQLUtil;
import com.xiaojukeji.know.streaming.test.container.BaseTestContainer;
import org.jetbrains.annotations.NotNull;
import org.testcontainers.lifecycle.Startables;
import org.testcontainers.utility.DockerImageName;
import java.util.function.Supplier;
public class MySQLTestContainer extends BaseTestContainer {
private static final String DB_USERNAME = "root";
private static final String DB_PASSWORD = "1234567890";
private static final String DATABASE_NAME = "know_streaming";
private static final String DB_PROPERTY = "?useUnicode=true" +
"&characterEncoding=utf8" +
"&jdbcCompliantTruncation=true" +
"&allowMultiQueries=true" +
"&useSSL=false" +
"&alwaysAutoGeneratedKeys=true" +
"&serverTimezone=GMT%2B8" +
"&allowPublicKeyRetrieval=true";
private static final KSMySQLContainer<?> MYSQL_CONTAINER = new KSMySQLContainer<>(
DockerImageName.parse("mysql:5.7").asCompatibleSubstituteFor("mysql")
)
.withEnv("TZ", "Asia/Shanghai")
.withDatabaseName(DATABASE_NAME)
.withUsername(DB_USERNAME)
.withPassword(DB_PASSWORD)
.addInitScriptPathAndContent(LoadSQLUtil.SQL_DDL_KS_KM, String.format("use %s;\n%s", DATABASE_NAME, LoadSQLUtil.loadSQL(LoadSQLUtil.SQL_DDL_KS_KM)))
.addInitScriptPathAndContent(LoadSQLUtil.SQL_DDL_LOGI_JOB, String.format("use %s;\n%s", DATABASE_NAME, LoadSQLUtil.loadSQL(LoadSQLUtil.SQL_DDL_LOGI_JOB)))
.addInitScriptPathAndContent(LoadSQLUtil.SQL_DDL_LOGI_SECURITY, String.format("use %s;\n%s", DATABASE_NAME, LoadSQLUtil.loadSQL(LoadSQLUtil.SQL_DDL_LOGI_SECURITY)))
.addInitScriptPathAndContent(LoadSQLUtil.SQL_DML_KS_KM, String.format("use %s;\n%s", DATABASE_NAME, LoadSQLUtil.loadSQL(LoadSQLUtil.SQL_DML_KS_KM)))
.addInitScriptPathAndContent(LoadSQLUtil.SQL_DML_LOGI, String.format("use %s;\n%s", DATABASE_NAME, LoadSQLUtil.loadSQL(LoadSQLUtil.SQL_DML_LOGI)))
;
@NotNull
public Supplier<Object> jdbcUsername() {
return () -> DB_USERNAME;
}
@NotNull
public Supplier<Object> jdbcPassword() {
return () -> DB_PASSWORD;
}
@NotNull
public Supplier<Object> jdbcUrl() {
return () -> "jdbc:mariadb://"
+ MYSQL_CONTAINER.getHost() + ":" + MYSQL_CONTAINER.getMappedPort(3306)
+ "/" + DATABASE_NAME + DB_PROPERTY;
}
@Override
public void init() {
Startables.deepStart(MYSQL_CONTAINER).join();
}
@Override
public void cleanup() {
}
}
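
For completeness, a hedged sketch of resolving the lazy suppliers this class exposes; this mirrors what KMTestEnvService.setUp() hands to Spring's DynamicPropertyRegistry:

MySQLTestContainer db = new MySQLTestContainer();
db.init(); // starts MySQL 5.7 and replays the DDL/DML init scripts

// The suppliers are lazy; resolving them yields ready-to-use datasource settings.
String url  = String.valueOf(db.jdbcUrl().get());      // jdbc:mariadb://host:port/know_streaming?...
String user = String.valueOf(db.jdbcUsername().get()); // "root"
String pass = String.valueOf(db.jdbcPassword().get()); // "1234567890"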