Mirror of https://github.com/didi/KnowStreaming.git (synced 2026-01-03 11:28:12 +08:00)

Commit: Merge master branch

Changed files include: .github/workflows/ci_build.yml (vendored), 4 changes
@@ -2,9 +2,9 @@ name: KnowStreaming Build
 on:
   push:
-    branches: [ "master", "ve_3.x", "ve_demo_3.x" ]
+    branches: [ "*" ]
   pull_request:
-    branches: [ "master", "ve_3.x", "ve_demo_3.x" ]
+    branches: [ "*" ]

 jobs:
   build:
@@ -101,7 +101,9 @@
 **Click [here](https://doc.knowstreaming.com/product) to get more documentation from the official site**

+**`Product URLs`**
+- [Official site: https://knowstreaming.com](https://knowstreaming.com)
+- [Demo environment: https://demo.knowstreaming.com](https://demo.knowstreaming.com), login account: admin/admin

@@ -53,6 +53,11 @@ INSERT INTO `logi_security_permission` (`id`, `permission_name`, `parent_id`, `l
 INSERT INTO `logi_security_role_permission` (`role_id`, `permission_id`, `is_delete`, `app_name`) VALUES ('1677', '2046', '0', 'know-streaming');
 INSERT INTO `logi_security_role_permission` (`role_id`, `permission_id`, `is_delete`, `app_name`) VALUES ('1677', '2048', '0', 'know-streaming');
 INSERT INTO `logi_security_role_permission` (`role_id`, `permission_id`, `is_delete`, `app_name`) VALUES ('1677', '2050', '0', 'know-streaming');
+
+-- Multi-cluster management permissions, added 2023-07-18
+INSERT INTO `logi_security_permission` (`id`, `permission_name`, `parent_id`, `leaf`, `level`, `description`, `is_delete`, `app_name`) VALUES ('2052', 'Security-User查看密码', '1593', '1', '2', 'Security-User查看密码', '0', 'know-streaming');
+INSERT INTO `logi_security_role_permission` (`role_id`, `permission_id`, `is_delete`, `app_name`) VALUES ('1677', '2052', '0', 'know-streaming');
 ```

 ### Upgrading to version `3.3.0`
@@ -7,7 +7,7 @@
 - [1. Which Kafka versions are supported?](#1支持哪些-kafka-版本)
 - [1. What are the differences between the 2.x and 3.0 versions?](#12x-版本和-30-版本有什么差异)
 - [3. The page shows no traffic data?](#3页面流量信息等无数据)
-- [8.4. How do I fix a failed `Jmx` connection?](#84jmx连接失败如何解决)
+- [4. How do I fix a failed `Jmx` connection?](#4jmx连接失败如何解决)
 - [5. Is there API documentation?](#5有没有-api-文档)
 - [6. Why does a successfully deleted Topic reappear after a while?](#6删除-topic-成功后为何过段时间又出现了)
 - [7. How do I call the API without logging in?](#7如何在不登录的情况下调用接口)
@@ -21,6 +21,7 @@
 - [15. Notes on using Testcontainers in tests](#15测试时使用testcontainers的说明)
 - [16. What to do when the JMX connection fails](#16jmx连接失败怎么办)
 - [17. No data in zk monitoring](#17zk监控无数据问题)
+- [18. Startup fails with NoClassDefFoundError: how to fix it](#18启动失败报noclassdeffounderror如何解决)

 ## 1. Which Kafka versions are supported?
@@ -57,7 +58,7 @@


-## 8.4. How do I fix a failed `Jmx` connection?
+## 4. How do I fix a failed `Jmx` connection?

 - See [Jmx connection configuration & troubleshooting](https://doc.knowstreaming.com/product/9-attachment#91jmx-%E8%BF%9E%E6%8E%A5%E5%A4%B1%E8%B4%A5%E9%97%AE%E9%A2%98%E8%A7%A3%E5%86%B3).
@@ -278,3 +279,31 @@ The zookeeper cluster is healthy, but the KS zk page shows no data for any monitoring metric; `KnowStrea
 ```
 4lw.commands.whitelist=*
 ```
+
+## 18. Startup fails with NoClassDefFoundError: how to fix it
+
+**Symptom:**
+
+```log
+# Startup fails with: nested exception is java.lang.NoClassDefFoundError: Could not initialize class com.didiglobal.logi.job.core.WorkerSingleton$Singleton
+
+2023-08-11 22:54:29.842 [main] ERROR class=org.springframework.boot.SpringApplication||Application run failed
+org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'quartzScheduler' defined in class path resource [com/didiglobal/logi/job/LogIJobAutoConfiguration.class]: Bean instantiation via factory method failed; nested exception is org.springframework.beans.BeanInstantiationException: Failed to instantiate [com.didiglobal.logi.job.core.Scheduler]: Factory method 'quartzScheduler' threw exception; nested exception is java.lang.NoClassDefFoundError: Could not initialize class com.didiglobal.logi.job.core.WorkerSingleton$Singleton
+	at org.springframework.beans.factory.support.ConstructorResolver.instantiate(ConstructorResolver.java:657)
+```
+
+**Cause:**
+
+1. `Logi-Job`, a dependency of `KnowStreaming`, fails to initialize `WorkerSingleton$Singleton`.
+2. While initializing, `WorkerSingleton$Singleton` collects some operating-system information; if that collection throws an exception, the class fails to initialize.
+
+**Temporary suggestion:**
+
+There is no firm timeline for the fix in `Logi-Job`. In our earlier tests, `Windows`, `Mac`, and `CentOS` generally ran fine, so if possible, deploy `KnowStreaming` on one of those systems for now.
+
+If startup also fails on `Windows`, `Mac`, or `CentOS`, retry 2-3 times to see whether it still fails, or try a different machine.
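Editor's aside on the failure mode in item 18: "Could not initialize class" means the JVM already attempted the class's static initialization once and it threw. A minimal, self-contained sketch of that mechanism (illustration only, not KnowStreaming or Logi-Job source; the constructor's throw is a stand-in for whatever OS probe fails inside `WorkerSingleton$Singleton`):

```java
/**
 * Illustration: why a failed static initializer surfaces as
 * "NoClassDefFoundError: Could not initialize class ..." on later accesses.
 */
public class StaticInitDemo {

    static class WorkerSingleton {
        private WorkerSingleton() {
            // Stand-in for the OS-information probe that can throw.
            throw new IllegalStateException("failed to read OS info");
        }

        // Holder idiom: INSTANCE is built on first access of Singleton.
        private static class Singleton {
            private static final WorkerSingleton INSTANCE = new WorkerSingleton();
        }

        static WorkerSingleton getInstance() {
            return Singleton.INSTANCE;
        }
    }

    public static void main(String[] args) {
        try {
            WorkerSingleton.getInstance();
        } catch (ExceptionInInitializerError e) {
            // First access: the original cause is still attached here.
            System.out.println("first access: " + e.getCause());
        }
        try {
            WorkerSingleton.getInstance();
        } catch (NoClassDefFoundError e) {
            // Later accesses: only "Could not initialize class ..." remains,
            // which is exactly what the KnowStreaming startup log shows.
            System.out.println("second access: " + e);
        }
    }
}
```

This is why the log carries no root cause: Spring's `quartzScheduler` factory touches the class after the first failure, so only the bare `NoClassDefFoundError` is left to report.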
@@ -12,6 +12,7 @@ import com.xiaojukeji.know.streaming.km.common.bean.po.connect.ConnectorPO;
 import com.xiaojukeji.know.streaming.km.common.bean.vo.connect.connector.ConnectorStateVO;
 import com.xiaojukeji.know.streaming.km.common.constant.connect.KafkaConnectConstant;
 import com.xiaojukeji.know.streaming.km.core.service.connect.connector.ConnectorService;
+import com.xiaojukeji.know.streaming.km.core.service.connect.connector.OpConnectorService;
 import com.xiaojukeji.know.streaming.km.core.service.connect.plugin.PluginService;
 import com.xiaojukeji.know.streaming.km.core.service.connect.worker.WorkerConnectorService;
 import org.apache.kafka.connect.runtime.AbstractStatus;
@@ -30,6 +31,9 @@ public class ConnectorManagerImpl implements ConnectorManager {
     @Autowired
     private ConnectorService connectorService;

+    @Autowired
+    private OpConnectorService opConnectorService;
+
     @Autowired
     private WorkerConnectorService workerConnectorService;

@@ -44,24 +48,24 @@ public class ConnectorManagerImpl implements ConnectorManager {
             return Result.buildFromRSAndMsg(ResultStatus.PARAM_ILLEGAL, "Connector参数错误");
         }

-        return connectorService.updateConnectorConfig(connectClusterId, connectorName, configs, operator);
+        return opConnectorService.updateConnectorConfig(connectClusterId, connectorName, configs, operator);
     }

     @Override
     public Result<Void> createConnector(ConnectorCreateDTO dto, String operator) {
         dto.getSuitableConfig().put(KafkaConnectConstant.MIRROR_MAKER_NAME_FIELD_NAME, dto.getConnectorName());

-        Result<KSConnectorInfo> createResult = connectorService.createConnector(dto.getConnectClusterId(), dto.getConnectorName(), dto.getSuitableConfig(), operator);
+        Result<KSConnectorInfo> createResult = opConnectorService.createConnector(dto.getConnectClusterId(), dto.getConnectorName(), dto.getSuitableConfig(), operator);
         if (createResult.failed()) {
             return Result.buildFromIgnoreData(createResult);
         }

-        Result<KSConnector> ksConnectorResult = connectorService.getAllConnectorInfoFromCluster(dto.getConnectClusterId(), dto.getConnectorName());
+        Result<KSConnector> ksConnectorResult = connectorService.getConnectorFromKafka(dto.getConnectClusterId(), dto.getConnectorName());
         if (ksConnectorResult.failed()) {
             return Result.buildFromRSAndMsg(ResultStatus.SUCCESS, "创建成功,但是获取元信息失败,页面元信息会存在1分钟延迟");
         }

-        connectorService.addNewToDB(ksConnectorResult.getData());
+        opConnectorService.addNewToDB(ksConnectorResult.getData());
         return Result.buildSuc();
     }

@@ -69,12 +73,12 @@ public class ConnectorManagerImpl implements ConnectorManager {
     public Result<Void> createConnector(ConnectorCreateDTO dto, String heartbeatName, String checkpointName, String operator) {
         dto.getSuitableConfig().put(KafkaConnectConstant.MIRROR_MAKER_NAME_FIELD_NAME, dto.getConnectorName());

-        Result<KSConnectorInfo> createResult = connectorService.createConnector(dto.getConnectClusterId(), dto.getConnectorName(), dto.getSuitableConfig(), operator);
+        Result<KSConnectorInfo> createResult = opConnectorService.createConnector(dto.getConnectClusterId(), dto.getConnectorName(), dto.getSuitableConfig(), operator);
         if (createResult.failed()) {
             return Result.buildFromIgnoreData(createResult);
         }

-        Result<KSConnector> ksConnectorResult = connectorService.getAllConnectorInfoFromCluster(dto.getConnectClusterId(), dto.getConnectorName());
+        Result<KSConnector> ksConnectorResult = connectorService.getConnectorFromKafka(dto.getConnectClusterId(), dto.getConnectorName());
         if (ksConnectorResult.failed()) {
             return Result.buildFromRSAndMsg(ResultStatus.SUCCESS, "创建成功,但是获取元信息失败,页面元信息会存在1分钟延迟");
         }

@@ -83,7 +87,7 @@ public class ConnectorManagerImpl implements ConnectorManager {
         connector.setCheckpointConnectorName(checkpointName);
         connector.setHeartbeatConnectorName(heartbeatName);

-        connectorService.addNewToDB(connector);
+        opConnectorService.addNewToDB(connector);
         return Result.buildSuc();
     }

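Taken together, these hunks (and the `MirrorMakerManagerImpl` hunks that follow) split connector handling into two services: read paths stay on `ConnectorService`, while mutating paths move to the new `OpConnectorService`. A stub-based sketch of the split as it can be inferred from this commit's call sites alone; the placeholder types and the parameter types are assumptions, not the project's actual interface files:

```java
import java.util.List;
import java.util.Map;

// Placeholder types so the sketch stands alone; the real project types differ.
class Result<T> { /* status code + data, elided */ }
class KSConnector { }
class KSConnectorInfo { }
class ConnectCluster { }

// Read side: queries against Kafka Connect and the local DB.
interface ConnectorService {
    Result<KSConnector> getConnectorFromKafka(Long connectClusterId, String connectorName);
    Result<List<String>> listConnectorsFromCluster(ConnectCluster connectCluster);
}

// Write side: every mutating call the managers now route through opConnectorService.
interface OpConnectorService {
    Result<KSConnectorInfo> createConnector(Long connectClusterId, String connectorName,
                                            Map<String, Object> configs, String operator);
    Result<Void> updateConnectorConfig(Long connectClusterId, String connectorName,
                                       Map<String, Object> configs, String operator);
    Result<Void> deleteConnector(Long connectClusterId, String connectorName, String operator);
    Result<Void> restartConnector(Long connectClusterId, String connectorName, String operator);
    Result<Void> stopConnector(Long connectClusterId, String connectorName, String operator);
    Result<Void> resumeConnector(Long connectClusterId, String connectorName, String operator);
    void addNewToDB(KSConnector connector);
}
```

The `operator` argument on every write suggests the split also concentrates audit logging of mutations in one place, though the commit itself does not show that code.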
@@ -37,6 +37,7 @@ import com.xiaojukeji.know.streaming.km.common.utils.ValidateUtils;
 import com.xiaojukeji.know.streaming.km.core.service.cluster.ClusterPhyService;
 import com.xiaojukeji.know.streaming.km.core.service.connect.cluster.ConnectClusterService;
 import com.xiaojukeji.know.streaming.km.core.service.connect.connector.ConnectorService;
+import com.xiaojukeji.know.streaming.km.core.service.connect.connector.OpConnectorService;
 import com.xiaojukeji.know.streaming.km.core.service.connect.mm2.MirrorMakerMetricService;
 import com.xiaojukeji.know.streaming.km.core.service.connect.plugin.PluginService;
 import com.xiaojukeji.know.streaming.km.core.service.connect.worker.WorkerConnectorService;
@@ -67,6 +68,9 @@ public class MirrorMakerManagerImpl implements MirrorMakerManager {
     @Autowired
     private ConnectorService connectorService;

+    @Autowired
+    private OpConnectorService opConnectorService;
+
     @Autowired
     private WorkerConnectorService workerConnectorService;

@@ -156,20 +160,20 @@ public class MirrorMakerManagerImpl implements MirrorMakerManager {

         Result<Void> rv = Result.buildSuc();
         if (!ValidateUtils.isBlank(connectorPO.getCheckpointConnectorName())) {
-            rv = connectorService.deleteConnector(connectClusterId, connectorPO.getCheckpointConnectorName(), operator);
+            rv = opConnectorService.deleteConnector(connectClusterId, connectorPO.getCheckpointConnectorName(), operator);
         }
         if (rv.failed()) {
             return rv;
         }

         if (!ValidateUtils.isBlank(connectorPO.getHeartbeatConnectorName())) {
-            rv = connectorService.deleteConnector(connectClusterId, connectorPO.getHeartbeatConnectorName(), operator);
+            rv = opConnectorService.deleteConnector(connectClusterId, connectorPO.getHeartbeatConnectorName(), operator);
         }
         if (rv.failed()) {
             return rv;
         }

-        return connectorService.deleteConnector(connectClusterId, sourceConnectorName, operator);
+        return opConnectorService.deleteConnector(connectClusterId, sourceConnectorName, operator);
     }

     @Override
@@ -181,20 +185,20 @@ public class MirrorMakerManagerImpl implements MirrorMakerManager {

         Result<Void> rv = Result.buildSuc();
         if (!ValidateUtils.isBlank(connectorPO.getCheckpointConnectorName()) && dto.getCheckpointConnectorConfigs() != null) {
-            rv = connectorService.updateConnectorConfig(dto.getConnectClusterId(), connectorPO.getCheckpointConnectorName(), dto.getCheckpointConnectorConfigs(), operator);
+            rv = opConnectorService.updateConnectorConfig(dto.getConnectClusterId(), connectorPO.getCheckpointConnectorName(), dto.getCheckpointConnectorConfigs(), operator);
         }
         if (rv.failed()) {
             return rv;
         }

         if (!ValidateUtils.isBlank(connectorPO.getHeartbeatConnectorName()) && dto.getHeartbeatConnectorConfigs() != null) {
-            rv = connectorService.updateConnectorConfig(dto.getConnectClusterId(), connectorPO.getHeartbeatConnectorName(), dto.getHeartbeatConnectorConfigs(), operator);
+            rv = opConnectorService.updateConnectorConfig(dto.getConnectClusterId(), connectorPO.getHeartbeatConnectorName(), dto.getHeartbeatConnectorConfigs(), operator);
         }
         if (rv.failed()) {
             return rv;
         }

-        return connectorService.updateConnectorConfig(dto.getConnectClusterId(), dto.getConnectorName(), dto.getSuitableConfig(), operator);
+        return opConnectorService.updateConnectorConfig(dto.getConnectClusterId(), dto.getConnectorName(), dto.getSuitableConfig(), operator);
     }

     @Override
@@ -206,20 +210,20 @@ public class MirrorMakerManagerImpl implements MirrorMakerManager {

         Result<Void> rv = Result.buildSuc();
         if (!ValidateUtils.isBlank(connectorPO.getCheckpointConnectorName())) {
-            rv = connectorService.restartConnector(connectClusterId, connectorPO.getCheckpointConnectorName(), operator);
+            rv = opConnectorService.restartConnector(connectClusterId, connectorPO.getCheckpointConnectorName(), operator);
         }
         if (rv.failed()) {
             return rv;
         }

         if (!ValidateUtils.isBlank(connectorPO.getHeartbeatConnectorName())) {
-            rv = connectorService.restartConnector(connectClusterId, connectorPO.getHeartbeatConnectorName(), operator);
+            rv = opConnectorService.restartConnector(connectClusterId, connectorPO.getHeartbeatConnectorName(), operator);
         }
         if (rv.failed()) {
             return rv;
         }

-        return connectorService.restartConnector(connectClusterId, sourceConnectorName, operator);
+        return opConnectorService.restartConnector(connectClusterId, sourceConnectorName, operator);
     }

     @Override
@@ -231,20 +235,20 @@ public class MirrorMakerManagerImpl implements MirrorMakerManager {

         Result<Void> rv = Result.buildSuc();
         if (!ValidateUtils.isBlank(connectorPO.getCheckpointConnectorName())) {
-            rv = connectorService.stopConnector(connectClusterId, connectorPO.getCheckpointConnectorName(), operator);
+            rv = opConnectorService.stopConnector(connectClusterId, connectorPO.getCheckpointConnectorName(), operator);
         }
         if (rv.failed()) {
             return rv;
         }

         if (!ValidateUtils.isBlank(connectorPO.getHeartbeatConnectorName())) {
-            rv = connectorService.stopConnector(connectClusterId, connectorPO.getHeartbeatConnectorName(), operator);
+            rv = opConnectorService.stopConnector(connectClusterId, connectorPO.getHeartbeatConnectorName(), operator);
         }
         if (rv.failed()) {
             return rv;
         }

-        return connectorService.stopConnector(connectClusterId, sourceConnectorName, operator);
+        return opConnectorService.stopConnector(connectClusterId, sourceConnectorName, operator);
     }

     @Override
@@ -256,20 +260,20 @@ public class MirrorMakerManagerImpl implements MirrorMakerManager {

         Result<Void> rv = Result.buildSuc();
         if (!ValidateUtils.isBlank(connectorPO.getCheckpointConnectorName())) {
-            rv = connectorService.resumeConnector(connectClusterId, connectorPO.getCheckpointConnectorName(), operator);
+            rv = opConnectorService.resumeConnector(connectClusterId, connectorPO.getCheckpointConnectorName(), operator);
         }
         if (rv.failed()) {
             return rv;
         }

         if (!ValidateUtils.isBlank(connectorPO.getHeartbeatConnectorName())) {
-            rv = connectorService.resumeConnector(connectClusterId, connectorPO.getHeartbeatConnectorName(), operator);
+            rv = opConnectorService.resumeConnector(connectClusterId, connectorPO.getHeartbeatConnectorName(), operator);
         }
         if (rv.failed()) {
             return rv;
         }

-        return connectorService.resumeConnector(connectClusterId, sourceConnectorName, operator);
+        return opConnectorService.resumeConnector(connectClusterId, sourceConnectorName, operator);
     }

     @Override
@@ -44,7 +44,7 @@ public class ConnectConnectorMetricCollector extends AbstractConnectMetricCollec
         Long connectClusterId = connectCluster.getId();

         List<VersionControlItem> items = versionControlService.listVersionControlItem(this.getClusterVersion(connectCluster), collectorType().getCode());
-        Result<List<String>> connectorList = connectorService.listConnectorsFromCluster(connectClusterId);
+        Result<List<String>> connectorList = connectorService.listConnectorsFromCluster(connectCluster);

         FutureWaitUtil<Void> future = this.getFutureUtilByClusterPhyId(connectClusterId);

@@ -1,7 +1,6 @@
 package com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.connect;

 import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.BaseMetrics;
-import lombok.AllArgsConstructor;
 import lombok.Data;
 import lombok.NoArgsConstructor;
 import lombok.ToString;
@@ -12,20 +11,18 @@ import lombok.ToString;
  */
 @Data
 @NoArgsConstructor
-@AllArgsConstructor
 @ToString
 public class ConnectClusterMetrics extends BaseMetrics {
-    private Long connectClusterId;
+    protected Long connectClusterId;

     public ConnectClusterMetrics(Long clusterPhyId, Long connectClusterId) {
         super(clusterPhyId);
         this.connectClusterId = connectClusterId;
     }

-    public static ConnectClusterMetrics initWithMetric(Long connectClusterId, String metric, Float value) {
-        ConnectClusterMetrics brokerMetrics = new ConnectClusterMetrics(connectClusterId, connectClusterId);
-        brokerMetrics.putMetric(metric, value);
-        return brokerMetrics;
-    }
+    public ConnectClusterMetrics(Long connectClusterId, String metricName, Float metricValue) {
+        this(null, connectClusterId);
+        this.putMetric(metricName, metricValue);
+    }

     @Override
@@ -1,7 +1,5 @@
 package com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.connect;

-import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.BaseMetrics;
-import lombok.AllArgsConstructor;
 import lombok.Data;
 import lombok.NoArgsConstructor;
 import lombok.ToString;
@@ -11,25 +9,19 @@ import lombok.ToString;
  * @date 2022/11/2
  */
 @Data
-@AllArgsConstructor
 @NoArgsConstructor
 @ToString
-public class ConnectWorkerMetrics extends BaseMetrics {
-
-    private Long connectClusterId;
-
+public class ConnectWorkerMetrics extends ConnectClusterMetrics {
     private String workerId;

-    public static ConnectWorkerMetrics initWithMetric(Long connectClusterId, String workerId, String metric, Float value) {
-        ConnectWorkerMetrics connectWorkerMetrics = new ConnectWorkerMetrics();
-        connectWorkerMetrics.setConnectClusterId(connectClusterId);
-        connectWorkerMetrics.setWorkerId(workerId);
-        connectWorkerMetrics.putMetric(metric, value);
-        return connectWorkerMetrics;
-    }
+    public ConnectWorkerMetrics(Long connectClusterId, String workerId, String metricName, Float metricValue) {
+        super(null, connectClusterId);
+        this.workerId = workerId;
+        this.putMetric(metricName, metricValue);
+    }

     @Override
     public String unique() {
-        return "KCC@" + clusterPhyId + "@" + connectClusterId + "@" + workerId;
+        return "KCW@" + clusterPhyId + "@" + connectClusterId + "@" + workerId;
     }
 }
@@ -1,6 +1,5 @@
 package com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.connect;

-import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.BaseMetrics;
 import lombok.Data;
 import lombok.NoArgsConstructor;
 import lombok.ToString;
@@ -12,24 +11,21 @@ import lombok.ToString;
 @Data
 @NoArgsConstructor
 @ToString
-public class ConnectorMetrics extends BaseMetrics {
-    private Long connectClusterId;
+public class ConnectorMetrics extends ConnectClusterMetrics {
+    protected String connectorName;

-    private String connectorName;
+    protected String connectorNameAndClusterId;

-    private String connectorNameAndClusterId;
-
     public ConnectorMetrics(Long connectClusterId, String connectorName) {
-        super(null);
+        super(null, connectClusterId);
         this.connectClusterId = connectClusterId;
         this.connectorName = connectorName;
         this.connectorNameAndClusterId = connectorName + "#" + connectClusterId;
     }

-    public static ConnectorMetrics initWithMetric(Long connectClusterId, String connectorName, String metricName, Float value) {
-        ConnectorMetrics metrics = new ConnectorMetrics(connectClusterId, connectorName);
-        metrics.putMetric(metricName, value);
-        return metrics;
-    }
+    public ConnectorMetrics(Long connectClusterId, String connectorName, String metricName, Float metricValue) {
+        this(connectClusterId, connectorName);
+        this.putMetric(metricName, metricValue);
+    }

     @Override
@@ -1,6 +1,5 @@
 package com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.connect;

-import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.BaseMetrics;
 import lombok.Data;
 import lombok.NoArgsConstructor;
 import lombok.ToString;
@@ -12,11 +11,7 @@ import lombok.ToString;
 @Data
 @NoArgsConstructor
 @ToString
-public class ConnectorTaskMetrics extends BaseMetrics {
-    private Long connectClusterId;
-
-    private String connectorName;
-
+public class ConnectorTaskMetrics extends ConnectorMetrics {
     private Integer taskId;

     public ConnectorTaskMetrics(Long connectClusterId, String connectorName, Integer taskId) {
@@ -25,14 +20,13 @@ public class ConnectorTaskMetrics extends BaseMetrics {
         this.taskId = taskId;
     }

-    public static ConnectorTaskMetrics initWithMetric(Long connectClusterId, String connectorName, Integer taskId, String metricName, Float value) {
-        ConnectorTaskMetrics metrics = new ConnectorTaskMetrics(connectClusterId, connectorName, taskId);
-        metrics.putMetric(metricName, value);
-        return metrics;
-    }
+    public ConnectorTaskMetrics(Long connectClusterId, String connectorName, Integer taskId, String metricName, Float metricValue) {
+        this(connectClusterId, connectorName, taskId);
+        this.putMetric(metricName, metricValue);
+    }

     @Override
     public String unique() {
-        return "KCOR@" + connectClusterId + "@" + connectorName + "@" + taskId;
+        return "KCORT@" + connectClusterId + "@" + connectorName + "@" + taskId;
     }
 }
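Condensing the four metrics hunks above: the classes previously all extended `BaseMetrics` directly, each re-declaring `connectClusterId` (and `connectorName`), and built instances through static `initWithMetric(...)` factories. After this commit they form one inheritance chain with overloaded constructors, and the `unique()` key prefixes are disambiguated ("KCC@" to "KCW@" for workers, "KCOR@" to "KCORT@" for tasks), presumably so keys for different metric kinds can no longer collide. A compressed, stub-based view of the result (editor's summary, bodies elided, `BaseMetrics` stubbed):

```java
// Editor's summary, not project source: the metrics hierarchy after this commit.
// BaseMetrics is stubbed; in the repo it also carries the metric map and unique().
class BaseMetrics {
    protected Long clusterPhyId;
    BaseMetrics(Long clusterPhyId) { this.clusterPhyId = clusterPhyId; }
    void putMetric(String metricName, Float metricValue) { /* stores into a map; elided */ }
}

class ConnectClusterMetrics extends BaseMetrics {
    protected Long connectClusterId; // was private and duplicated in every class before

    ConnectClusterMetrics(Long clusterPhyId, Long connectClusterId) {
        super(clusterPhyId);
        this.connectClusterId = connectClusterId;
    }

    // Replaces the static initWithMetric(...) factory.
    ConnectClusterMetrics(Long connectClusterId, String metricName, Float metricValue) {
        this(null, connectClusterId);
        putMetric(metricName, metricValue);
    }
}

class ConnectWorkerMetrics extends ConnectClusterMetrics {
    private final String workerId;

    // unique() prefix changed "KCC@" -> "KCW@" so worker keys stay distinct.
    ConnectWorkerMetrics(Long connectClusterId, String workerId, String metricName, Float metricValue) {
        super(null, connectClusterId);
        this.workerId = workerId;
        putMetric(metricName, metricValue);
    }
}

class ConnectorMetrics extends ConnectClusterMetrics {
    protected String connectorName;

    ConnectorMetrics(Long connectClusterId, String connectorName) {
        super(null, connectClusterId);
        this.connectorName = connectorName;
    }
}

class ConnectorTaskMetrics extends ConnectorMetrics {
    private final Integer taskId;

    // unique() prefix changed "KCOR@" -> "KCORT@" to keep task keys distinct.
    ConnectorTaskMetrics(Long connectClusterId, String connectorName, Integer taskId) {
        super(connectClusterId, connectorName);
        this.taskId = taskId;
    }
}
```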
@@ -0,0 +1,16 @@
+package com.xiaojukeji.know.streaming.km.common.bean.event.cluster.connect;
+
+import com.xiaojukeji.know.streaming.km.common.bean.event.cluster.ClusterPhyBaseEvent;
+import lombok.Getter;
+
+/**
+ * Cluster deleted event
+ * @author zengqiao
+ * @date 23/08/15
+ */
+@Getter
+public class ClusterPhyDeletedEvent extends ClusterPhyBaseEvent {
+    public ClusterPhyDeletedEvent(Object source, Long clusterPhyId) {
+        super(source, clusterPhyId);
+    }
+}
@@ -16,6 +16,8 @@ import com.xiaojukeji.know.streaming.km.common.bean.vo.metrics.line.MetricMultiL
 import com.xiaojukeji.know.streaming.km.common.constant.connect.KafkaConnectConstant;
 import com.xiaojukeji.know.streaming.km.common.utils.CommonUtils;
 import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
+import com.xiaojukeji.know.streaming.km.common.utils.Triple;
+import com.xiaojukeji.know.streaming.km.common.utils.ValidateUtils;

 import java.util.ArrayList;
 import java.util.HashMap;
@@ -24,6 +26,9 @@ import java.util.Map;
 import java.util.function.Function;
 import java.util.stream.Collectors;

+import static com.xiaojukeji.know.streaming.km.common.constant.connect.KafkaConnectConstant.MIRROR_MAKER_SOURCE_CLUSTER_BOOTSTRAP_SERVERS_FIELD_NAME;
+import static com.xiaojukeji.know.streaming.km.common.constant.connect.KafkaConnectConstant.MIRROR_MAKER_TARGET_CLUSTER_BOOTSTRAP_SERVERS_FIELD_NAME;
+
 public class ConnectConverter {
     public static ConnectorBasicCombineExistVO convert2BasicVO(ConnectCluster connectCluster, ConnectorPO connectorPO) {
         ConnectorBasicCombineExistVO vo = new ConnectorBasicCombineExistVO();
@@ -153,6 +158,66 @@ public class ConnectConverter {
         return ksConnector;
     }

+    public static List<KSConnector> convertAndSupplyMirrorMakerInfo(ConnectCluster connectCluster, List<Triple<KSConnectorInfo, List<String>, KSConnectorStateInfo>> connectorFullInfoList) {
+        // <connectorName, targetBootstrapServers + "@" + sourceBootstrapServers>
+        Map<String, String> sourceMap = new HashMap<>();
+
+        // <targetBootstrapServers + "@" + sourceBootstrapServers, connectorName>
+        Map<String, String> heartbeatMap = new HashMap<>();
+        Map<String, String> checkpointMap = new HashMap<>();
+
+        // Build the lookup map for each connector type
+        connectorFullInfoList.forEach(connector -> {
+            Map<String, String> mm2Map = null;
+            if (KafkaConnectConstant.MIRROR_MAKER_SOURCE_CONNECTOR_TYPE.equals(connector.v1().getConfig().get(KafkaConnectConstant.CONNECTOR_CLASS_FILED_NAME))) {
+                mm2Map = sourceMap;
+            } else if (KafkaConnectConstant.MIRROR_MAKER_HEARTBEAT_CONNECTOR_TYPE.equals(connector.v1().getConfig().get(KafkaConnectConstant.CONNECTOR_CLASS_FILED_NAME))) {
+                mm2Map = heartbeatMap;
+            } else if (KafkaConnectConstant.MIRROR_MAKER_CHECKPOINT_CONNECTOR_TYPE.equals(connector.v1().getConfig().get(KafkaConnectConstant.CONNECTOR_CLASS_FILED_NAME))) {
+                mm2Map = checkpointMap;
+            }
+
+            String targetBootstrapServers = connector.v1().getConfig().get(MIRROR_MAKER_TARGET_CLUSTER_BOOTSTRAP_SERVERS_FIELD_NAME);
+            String sourceBootstrapServers = connector.v1().getConfig().get(MIRROR_MAKER_SOURCE_CLUSTER_BOOTSTRAP_SERVERS_FIELD_NAME);
+
+            if (ValidateUtils.anyBlank(targetBootstrapServers, sourceBootstrapServers) || mm2Map == null) {
+                return;
+            }
+
+            if (KafkaConnectConstant.MIRROR_MAKER_SOURCE_CONNECTOR_TYPE.equals(connector.v1().getConfig().get(KafkaConnectConstant.CONNECTOR_CLASS_FILED_NAME))) {
+                // The source type is keyed differently from heartbeat & checkpoint
+                mm2Map.put(connector.v1().getName(), targetBootstrapServers + "@" + sourceBootstrapServers);
+            } else {
+                mm2Map.put(targetBootstrapServers + "@" + sourceBootstrapServers, connector.v1().getName());
+            }
+        });
+
+        List<KSConnector> connectorList = new ArrayList<>();
+        connectorFullInfoList.forEach(connector -> {
+            // Convert and add to the list
+            KSConnector ksConnector = ConnectConverter.convert2KSConnector(
+                    connectCluster.getKafkaClusterPhyId(),
+                    connectCluster.getId(),
+                    connector.v1(),
+                    connector.v3(),
+                    connector.v2()
+            );
+            connectorList.add(ksConnector);
+
+            // Fill in the MM2 info
+            String targetAndSource = sourceMap.get(ksConnector.getConnectorName());
+            if (ValidateUtils.isBlank(targetAndSource)) {
+                return;
+            }
+
+            ksConnector.setHeartbeatConnectorName(heartbeatMap.getOrDefault(targetAndSource, ""));
+            ksConnector.setCheckpointConnectorName(checkpointMap.getOrDefault(targetAndSource, ""));
+        });
+
+        return connectorList;
+    }
+
     private static String genConnectorKey(Long connectorId, String connectorName){
         return connectorId + "#" + connectorName;
     }
@@ -0,0 +1,50 @@
+package com.xiaojukeji.know.streaming.km.common.enums.connect;
+
+import org.apache.kafka.connect.runtime.AbstractStatus;
+
+/**
+ * Connector run state
+ * @see AbstractStatus
+ */
+public enum ConnectStatusEnum {
+    UNASSIGNED(0, "UNASSIGNED"),
+
+    RUNNING(1, "RUNNING"),
+
+    PAUSED(2, "PAUSED"),
+
+    FAILED(3, "FAILED"),
+
+    DESTROYED(4, "DESTROYED"),
+
+    UNKNOWN(-1, "UNKNOWN")
+
+    ;
+
+    ConnectStatusEnum(int status, String value) {
+        this.status = status;
+        this.value = value;
+    }
+
+    private final int status;
+
+    private final String value;
+
+    public static ConnectStatusEnum getByValue(String value) {
+        for (ConnectStatusEnum statusEnum : ConnectStatusEnum.values()) {
+            if (statusEnum.value.equals(value)) {
+                return statusEnum;
+            }
+        }
+
+        return ConnectStatusEnum.UNKNOWN;
+    }
+
+    public int getStatus() {
+        return status;
+    }
+
+    public String getValue() {
+        return value;
+    }
+}
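The enum mirrors the state strings that Kafka Connect's `AbstractStatus.State` reports (`UNASSIGNED`, `RUNNING`, `PAUSED`, `FAILED`, `DESTROYED`), and `getByValue` makes the mapping total by falling back to `UNKNOWN`. A short usage sketch (editor's illustration; the REST payload shape is the standard Connect status response, not something this commit defines):

```java
// Illustrative only: mapping a state string such as the one returned by
// GET /connectors/{name}/status -> {"connector":{"state":"RUNNING",...}}
// into the enum, with unknown or newer states degrading to UNKNOWN.
public class ConnectStatusEnumDemo {
    public static void main(String[] args) {
        System.out.println(ConnectStatusEnum.getByValue("RUNNING"));    // RUNNING
        System.out.println(ConnectStatusEnum.getByValue("RESTARTING")); // UNKNOWN (no enum entry)
        System.out.println(ConnectStatusEnum.getByValue("running"));    // UNKNOWN (comparison is case-sensitive)
    }
}
```

Falling back to `UNKNOWN` instead of throwing keeps metric collection alive if a newer Connect version introduces a state the enum has not caught up with.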
@@ -96,7 +96,7 @@ const RoleDetailAndUpdate = forwardRef((props, ref): JSX.Element => {
         arr.push(permissions[i].id);
       }
     });
-    formData.permissionIdList = formData.permissionIdList.flat();
+    formData.permissionIdList = formData.permissionIdList.flat().filter((item) => item !== undefined);
     setConfirmLoading(true);
     request(api.editRole, {
       method: type === RoleOperate.Add ? 'POST' : 'PUT',
@@ -250,7 +250,7 @@ const RoleDetailAndUpdate = forwardRef((props, ref): JSX.Element => {
       <CheckboxGroupContainer
         key={i}
         formInstance={form}
-        fieldName="permissionIdList"
+        fieldName={`permissionIdList`}
         options={permission.options}
         initSelectedOptions={initSelectedPermissions[permission.id] || []}
         groupIdx={i}
@@ -34,11 +34,11 @@ module.exports = {
   proxy: {
     '/ks-km/api/v3': {
       changeOrigin: true,
-      target: 'https://api-kylin-xg02.intra.xiaojukeji.com/ks-km/',
+      target: 'http://127.0.0.1/',
     },
     '/logi-security/api/v1': {
       changeOrigin: true,
-      target: 'https://api-kylin-xg02.intra.xiaojukeji.com/ks-km/',
+      target: 'http://127.0.0.1/',
     },
   },
 },
@@ -10004,6 +10004,12 @@
         }
       }
     },
+    "pubsub-js": {
+      "version": "1.9.4",
+      "resolved": "https://registry.npmmirror.com/pubsub-js/-/pubsub-js-1.9.4.tgz",
+      "integrity": "sha512-hJYpaDvPH4w8ZX/0Fdf9ma1AwRgU353GfbaVfPjfJQf1KxZ2iHaHl3fAUw1qlJIR5dr4F3RzjGaWohYUEyoh7A==",
+      "dev": true
+    },
     "pump": {
       "version": "3.0.0",
       "resolved": "https://registry.npmmirror.com/pump/-/pump-3.0.0.tgz",
@@ -82,6 +82,7 @@
     "@types/lodash": "^4.14.171",
     "@types/node": "^12.12.25",
     "@types/pubsub-js": "^1.5.18",
+    "pubsub-js": "^1.5.18",
     "@typescript-eslint/eslint-plugin": "4.13.0",
     "@typescript-eslint/parser": "4.13.0",
     "babel-eslint": "10.1.0",
@@ -95,7 +95,7 @@ const api = {
     getApi(`/clusters/${clusterPhyId}/groups/${groupName}/partitions`),
   resetGroupOffset: () => getApi('/group-offsets'),
   getGroupOverview: (clusterPhyId: number) => getApi(`/clusters/${clusterPhyId}/groups-overview`),
+  deleteGroupOffset: () => getApi('/group-offsets'),
   // Topic list
   getTopicsList: (clusterPhyId: number) => getApi(`/clusters/${clusterPhyId}/topics-overview`),
   getReassignmentList: () => getApi(`/reassignment/topics-overview`),
@@ -108,6 +108,7 @@ const api = {
   getTopicState: (clusterPhyId: number, topicName: string) => getApi(`/clusters/${clusterPhyId}/topics/${topicName}/state`),
   getTopicMetadata: (clusterPhyId: number, topicName: string) =>
     getApi(`/clusters/${clusterPhyId}/topics/${topicName}/metadata-combine-exist`),
+  deleteTopicData: () => getApi(`/topics/truncate-topic`),

   // Latest metric values
   getMetricPointsLatest: (clusterPhyId: number) => getApi(`/physical-clusters/${clusterPhyId}/latest-metrics`),
@@ -20,6 +20,7 @@ import { getLicenseInfo } from './constants/common';
 import api from './api';
 import ClusterContainer from './pages/index';
 import ksLogo from './assets/ks-logo.png';
+import { ClustersPermissionMap } from "./pages/CommonConfig";

 interface ILocaleMap {
   [index: string]: any;
@@ -78,6 +79,9 @@ const AppContent = (props: { setlanguage: (language: string) => void }) => {
   const userInfo = localStorage.getItem('userInfo');
   const [curActiveAppName, setCurActiveAppName] = useState('');
   const [versionInfo, setVersionInfo] = useState<VersionInfo>();
+  const [global] = AppContainer.useGlobalValue();
+  const quickEntries = [];

   useEffect(() => {
     if (pathname.startsWith('/config')) {
       setCurActiveAppName('config');
@@ -93,6 +97,23 @@ const AppContent = (props: { setlanguage: (language: string) => void }) => {
     });
   }, []);

+  if (global.hasPermission && global.hasPermission(ClustersPermissionMap.CLUSTERS_MANAGE_VIEW)) {
+    quickEntries.push({
+      icon: <IconFont type="icon-duojiqunguanli" />,
+      txt: '多集群管理',
+      ident: '',
+      active: curActiveAppName === 'cluster',
+    });
+  }
+  if (global.hasPermission && global.hasPermission(ClustersPermissionMap.SYS_MANAGE_VIEW)) {
+    quickEntries.push({
+      icon: <IconFont type="icon-xitongguanli" />,
+      txt: '系统管理',
+      ident: 'config',
+      active: curActiveAppName === 'config',
+    });
+  }
+
   return (
     <DProLayout.Container
       headerProps={{
@@ -103,20 +124,7 @@ const AppContent = (props: { setlanguage: (language: string) => void }) => {
         ),
         username: userInfo ? JSON.parse(userInfo)?.userName : '',
         icon: <DotChartOutlined />,
-        quickEntries: [
-          {
-            icon: <IconFont type="icon-duojiqunguanli" />,
-            txt: '多集群管理',
-            ident: '',
-            active: curActiveAppName === 'cluster',
-          },
-          {
-            icon: <IconFont type="icon-xitongguanli" />,
-            txt: '系统管理',
-            ident: 'config',
-            active: curActiveAppName === 'config',
-          },
-        ],
+        quickEntries: quickEntries,
         isFixed: false,
         userDropMenuItems: [
           <Menu.Item key={0}>
@@ -0,0 +1,10 @@
+import { useCallback, useState } from 'react';
+
+export function useForceRefresh() {
+  const [refreshKey, setRefresh] = useState<number>(0);
+  const forceRefresh: () => void = useCallback(() => {
+    setRefresh((x) => x + 1);
+  }, []);
+
+  return [refreshKey, forceRefresh];
+}
@@ -7,6 +7,9 @@ import { goLogin } from '@src/constants/axiosConfig';
 export enum ClustersPermissionMap {
   CLUSTERS_MANAGE = '多集群管理',
   CLUSTERS_MANAGE_VIEW = '多集群管理查看',
+  // Used only to hide the system-management menu
+  SYS_MANAGE = '系统管理',
+  SYS_MANAGE_VIEW = '系统管理查看',
   // Cluster
   CLUSTER_ADD = '接入集群',
   CLUSTER_DEL = '删除集群',
@@ -39,6 +42,12 @@ export enum ClustersPermissionMap {
   MM2_DELETE = 'MM2-删除',
   MM2_RESTART = 'MM2-重启',
   MM2_STOP_RESUME = 'MM2-暂停&恢复',
+  // Connector
+  CONNECTOR_ADD = 'Connector-新增',
+  CONNECTOR_CHANGE_CONFIG = 'Connector-编辑',
+  CONNECTOR_DELETE = 'Connector-删除',
+  CONNECTOR_RESTART = 'Connector-重启',
+  CONNECTOR_STOP_RESUME = 'Connector-暂停&恢复',
 }

 export interface PermissionNode {
@@ -88,6 +97,13 @@ const CommonConfig = () => {
   clustersPermissions &&
     clustersPermissions.childList.forEach((node: PermissionNode) => node.has && userPermissions.push(node.permissionName));

+  // Collect the permissions the user holds under system management
+  const configPermissions = userPermissionTree.find(
+    (sys: PermissionNode) => sys.permissionName === ClustersPermissionMap.SYS_MANAGE
+  );
+  configPermissions &&
+    configPermissions.childList.forEach((node: PermissionNode) => node.has && userPermissions.push(node.permissionName));
+
   const hasPermission = (permissionName: ClustersPermissionMap) => permissionName && userPermissions.includes(permissionName);

   setGlobal((curState: any) => ({ ...curState, permissions: allPermissions, userPermissions, hasPermission, userInfo }));
@@ -189,7 +189,14 @@ const StepFormFirst = (props: SubFormProps) => {
     const result: FormConnectorConfigs = {
       pluginConfig: {},
     };

+    // Take a copy of the default configuration
+    const defaultPluginConfig: any = {};
+
     pluginConfig.configs.forEach(({ definition }) => {
+      // Take a copy of the default configuration
+      defaultPluginConfig[definition.name] = definition?.defaultValue;
+
       if (!getExistFormItems(pluginType).includes(definition.name)) {
         const pluginConfigs = result.pluginConfig;
         const group = definition.group || 'Others';
@@ -205,7 +212,7 @@ const StepFormFirst = (props: SubFormProps) => {

     Object.keys(result).length &&
       form.setFieldsValue({
-        configs: result,
+        configs: { ...result, defaultPluginConfig, editConnectorConfig: result.connectorConfig },
       });
   })
   .finally(() => props.setSubmitLoading(false));
@@ -816,6 +823,8 @@ const StepFormFifth = (props: SubFormProps) => {
   <InputNumber />
 ) : type.toUpperCase() === 'BOOLEAN' ? (
   <Switch size="small" />
+) : type.toUpperCase() === 'PASSWORD' ? (
+  <Input.Password />
 ) : (
   <Input />
 )}
@@ -947,7 +956,7 @@ export default forwardRef(
   success?: {
     connectClusterId: number;
     connectorName: string;
-    configs: {
+    config: {
       [key: string]: any;
     };
   };
@@ -955,6 +964,7 @@ export default forwardRef(
   }) => void
 ) => {
   const promises: Promise<any>[] = [];
+  const compareConfig = stepsFormRef.current[0].getFieldValue('configs'); // form values from step one
   Object.values(stepsFormRef.current).forEach((form, i) => {
     const promise = form
       .validateFields()
@@ -985,11 +995,22 @@ export default forwardRef(
         const [k, ...v] = l.split('=');
         result[k] = v.join('=');
       });
+
+      const editConnectorConfig = operateInfo.type === 'edit' ? compareConfig.editConnectorConfig : {}; // existing config when editing
+      const newCompareConfig = { ...compareConfig.defaultPluginConfig, ...editConnectorConfig, ...result }; // merged form submission
+      Object.keys(newCompareConfig).forEach((item) => {
+        if (
+          newCompareConfig[item] === compareConfig.defaultPluginConfig[item] ||
+          newCompareConfig[item]?.toString() === compareConfig.defaultPluginConfig[item]?.toString()
+        ) {
+          delete newCompareConfig[item]; // drop values that equal the defaults
+        }
+      });
       callback({
         success: {
           connectClusterId: res[0].connectClusterId,
           connectorName: result['name'],
-          configs: result,
+          config: newCompareConfig,
         },
       });
     },
@@ -1013,7 +1034,7 @@ export default forwardRef(
           curClusterName = cluster.label;
         }
       });
-      (jsonRef as any)?.onOpen(operateInfo.type, curClusterName, info.success.configs);
+      (jsonRef as any)?.onOpen(operateInfo.type, curClusterName, info.success.config);
       onClose();
     }
   });
@@ -1026,9 +1047,9 @@ export default forwardRef(
       setCurrentStep(info.error);
     } else {
       setSubmitLoading(true);
-      Object.entries(info.success.configs).forEach(([key, val]) => {
+      Object.entries(info.success.config).forEach(([key, val]) => {
         if (val === null) {
-          delete info.success.configs[key];
+          delete info.success.config[key];
         }
       });
       Utils.put(api.validateConnectorConfig, info.success).then(
@@ -10,7 +10,7 @@ const PLACEHOLDER = `配置格式如下
 
 {
 "connectClusterName": "", // Connect Cluster 名称
-"configs": { // 具体配置项
+"config": { // 具体配置项
 "name": "",
 "connector.class": "",
 "tasks.max": 1,
@@ -47,7 +47,7 @@ export default forwardRef((props: any, ref) => {
 configs: JSON.stringify(
 {
 connectClusterName,
-configs: defaultConfigs,
+config: defaultConfigs,
 },
 null,
 2
@@ -63,13 +63,13 @@ export default forwardRef((props: any, ref) => {
 form.validateFields().then(
 (data) => {
 const postData = JSON.parse(data.configs);
-postData.connectorName = postData.configs.name;
+postData.connectorName = postData.config.name;
 postData.connectClusterId = connectClusters.find((cluster) => cluster.label === postData.connectClusterName).value;
 delete postData.connectClusterName;
 
-Object.entries(postData.configs).forEach(([key, val]) => {
+Object.entries(postData.config).forEach(([key, val]) => {
 if (val === null) {
-delete postData.configs[key];
+delete postData.config[key];
 }
 });
 Utils.put(api.validateConnectorConfig, postData).then(
@@ -198,20 +198,20 @@ export default forwardRef((props: any, ref) => {
 }
 }
 
-if (!v.configs || typeof v.configs !== 'object') {
-return Promise.reject('内容缺少 configs 字段或字段格式错误');
+if (!v.config || typeof v.config !== 'object') {
+return Promise.reject('内容缺少 config 字段或字段格式错误');
 } else {
 // 校验 connectorName 字段
-if (!v.configs.name) {
-return Promise.reject('configs 字段下缺少 name 项');
+if (!v.config.name) {
+return Promise.reject('config 字段下缺少 name 项');
 } else {
-if (type === 'edit' && v.configs.name !== defaultConfigs.name) {
+if (type === 'edit' && v.config.name !== defaultConfigs.name) {
 return Promise.reject('编辑模式下不允许修改 name 字段');
 }
 }
-if (!v.configs['connector.class']) {
-return Promise.reject('configs 字段下缺少 connector.class 项');
-} else if (type === 'edit' && v.configs['connector.class'] !== defaultConfigs['connector.class']) {
+if (!v.config['connector.class']) {
+return Promise.reject('config 字段下缺少 connector.class 项');
+} else if (type === 'edit' && v.config['connector.class'] !== defaultConfigs['connector.class']) {
 return Promise.reject('编辑模式下不允许修改 connector.class 字段');
 }
 }
@@ -219,13 +219,13 @@ export default forwardRef((props: any, ref) => {
 if (type === 'create') {
 // 异步校验 connector 名称是否重复 以及 className 是否存在
 return Promise.all([
-Utils.request(api.isConnectorExist(connectClusterId, v.configs.name)),
+Utils.request(api.isConnectorExist(connectClusterId, v.config.name)),
 Utils.request(api.getConnectorPlugins(connectClusterId)),
 ]).then(
 ([data, plugins]: [any, ConnectorPlugin[]]) => {
 return data?.exist
 ? Promise.reject('name 与已有 Connector 重复')
-: plugins.every((plugin) => plugin.className !== v.configs['connector.class'])
+: plugins.every((plugin) => plugin.className !== v.config['connector.class'])
 ? Promise.reject('该 connectCluster 下不存在 connector.class 项配置的插件')
 : Promise.resolve();
 },
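Editor's note: the hunks above rename the connector JSON's inner field from `configs` to `config`; the outer form field is still named `configs` because it only carries the raw JSON text. A rough TypeScript sketch of the resulting request body follows — the `ValidateConnectorBody` name is invented for illustration and is not a type in this repository.

```ts
// Shape of the body sent to api.validateConnectorConfig after this change (sketch).
interface ValidateConnectorBody {
  connectClusterId: number;    // looked up from the selected connectClusterName
  connectorName: string;       // copied from config.name before submit
  config: {                    // renamed from `configs` in this commit
    name: string;
    'connector.class': string;
    'tasks.max'?: number;
    [key: string]: unknown;    // remaining plugin-specific options
  };
}
```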
@@ -1,8 +1,9 @@
 import SmallChart from '@src/components/SmallChart';
 import TagsWithHide from '@src/components/TagsWithHide';
-import { Button, Tag, Tooltip, Utils, Popconfirm } from 'knowdesign';
+import { Button, Tag, Tooltip, Utils, Popconfirm, AppContainer } from 'knowdesign';
 import React from 'react';
 import Delete from './Delete';
+import { ClustersPermissionMap } from '../CommonConfig';
 export const defaultPagination = {
 current: 1,
 pageSize: 10,
@@ -93,7 +94,8 @@ const renderLine = (record: any, metricName: string) => {
 };
 
 export const getConnectorsColumns = (arg?: any) => {
-const columns = [
+const [global] = AppContainer.useGlobalValue();
+const columns: any = [
 {
 title: 'Connector Name',
 dataIndex: 'connectorName',
@@ -213,7 +215,10 @@ export const getConnectorsColumns = (arg?: any) => {
 return t && t.length > 0 ? <TagsWithHide placement="bottom" list={t} expandTagContent={(num: any) => `共有${num}个`} /> : '-';
 },
 },
-{
+];
+
+if (global.hasPermission) {
+columns.push({
 title: '操作',
 dataIndex: 'options',
 key: 'options',
@@ -224,20 +229,24 @@ export const getConnectorsColumns = (arg?: any) => {
 render: (_t: any, r: any) => {
 return (
 <div>
-<Popconfirm
-title="是否重启当前任务?"
-onConfirm={() => arg?.optionConnect(r, 'restart')}
-// onCancel={cancel}
-okText="是"
-cancelText="否"
-overlayClassName="connect-popconfirm"
->
-<Button key="restart" type="link" size="small">
-重启
-</Button>
-</Popconfirm>
+{global.hasPermission(ClustersPermissionMap.CONNECTOR_RESTART) ? (
+<Popconfirm
+title="是否重启当前任务?"
+onConfirm={() => arg?.optionConnect(r, 'restart')}
+// onCancel={cancel}
+okText="是"
+cancelText="否"
+overlayClassName="connect-popconfirm"
+>
+<Button key="restart" type="link" size="small">
+重启
+</Button>
+</Popconfirm>
+) : (
+<></>
+)}
 
-{(r.state === 'RUNNING' || r.state === 'PAUSED') && (
+{global.hasPermission(ClustersPermissionMap.CONNECTOR_STOP_RESUME) && (r.state === 'RUNNING' || r.state === 'PAUSED') && (
 <Popconfirm
 title={`是否${r.state === 'RUNNING' ? '暂停' : '继续'}当前任务?`}
 onConfirm={() => arg?.optionConnect(r, r.state === 'RUNNING' ? 'stop' : 'resume')}
@@ -252,16 +261,24 @@ export const getConnectorsColumns = (arg?: any) => {
 </Button>
 </Popconfirm>
 )}
+{global.hasPermission(ClustersPermissionMap.CONNECTOR_CHANGE_CONFIG) ? (
+<Button type="link" size="small" onClick={() => arg?.editConnector(r)}>
+编辑
+</Button>
+) : (
+<></>
+)}
 
-<Button type="link" size="small" onClick={() => arg?.editConnector(r)}>
-编辑
-</Button>
-<Delete record={r} onConfirm={arg?.deleteTesk}></Delete>
+{global.hasPermission(ClustersPermissionMap.CONNECTOR_DELETE) ? (
+<Delete record={r} onConfirm={arg?.deleteTesk}></Delete>
+) : (
+<></>
+)}
 </div>
 );
 },
-},
-];
+});
+}
 return columns;
 };
 
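Editor's note: the operation column is now permission-gated through `AppContainer.useGlobalValue()` and the `ClustersPermissionMap` permission points; calling the hook makes `getConnectorsColumns` itself hook-like, so it must only run during a component render. A minimal sketch of the gating pattern — the `RestartAction` component is invented for illustration:

```tsx
import React from 'react';
import { AppContainer, Button } from 'knowdesign';
import { ClustersPermissionMap } from '../CommonConfig';

// Render an action button only when the user holds the matching permission point.
const RestartAction = (props: { onRestart: () => void }) => {
  const [global] = AppContainer.useGlobalValue();
  if (!global.hasPermission?.(ClustersPermissionMap.CONNECTOR_RESTART)) return null;
  return (
    <Button type="link" size="small" onClick={props.onRestart}>
      重启
    </Button>
  );
};
```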
@@ -12,6 +12,7 @@ import notification from '@src/components/Notification';
 import './index.less';
 import AddConnectorUseJSON from './AddConnectorUseJSON';
 import HasConnector from './HasConnector';
+import { ClustersPermissionMap } from '../CommonConfig';
 const { request } = Utils;
 
 const rateMap: any = {
@@ -174,21 +175,25 @@ const Connectors: React.FC = () => {
 maxLength: 128,
 }}
 />
-<span className="add-connect">
-<Button
-className="add-connect-btn"
-icon={<IconFont type="icon-jiahao" />}
-type="primary"
-onClick={() => addConnectorRef.current?.onOpen('create', addConnectorJsonRef.current)}
->
-新增Connector
-</Button>
-<Dropdown overlayClassName="add-connect-dropdown-menu" overlay={menu}>
-<Button className="add-connect-json" type="primary">
-<IconFont type="icon-guanwangxiala" />
-</Button>
-</Dropdown>
-</span>
+{global.hasPermission && global.hasPermission(ClustersPermissionMap.CONNECTOR_ADD) ? (
+<span className="add-connect">
+<Button
+className="add-connect-btn"
+icon={<IconFont type="icon-jiahao" />}
+type="primary"
+onClick={() => addConnectorRef.current?.onOpen('create', addConnectorJsonRef.current)}
+>
+新增Connector
+</Button>
+<Dropdown overlayClassName="add-connect-dropdown-menu" overlay={menu}>
+<Button className="add-connect-json" type="primary">
+<IconFont type="icon-guanwangxiala" />
+</Button>
+</Dropdown>
+</span>
+) : (
+<></>
+)}
 </div>
 </div>
 <ProTable
@@ -0,0 +1,104 @@
+import React, { useState } from 'react';
+import { useParams } from 'react-router-dom';
+import { Button, Form, Input, Modal, Utils } from 'knowdesign';
+import notification from '@src/components/Notification';
+import Api from '@src/api/index';
+
+// eslint-disable-next-line react/display-name
+export default (props: { record: any; onConfirm?: () => void }) => {
+const { record, onConfirm } = props;
+const routeParams = useParams<{
+clusterId: string;
+}>();
+const [form] = Form.useForm();
+const [delDialogVisible, setDelDialogVisble] = useState(false);
+const handleDelOk = () => {
+form.validateFields().then((e) => {
+const formVal = form.getFieldsValue();
+formVal.clusterPhyId = Number(routeParams.clusterId);
+formVal.deleteType = 0;
+Utils.delete(Api.deleteGroupOffset(), { data: formVal }).then((res: any) => {
+if (res === null) {
+notification.success({
+message: '删除消费组成功',
+});
+setDelDialogVisble(false);
+onConfirm && onConfirm();
+} else {
+notification.error({
+message: '删除消费组失败',
+});
+}
+});
+});
+};
+return (
+<>
+<Button
+style={{ paddingLeft: 0 }}
+type="link"
+onClick={(_) => {
+setDelDialogVisble(true);
+}}
+>
+删除
+</Button>
+<Modal
+className="custom-modal"
+title="确定要删除此Topic吗?"
+centered={true}
+visible={delDialogVisible}
+wrapClassName="del-topic-modal"
+destroyOnClose={true}
+maskClosable={false}
+onOk={handleDelOk}
+onCancel={(_) => {
+setDelDialogVisble(false);
+}}
+okText="删除"
+okButtonProps={{
+danger: true,
+size: 'small',
+style: {
+paddingLeft: '16px',
+paddingRight: '16px',
+},
+}}
+cancelButtonProps={{
+size: 'small',
+style: {
+paddingLeft: '16px',
+paddingRight: '16px',
+},
+}}
+>
+{/* <div className="tip-info">
+<IconFont type="icon-warning-circle"></IconFont>
+<span>会删除Topic的全部消息数据和ACL权限!请再次输入Topic名称进行确认!</span>
+</div> */}
+<Form form={form} labelCol={{ span: 5 }} style={{ marginTop: 18 }}>
+<Form.Item label="TopicName">{record.name}</Form.Item>
+<Form.Item
+name="groupName"
+label="GroupName"
+rules={[
+// { required: true },
+() => ({
+validator(_, value) {
+if (!value) {
+return Promise.reject(new Error('请输入Group名称'));
+} else if (value !== record.name) {
+return Promise.reject(new Error('请输入正确的Group名称'));
+}
+return Promise.resolve();
+},
+}),
+]}
+>
+<Input placeholder="请输入" size="small"></Input>
+</Form.Item>
+</Form>
+</Modal>
+</>
+);
+};
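Editor's note: the new file above is a type-to-confirm delete dialog for a consumer group (`deleteType = 0`, the group dimension of `deleteGroupOffset`); its Modal title and `TopicName` label still say "Topic", apparently carried over from the Topic delete dialog it was modeled on. A hedged usage sketch — the `GroupRecord` type is invented for illustration:

```tsx
import React from 'react';
import Delete from './Delete';

// record.name must hold the group name: the confirm input is compared against it.
type GroupRecord = { name: string };

const GroupActions = (props: { record: GroupRecord; reload: () => void }) => (
  // onConfirm re-fetches the group list after a successful delete
  <Delete record={props.record} onConfirm={props.reload} />
);
```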
@@ -1,12 +1,13 @@
 import React, { useState, useEffect } from 'react';
 import { useParams, useHistory } from 'react-router-dom';
-import { Drawer, ProTable, Utils } from 'knowdesign';
+import { Button, Space, Divider, Drawer, ProTable, Utils, notification } from 'knowdesign';
 import { IconFont } from '@knowdesign/icons';
 import API from '@src/api/index';
 import { defaultPagination, hashDataParse } from '@src/constants/common';
 import { getGtoupTopicColumns } from './config';
 import { ExpandedRow } from './ExpandedRow';
 import ResetOffsetDrawer from './ResetOffsetDrawer';
+import { useForceRefresh } from '@src/components/utils';
 const { request } = Utils;
 
 export interface MetricLine {
@@ -63,6 +64,7 @@ const GroupDetail = (props: any) => {
 const [openKeys, setOpenKeys] = useState();
 const [resetOffsetVisible, setResetOffsetVisible] = useState(false);
 const [resetOffsetArg, setResetOffsetArg] = useState({});
+const [refreshKey, forceRefresh] = useForceRefresh();
 
 const genData = async ({ pageNo, pageSize, groupName }: any) => {
 if (urlParams?.clusterId === undefined) return;
@@ -110,6 +112,23 @@ const GroupDetail = (props: any) => {
 groupName: record?.groupName,
 });
 };
+// 删除消费组Topic
+const deleteOffset = (record: any) => {
+const params = {
+clusterPhyId: +urlParams?.clusterId,
+deleteType: 1, // 0:group纬度,1:Topic纬度,2:Partition纬度
+groupName: record.groupName,
+topicName: record.topicName,
+};
+Utils.delete(API.deleteGroupOffset(), { data: params }).then((data: any) => {
+if (data === null) {
+notification.success({
+message: '删除Topic成功!',
+});
+genData({ pageNo: 1, pageSize: pagination.pageSize, groupName: hashData.groupName });
+}
+});
+};
 
 const onTableChange = (pagination: any, filters: any, sorter: any) => {
 genData({ pageNo: pagination.current, pageSize: pagination.pageSize, filters, sorter, groupName: hashData.groupName });
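Editor's note: taken together, the two call sites (the Delete component sends `deleteType = 0`, the handler above sends `deleteType = 1`) and the inline comment suggest the following request contract for `deleteGroupOffset`; this is inferred from the diff, and the `DeleteGroupOffsetParams` name is illustrative only.

```ts
// Inferred body for Utils.delete(API.deleteGroupOffset(), { data: ... })
interface DeleteGroupOffsetParams {
  clusterPhyId: number;
  deleteType: 0 | 1 | 2; // 0: whole group, 1: one topic in the group, 2: one partition
  groupName: string;
  topicName?: string;    // required when deleteType >= 1
  // deleteType = 2 presumably also needs a partition field, not shown in this diff
}
```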
@@ -160,7 +179,7 @@ const GroupDetail = (props: any) => {
 // // 获取Consumer列表 表格模式
 // getTopicGroupMetric(hashData);
 // });
-}, [hashDataParse(location.hash).groupName]);
+}, [hashDataParse(location.hash).groupName, refreshKey]);
 
 return (
 <Drawer
@@ -182,6 +201,14 @@ const GroupDetail = (props: any) => {
 // <Divider type="vertical" />
 // </Space>
 // }
+extra={
+<Space>
+<span style={{ display: 'inline-block', fontSize: '15px' }} onClick={forceRefresh as () => void}>
+<i className="iconfont icon-shuaxin1" style={{ cursor: 'pointer' }} />
+</span>
+<Divider type="vertical" />
+</Space>
+}
 >
 <ProTable
 showQueryForm={false}
@@ -189,7 +216,7 @@ const GroupDetail = (props: any) => {
 showHeader: false,
 rowKey: 'key',
 loading: loading,
-columns: getGtoupTopicColumns({ resetOffset }),
+columns: getGtoupTopicColumns({ resetOffset, deleteOffset }),
 dataSource: topicData,
 paginationProps: { ...pagination },
 // noPagination: true,
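Editor's note: `useForceRefresh` is imported from `@src/components/utils` but its implementation is not part of this diff. From its usage — a tuple of a changing `refreshKey` (fed into `useEffect` deps and `key` props) plus a `forceRefresh` trigger — a plausible minimal implementation would be the following sketch; treat it as an assumption, not the repository's actual code.

```ts
import { useCallback, useState } from 'react';

// Sketch: bumping the counter invalidates effect deps and `key` props downstream,
// which re-runs data fetching or remounts the keyed subtree.
export const useForceRefresh = (): [number, () => void] => {
  const [refreshKey, setRefreshKey] = useState(0);
  const forceRefresh = useCallback(() => setRefreshKey((k) => k + 1), []);
  return [refreshKey, forceRefresh];
};
```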
@@ -8,6 +8,7 @@ import { IconFont } from '@knowdesign/icons';
 import API from '@src/api/index';
 import { hashDataParse } from '@src/constants/common';
 const { Option } = Select;
+import PubSub from 'pubsub-js'
 
 export interface MetricLine {
 createTime?: number;
@@ -214,6 +215,11 @@ export const ExpandedRow: any = ({ record, groupName }: any) => {
 // getTopicGroupMetric();
 // }, [sortObj]);
 
+// 订阅重置offset成功的消息
+PubSub.subscribe('ConsumerGroup-ResetOffset', function(data){
+getTopicGroupMetric({});
+})
+
 useEffect(() => {
 const hashData = hashDataParse(location.hash);
 // if (!hashData.groupName) return;
@@ -4,6 +4,7 @@ import { useParams } from 'react-router-dom';
 import EditTable from '../TestingProduce/component/EditTable';
 import Api from '@src/api/index';
 import moment from 'moment';
+import PubSub from 'pubsub-js'
 
 const CustomSelectResetTime = (props: { value?: string; onChange?: (val: Number | String) => void }) => {
 const { value, onChange } = props;
@@ -106,6 +107,8 @@ export default (props: any) => {
 message: '重置offset成功',
 });
 setVisible(false);
+// 发布重置offset成功的消息
+PubSub.publish('ConsumerGroup-ResetOffset', '1');
 } else {
 notification.error({
 message: '重置offset失败',
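Editor's note: the subscription above runs in the component body on every render and is never unsubscribed; pubsub-js also invokes handlers as `(message, data)`, so a single-argument callback receives the topic name, not the payload (the later `TopicDetail-ResetOffset` subscriber uses the two-argument form). A conventional effect-scoped wiring, as a hedged sketch with an invented component name:

```tsx
import React, { useEffect } from 'react';
import PubSub from 'pubsub-js';

// Sketch: subscribe once on mount, clean up on unmount.
const ResetOffsetListener = (props: { onReset: () => void }) => {
  useEffect(() => {
    const token = PubSub.subscribe('ConsumerGroup-ResetOffset', (_message, _data) => {
      props.onReset(); // e.g. getTopicGroupMetric({})
    });
    // unsubscribe on unmount so handlers don't pile up across re-renders
    return () => {
      PubSub.unsubscribe(token);
    };
  }, []);
  return null;
};
```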
@@ -1,8 +1,9 @@
 /* eslint-disable @typescript-eslint/explicit-module-boundary-types */
 import React from 'react';
-import { AppContainer } from 'knowdesign';
+import { AppContainer, Button, Popconfirm } from 'knowdesign';
 import TagsWithHide from '@src/components/TagsWithHide';
 import { ClustersPermissionMap } from '../CommonConfig';
+import Delete from './Delete';
 
 export const runningStatusEnum: any = {
 1: 'Doing',
@@ -62,6 +63,21 @@ export const getGroupColumns = (arg?: any) => {
 width: 200,
 render: (t: number) => (t ? t.toLocaleString() : '-'),
 },
+{
+title: '操作',
+dataIndex: 'options',
+key: 'options',
+width: 200,
+filterTitle: true,
+fixed: 'right',
+render: (_t: any, r: any) => {
+return (
+<div>
+<Delete record={r} onConfirm={arg?.deleteTesk}></Delete>
+</div>
+);
+},
+},
 ];
 return columns;
 };
@@ -103,11 +119,20 @@ export const getGtoupTopicColumns = (arg?: any) => {
 title: '操作',
 dataIndex: 'desc',
 key: 'desc',
-width: 150,
+width: 200,
 render: (value: any, record: any) => {
 return (
 <div>
 <a onClick={() => arg.resetOffset(record)}>重置Offset</a>
+<Popconfirm
+placement="top"
+title={`是否要删除当前Topic?`}
+onConfirm={() => arg.deleteOffset(record)}
+okText="是"
+cancelText="否"
+>
+<Button type="link">删除</Button>
+</Popconfirm>
 </div>
 );
 },
@@ -58,6 +58,11 @@ const BrokerList: React.FC = (props: any) => {
 genData({ pageNo: pagination.current, pageSize: pagination.pageSize, filters, sorter });
 };
 
+// 删除Group
+const deleteTesk = () => {
+genData({ pageNo: 1, pageSize: pagination.pageSize });
+};
+
 useEffect(() => {
 genData({
 pageNo: 1,
@@ -115,7 +120,7 @@ const BrokerList: React.FC = (props: any) => {
 showHeader: false,
 rowKey: 'group_list',
 loading: loading,
-columns: getGroupColumns(),
+columns: getGroupColumns(deleteTesk),
 dataSource: data,
 paginationProps: { ...pagination },
 attrs: {
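Editor's note: as printed, the call site passes the callback positionally — `getGroupColumns(deleteTesk)` — while the column factory reads it as `arg?.deleteTesk` (see the 操作 column earlier in this diff), so the Delete button's `onConfirm` would receive `undefined`. The object form the factory appears to expect is shown below; this is an observation about the diff, not a change recorded in it.

```ts
// What getGroupColumns' `arg?.deleteTesk` access implies at the call site:
columns: getGroupColumns({ deleteTesk }),
```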
@@ -522,21 +522,15 @@ const ConnectorForm = (props: {
 const params = {
 ...values,
 id: initFieldsValue?.id,
-jmxProperties: values.jmxProperties ? `{ "jmxProperties": "${values.jmxProperties}" }` : undefined,
+jmxProperties: values.jmxProperties ? `{ "jmxPort": "${values.jmxProperties}" }` : undefined,
 };
-Utils.put(api.batchConnectClusters, [params])
-.then((res) => {
-// setSelectedTabKey(undefined);
-getConnectClustersList();
-notification.success({
-message: '修改Connect集群成功',
-});
-})
-.catch((error) => {
-notification.success({
-message: '修改Connect集群失败',
-});
-});
+Utils.put(api.batchConnectClusters, [params]).then((res) => {
+// setSelectedTabKey(undefined);
+getConnectClustersList();
+notification.success({
+message: '修改Connect集群成功',
+});
+});
 };
 
 const onCancel = () => {
@@ -135,6 +135,7 @@ const AddDrawer = forwardRef((_, ref) => {
 
 if (configType === 'custom') {
 // 1. 自定义权限
+// TODO: 需要和后端联调
 const { resourceType, resourcePatternType, aclPermissionType, aclOperation, aclClientHost } = formData;
 submitData.push({
 clusterId,
@@ -281,6 +282,42 @@ const AddDrawer = forwardRef((_, ref) => {
 </Form.Item>
 <Form.Item dependencies={['configType']} style={{ marginBottom: 0 }}>
 {({ getFieldValue }) => {
+const SelectFormItems = (props: { type: string }) => {
+const { type } = props;
+return (
+<Form.Item
+name={`${type}Name`}
+dependencies={[`${type}PatternType`]}
+validateTrigger="onBlur"
+rules={[
+({ getFieldValue }) => ({
+validator: (rule: any, value: string) => {
+if (!value) {
+return Promise.reject(`${type}Name 不能为空`);
+}
+if (type === 'topic' && getFieldValue(`${type}PatternType`) === ACL_PATTERN_TYPE['Literal']) {
+return Utils.request(api.getTopicMetadata(clusterId as any, value)).then((res: any) => {
+return res?.exist ? Promise.resolve() : Promise.reject('该 Topic 不存在');
+});
+}
+return Promise.resolve();
+},
+}),
+]}
+>
+<AutoComplete
+filterOption={(value, option) => {
+if (option?.value.includes(value)) {
+return true;
+}
+return false;
+}}
+options={type === 'topic' ? topicMetaData : groupMetaData}
+placeholder={`请输入 ${type}Name`}
+/>
+</Form.Item>
+);
+};
 const PatternTypeFormItems = (props: { type: string }) => {
 const { type } = props;
 const UpperCaseType = type[0].toUpperCase() + type.slice(1);
@@ -388,6 +425,27 @@ const AddDrawer = forwardRef((_, ref) => {
 }))}
 />
 </Form.Item>
+<Form.Item dependencies={['resourceType']}>
+{({ getFieldValue }) => {
+const type = getFieldValue('resourceType');
+if (type === ACL_RESOURCE_TYPE['Cluster'] || type === ACL_RESOURCE_TYPE['TransactionalId']) {
+//TODO需要和后端获取集群和事务接口联调
+return (
+<Form.Item
+name={`${type === 4 ? 'cluster' : 'transactionalId'}`}
+rules={[{ required: true, message: `${type === 4 ? 'Cluster名称' : 'TransactionalId'} 不能为空` }]}
+>
+<Input placeholder={`请输入${type === 4 ? 'Cluster名称' : 'TransactionalId'}`}></Input>
+</Form.Item>
+);
+} else if (type === ACL_RESOURCE_TYPE['Topic']) {
+return <PatternTypeFormItems type="topic" />;
+} else if (type === ACL_RESOURCE_TYPE['Group']) {
+return <PatternTypeFormItems type="group" />;
+}
+return null;
+}}
+</Form.Item>
 <Form.Item dependencies={['resourceType']} style={{ marginBottom: 0 }}>
 {({ getFieldValue }) => {
 form.resetFields(['aclOperation']);
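Editor's note: the `SelectFormItems` validator above checks topic existence against the server on blur, but only for Literal pattern names (a Prefixed pattern matches many topics, so no single-topic lookup applies). From `res?.exist`, the endpoint appears to return `{ exist: boolean }`; a hedged sketch of that round-trip, with invented names:

```ts
import { Utils } from 'knowdesign';

// Assumed: api.getTopicMetadata builds the request URL (as used in the diff above).
declare const api: { getTopicMetadata: (clusterId: number, topicName: string) => string };

// Rejects when the Literal topic name does not exist on the cluster (sketch).
const validateTopicExists = async (clusterId: number, topicName: string): Promise<void> => {
  const res: { exist?: boolean } = await Utils.request(api.getTopicMetadata(clusterId, topicName));
  if (!res?.exist) throw new Error('该 Topic 不存在');
};
```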
@@ -8,6 +8,7 @@ import { useParams } from 'react-router-dom';
 import TagsWithHide from '@src/components/TagsWithHide';
 import SwitchTab from '@src/components/SwitchTab';
 import RenderEmpty from '@src/components/RenderEmpty';
+import { useForceRefresh } from '@src/components/utils';
 
 interface PropsType {
 hashData: any;
@@ -401,11 +402,18 @@ export default (props: PropsType) => {
 const { hashData } = props;
 const [showMode, setShowMode] = useState<string>('card');
 
+const [refreshKey, forceRefresh] = useForceRefresh();
 return (
 <>
-<div className="brokers-tab-container">
+<div className="brokers-tab-container" key={`${refreshKey}`}>
 <div className="overview">
 <div className="left">
+<span
+style={{ display: 'inline-block', padding: '0 10px', marginRight: '10px', borderRight: '1px solid #ccc', fontSize: '15px' }}
+onClick={forceRefresh as () => void}
+>
+<i className="iconfont icon-shuaxin1" style={{ cursor: 'pointer' }} />
+</span>
 <PartitionSummary clusterId={clusterId} topicName={hashData.topicName} />
 </div>
 <div className="cases-box">
@@ -10,6 +10,7 @@ import { ClustersPermissionMap } from '../CommonConfig';
 import ResetOffsetDrawer from './ResetOffsetDrawer';
 import SwitchTab from '@src/components/SwitchTab';
 import ContentWithCopy from '@src/components/CopyContent';
+import PubSub from "pubsub-js";
 
 const { Option } = Select;
 
@@ -335,6 +336,11 @@ export default (props: any) => {
 });
 }, [visible]);
 
+// 订阅重置offset成功的消息
+PubSub.subscribe('TopicDetail-ResetOffset', function(message, data){
+getTopicGroupMetric({hashData: data});
+})
+
 useEffect(() => {
 if (partitionList.length === 0) return;
 getTopicGroupMetricHistory(partitionList, hashData);
@@ -4,6 +4,7 @@ import { useParams } from 'react-router-dom';
 import EditTable from '../TestingProduce/component/EditTable';
 import Api from '@src/api/index';
 import moment from 'moment';
+import PubSub from "pubsub-js";
 
 const CustomSelectResetTime = (props: { value?: string; onChange?: (val: Number | String) => void }) => {
 const { value, onChange } = props;
@@ -106,6 +107,13 @@ export default (props: any) => {
 message: '重置offset成功',
 });
 setResetOffsetVisible(false);
+// 发布重置offset成功的消息
+PubSub.publish('TopicDetail-ResetOffset',
+{
+groupName: record.groupName,
+topicName: record.topicName
+}
+);
 } else {
 notification.error({
 message: '重置offset失败',
@@ -81,7 +81,8 @@ export const getTopicMessagesColmns = () => {
 title: 'Offset',
 dataIndex: 'offset',
 key: 'offset',
-render: (t: number) => (t ? t.toLocaleString() : '-'),
+sorter: true,
+render: (t: number) => (+t ? t.toLocaleString() : '-'),
 },
 {
 title: 'Timestamp',
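Editor's note: the Offset column becomes sortable and switches to unary-plus coercion; note that `+t ? … : '-'` renders `-` not only for missing values but also for a legitimate offset of `0`. A variant that keeps `0` visible, for comparison (a sketch, not what the commit ships):

```ts
const renderOffset = (t: number | string) =>
  Number.isFinite(+t) ? (+t).toLocaleString() : '-';
```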
@@ -26,6 +26,7 @@
 
 .left {
 display: flex;
+align-items: center;
 .info-box {
 display: flex;
 height: 36px;
@@ -15,9 +15,21 @@ import Replicator from './Replicator';
 import './index.less';
 import TopicDetailHealthCheck from '@src/components/CardBar/TopicDetailHealthCheck';
 import { hashDataParse } from '@src/constants/common';
+import { useForceRefresh } from '@src/components/utils';
 
 const { TabPane } = Tabs;
 
+const Reload = (props: any) => {
+return (
+<span
+style={{ display: 'inline-block', padding: '0 10px', marginRight: '10px', borderRight: '1px solid #ccc', fontSize: '15px' }}
+onClick={props.forceRefresh as () => void}
+>
+<i className="iconfont icon-shuaxin1" style={{ cursor: 'pointer' }} />
+</span>
+);
+};
+
 const OperationsSlot: any = {
 // eslint-disable-next-line react/display-name
 // ['Partitions']: (arg: any) => {
@@ -70,17 +82,20 @@ const OperationsSlot: any = {
 // eslint-disable-next-line react/display-name
 ['ConsumerGroups']: (arg: any) => {
 return (
-<SearchInput
-onSearch={arg.setSearchKeywords}
-attrs={{
-value: arg.searchValue,
-onChange: arg.setSearchValue,
-placeholder: '请输入Consumer Group',
-size: 'small',
-style: { width: '210px', marginRight: '2px' },
-maxLength: 128,
-}}
-/>
+<>
+<Reload {...arg} />
+<SearchInput
+onSearch={arg.setSearchKeywords}
+attrs={{
+value: arg.searchValue,
+onChange: arg.setSearchValue,
+placeholder: '请输入Consumer Group',
+size: 'small',
+style: { width: '210px', marginRight: '2px' },
+maxLength: 128,
+}}
+/>
+</>
 );
 },
 };
@@ -94,6 +109,7 @@ const TopicDetail = (props: any) => {
 const [searchValue, setSearchValue] = useState<string>('');
 const [visible, setVisible] = useState(false);
 const [hashData, setHashData] = useState<any>({});
+const [refreshKey, forceRefresh] = useForceRefresh();
 
 const callback = (key: any) => {
 setSearchValue('');
@@ -184,7 +200,7 @@ const TopicDetail = (props: any) => {
 onChange={callback}
 tabBarExtraContent={
 OperationsSlot[positionType] &&
-OperationsSlot[positionType]({ ...props, setSearchKeywords, setSearchValue, searchValue, positionType })
+OperationsSlot[positionType]({ ...props, setSearchKeywords, setSearchValue, searchValue, positionType, forceRefresh })
 }
 destroyInactiveTabPane
 >
@@ -196,7 +212,7 @@ const TopicDetail = (props: any) => {
 </TabPane>
 <TabPane tab="ConsumerGroups" key="ConsumerGroups">
 {positionType === 'ConsumerGroups' && (
-<Consumers searchKeywords={searchKeywords} positionType={positionType} hashData={hashData} />
+<Consumers searchKeywords={searchKeywords} positionType={positionType} hashData={hashData} key={`${refreshKey}`} />
 )}
 </TabPane>
 <TabPane tab="ACLs" key="ACLs">
@@ -1,7 +1,22 @@
 /* eslint-disable react/display-name */
 import React, { useState, useEffect } from 'react';
 import { useHistory, useParams } from 'react-router-dom';
-import { AppContainer, Input, ProTable, Select, Switch, Tooltip, Utils, Dropdown, Menu, Button, Divider, Tag } from 'knowdesign';
+import {
+AppContainer,
+Input,
+ProTable,
+Select,
+Switch,
+Tooltip,
+Utils,
+Dropdown,
+Menu,
+Button,
+Divider,
+Tag,
+Popconfirm,
+notification,
+} from 'knowdesign';
 import { IconFont } from '@knowdesign/icons';
 import Create from './Create';
 import './index.less';
@@ -85,6 +100,21 @@ const AutoPage = (props: any) => {
 setTopicListLoading(false);
 });
 };
+const deleteTopicData = (record: any) => {
+console.log(record, 'record');
+const params = {
+clusterId: Number(routeParams.clusterId),
+topicName: record.topicName,
+};
+Utils.post(Api.deleteTopicData(), params).then((data: any) => {
+if (data === null) {
+notification.success({
+message: '清除数据成功',
+});
+getTopicsList();
+}
+});
+};
 useEffect(() => {
 getTopicsList();
 }, [sortObj, showInternalTopics, searchKeywords, pageIndex, pageSize]);
@@ -247,7 +277,7 @@ const AutoPage = (props: any) => {
 dataIndex: 'desc',
 key: 'desc',
 fixed: 'right',
-width: 140,
+width: 200,
 render: (value: any, record: any) => {
 return (
 <div className="operation-list">
@@ -257,6 +287,19 @@ const AutoPage = (props: any) => {
 <></>
 )}
 {global.hasPermission(ClustersPermissionMap.TOPIC_DEL) ? <Delete record={record} onConfirm={getTopicsList}></Delete> : <></>}
+{global.hasPermission(ClustersPermissionMap.TOPIC_DEL) ? ( // TODO:替换为清除数据的权限
+<Popconfirm
+placement="topRight"
+title={`是否要清空当前Topic的数据?`}
+onConfirm={() => deleteTopicData(record)}
+okText="是"
+cancelText="否"
+>
+<Button type="link">清除数据</Button>
+</Popconfirm>
+) : (
+<></>
+)}
 </div>
 );
 },
@@ -1,15 +1,13 @@
 package com.xiaojukeji.know.streaming.km.core.service.acl;
 
-import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
 import com.xiaojukeji.know.streaming.km.common.bean.po.KafkaAclPO;
+import com.xiaojukeji.know.streaming.km.core.service.meta.MetaDataService;
 import org.apache.kafka.common.acl.AclBinding;
 import org.apache.kafka.common.resource.ResourceType;
 
 import java.util.List;
 
-public interface KafkaAclService {
-Result<List<AclBinding>> getAclFromKafka(Long clusterPhyId);
-
+public interface KafkaAclService extends MetaDataService<AclBinding> {
 List<KafkaAclPO> getKafkaAclFromDB(Long clusterPhyId);
 
 Integer countKafkaAclFromDB(Long clusterPhyId);
@@ -17,10 +15,5 @@ public interface KafkaAclService {
 Integer countResTypeAndDistinctFromDB(Long clusterPhyId, ResourceType resourceType);
 
 Integer countKafkaUserAndDistinctFromDB(Long clusterPhyId);
-
-List<KafkaAclPO> getKafkaResTypeAclFromDB(Long clusterPhyId, Integer resType);
-
 List<KafkaAclPO> getTopicAclFromDB(Long clusterPhyId, String topicName);
-
-List<KafkaAclPO> getGroupAclFromDB(Long clusterPhyId, String groupName);
 }
@@ -3,10 +3,6 @@ package com.xiaojukeji.know.streaming.km.core.service.acl;
 import com.xiaojukeji.know.streaming.km.common.bean.entity.param.acl.ACLAtomParam;
 import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
 import com.xiaojukeji.know.streaming.km.common.bean.po.KafkaAclPO;
-import org.apache.kafka.common.resource.ResourceType;
-
-import java.util.Date;
-import java.util.List;
 
 public interface OpKafkaAclService {
 /**
@@ -19,14 +15,5 @@ public interface OpKafkaAclService {
 */
 Result<Void> deleteKafkaAcl(ACLAtomParam aclAtomParam, String operator);
 
-/**
-* 删除ACL
-*/
-Result<Void> deleteKafkaAclByResName(ResourceType resourceType, String resourceName, String operator);
-
 Result<Void> insertAndIgnoreDuplicate(KafkaAclPO kafkaAclPO);
-
-void batchUpdateAcls(Long clusterPhyId, List<KafkaAclPO> poList);
-
-int deleteByUpdateTimeBeforeInDB(Long clusterPhyId, Date beforeTime);
 }
@@ -11,6 +11,7 @@ import com.xiaojukeji.know.streaming.km.common.bean.entity.result.ResultStatus;
 import com.xiaojukeji.know.streaming.km.common.bean.po.KafkaAclPO;
 import com.xiaojukeji.know.streaming.km.common.constant.MsgConstant;
 import com.xiaojukeji.know.streaming.km.common.constant.KafkaConstant;
+import com.xiaojukeji.know.streaming.km.common.converter.KafkaAclConverter;
 import com.xiaojukeji.know.streaming.km.common.enums.cluster.ClusterAuthTypeEnum;
 import com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemTypeEnum;
 import com.xiaojukeji.know.streaming.km.common.exception.VCHandlerNotExistException;
@@ -18,8 +19,6 @@ import com.xiaojukeji.know.streaming.km.common.utils.ValidateUtils;
 import com.xiaojukeji.know.streaming.km.core.service.acl.KafkaAclService;
 import com.xiaojukeji.know.streaming.km.core.service.cluster.ClusterPhyService;
 import com.xiaojukeji.know.streaming.km.core.service.version.BaseKafkaVersionControlService;
-import com.xiaojukeji.know.streaming.km.core.service.version.BaseVersionControlService;
-import com.xiaojukeji.know.streaming.km.persistence.cache.LoadedClusterPhyCache;
 import com.xiaojukeji.know.streaming.km.persistence.kafka.KafkaAdminClient;
 import com.xiaojukeji.know.streaming.km.persistence.kafka.KafkaAdminZKClient;
 import com.xiaojukeji.know.streaming.km.persistence.mysql.KafkaAclDAO;
@@ -36,11 +35,13 @@ import org.apache.kafka.common.resource.ResourceType;
 import org.apache.kafka.common.security.auth.KafkaPrincipal;
 import org.apache.kafka.common.utils.SecurityUtils;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.dao.DuplicateKeyException;
 import org.springframework.stereotype.Service;
 
 import javax.annotation.PostConstruct;
-import java.util.ArrayList;
-import java.util.List;
+import java.util.*;
+import java.util.function.Function;
+import java.util.stream.Collectors;
 
 import scala.jdk.javaapi.CollectionConverters;
 
@@ -77,18 +78,49 @@ public class KafkaAclServiceImpl extends BaseKafkaVersionControlService implemen
 }
 
 @Override
-public Result<List<AclBinding>> getAclFromKafka(Long clusterPhyId) {
-if (LoadedClusterPhyCache.getByPhyId(clusterPhyId) == null) {
-return Result.buildFromRSAndMsg(ResultStatus.NOT_EXIST, MsgConstant.getClusterPhyNotExist(clusterPhyId));
-}
-
+public Result<List<AclBinding>> getDataFromKafka(ClusterPhy clusterPhy) {
 try {
-return (Result<List<AclBinding>>) versionControlService.doHandler(getVersionItemType(), getMethodName(clusterPhyId, ACL_GET_FROM_KAFKA), new ClusterPhyParam(clusterPhyId));
+Result<List<AclBinding>> dataResult = (Result<List<AclBinding>>) versionControlService.doHandler(getVersionItemType(), getMethodName(clusterPhy.getId(), ACL_GET_FROM_KAFKA), new ClusterPhyParam(clusterPhy.getId()));
+if (dataResult.failed()) {
+Result.buildFromIgnoreData(dataResult);
+}
+
+return Result.buildSuc(dataResult.getData());
 } catch (VCHandlerNotExistException e) {
 return Result.buildFailure(e.getResultStatus());
 }
 }
+
+@Override
+public void writeToDB(Long clusterPhyId, List<AclBinding> dataList) {
+Map<String, KafkaAclPO> dbPOMap = this.getKafkaAclFromDB(clusterPhyId).stream().collect(Collectors.toMap(KafkaAclPO::getUniqueField, Function.identity()));
+
+long now = System.currentTimeMillis();
+for (AclBinding aclBinding: dataList) {
+KafkaAclPO newPO = KafkaAclConverter.convert2KafkaAclPO(clusterPhyId, aclBinding, now);
+KafkaAclPO oldPO = dbPOMap.remove(newPO.getUniqueField());
+if (oldPO == null) {
+// 新增的ACL
+this.insertAndIgnoreDuplicate(newPO);
+}
+
+// 不需要update
+}
+
+// 删除已经不存在的
+for (KafkaAclPO dbPO: dbPOMap.values()) {
+kafkaAclDAO.deleteById(dbPO);
+}
+}
+
+@Override
+public int deleteInDBByKafkaClusterId(Long clusterPhyId) {
+LambdaQueryWrapper<KafkaAclPO> lambdaQueryWrapper = new LambdaQueryWrapper<>();
+lambdaQueryWrapper.eq(KafkaAclPO::getClusterPhyId, clusterPhyId);
+
+return kafkaAclDAO.delete(lambdaQueryWrapper);
+}
+
 @Override
 public List<KafkaAclPO> getKafkaAclFromDB(Long clusterPhyId) {
 LambdaQueryWrapper<KafkaAclPO> queryWrapper = new LambdaQueryWrapper<>();
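Editor's note: `writeToDB` above implements the `MetaDataService` sync as a reconcile-by-unique-key pass — index the DB rows by `getUniqueField()`, insert what Kafka reports but the DB lacks, and delete whatever remains in the index (rows present on both sides need no update for ACLs). The same pattern in a compact TypeScript sketch with invented names:

```ts
// Make the stored rows match the live rows, keyed by a unique field.
function reconcile<T>(
  liveRows: T[],
  dbRows: T[],
  keyOf: (row: T) => string,
  insert: (row: T) => void,
  remove: (row: T) => void
): void {
  const stale = new Map(dbRows.map((row) => [keyOf(row), row]));
  for (const row of liveRows) {
    if (!stale.delete(keyOf(row))) insert(row); // new in Kafka, missing in DB
  }
  stale.forEach(remove); // left only in DB => no longer exists in Kafka
}
```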
@@ -116,7 +148,7 @@ public class KafkaAclServiceImpl extends BaseKafkaVersionControlService implemen
 return 0;
 }
 
-return (int)poList.stream().map(elem -> elem.getResourceName()).distinct().count();
+return (int)poList.stream().map(KafkaAclPO::getResourceName).distinct().count();
 }
 
 @Override
@@ -130,15 +162,7 @@ public class KafkaAclServiceImpl extends BaseKafkaVersionControlService implemen
 return 0;
 }
 
-return (int)poList.stream().map(elem -> elem.getPrincipal()).distinct().count();
+return (int)poList.stream().map(KafkaAclPO::getPrincipal).distinct().count();
 }
-
-@Override
-public List<KafkaAclPO> getKafkaResTypeAclFromDB(Long clusterPhyId, Integer resType) {
-LambdaQueryWrapper<KafkaAclPO> queryWrapper = new LambdaQueryWrapper<>();
-queryWrapper.eq(KafkaAclPO::getClusterPhyId, clusterPhyId);
-queryWrapper.eq(KafkaAclPO::getResourceType, resType);
-return kafkaAclDAO.selectList(queryWrapper);
-}
 
 @Override
@@ -152,15 +176,6 @@ public class KafkaAclServiceImpl extends BaseKafkaVersionControlService implemen
 return kafkaAclDAO.selectList(queryWrapper);
 }
-
-@Override
-public List<KafkaAclPO> getGroupAclFromDB(Long clusterPhyId, String groupName) {
-LambdaQueryWrapper<KafkaAclPO> queryWrapper = new LambdaQueryWrapper<>();
-queryWrapper.eq(KafkaAclPO::getClusterPhyId, clusterPhyId);
-queryWrapper.eq(KafkaAclPO::getResourceType, ResourceType.GROUP.code());
-queryWrapper.eq(KafkaAclPO::getResourceName, groupName);
-return kafkaAclDAO.selectList(queryWrapper);
-}
 
 /**************************************************** private method ****************************************************/
 
 private Result<List<AclBinding>> getAclByZKClient(VersionItemParam itemParam){
@@ -170,7 +185,7 @@ public class KafkaAclServiceImpl extends BaseKafkaVersionControlService implemen
 for (ZkAclStore store: CollectionConverters.asJava(ZkAclStore.stores())) {
 Result<List<AclBinding>> rl = this.getSpecifiedTypeAclByZKClient(param.getClusterPhyId(), store.patternType());
 if (rl.failed()) {
-return rl;
+return Result.buildFromIgnoreData(rl);
 }
 
 aclList.addAll(rl.getData());
@@ -229,4 +244,19 @@ public class KafkaAclServiceImpl extends BaseKafkaVersionControlService implemen
 
 return Result.buildSuc(kafkaAclList);
 }
+
+private Result<Void> insertAndIgnoreDuplicate(KafkaAclPO kafkaAclPO) {
+try {
+kafkaAclDAO.insert(kafkaAclPO);
+
+return Result.buildSuc();
+} catch (DuplicateKeyException dke) {
+// 直接写入,如果出现key冲突则直接忽略,因为key冲突时,表示该数据已完整存在,不需要替换任何数据
+return Result.buildSuc();
+} catch (Exception e) {
+log.error("method=insertAndIgnoreDuplicate||kafkaAclPO={}||errMsg=exception", kafkaAclPO, e);
+
+return Result.buildFromRSAndMsg(ResultStatus.MYSQL_OPERATE_FAILED, e.getMessage());
+}
+}
 }
@@ -20,7 +20,6 @@ import com.xiaojukeji.know.streaming.km.common.exception.VCHandlerNotExistExcept
 import com.xiaojukeji.know.streaming.km.core.service.acl.OpKafkaAclService;
 import com.xiaojukeji.know.streaming.km.core.service.oprecord.OpLogWrapService;
 import com.xiaojukeji.know.streaming.km.core.service.version.BaseKafkaVersionControlService;
-import com.xiaojukeji.know.streaming.km.core.service.version.BaseVersionControlService;
 import com.xiaojukeji.know.streaming.km.persistence.kafka.KafkaAdminClient;
 import com.xiaojukeji.know.streaming.km.persistence.kafka.KafkaAdminZKClient;
 import com.xiaojukeji.know.streaming.km.persistence.mysql.KafkaAclDAO;
@@ -32,7 +31,6 @@ import org.apache.kafka.clients.admin.*;
 import org.apache.kafka.common.acl.*;
 import org.apache.kafka.common.resource.ResourcePattern;
 import org.apache.kafka.common.resource.ResourcePatternFilter;
-import org.apache.kafka.common.resource.ResourceType;
 import org.apache.kafka.common.security.auth.KafkaPrincipal;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.dao.DuplicateKeyException;
@@ -41,8 +39,6 @@ import scala.jdk.javaapi.CollectionConverters;
 
 import javax.annotation.PostConstruct;
 import java.util.*;
-import java.util.function.Function;
-import java.util.stream.Collectors;
 
 import static com.xiaojukeji.know.streaming.km.common.enums.version.VersionEnum.*;
 
@@ -169,11 +165,6 @@ public class OpKafkaAclServiceImpl extends BaseKafkaVersionControlService implem
 return rv;
 }
 
-@Override
-public Result<Void> deleteKafkaAclByResName(ResourceType resourceType, String resourceName, String operator) {
-return Result.buildSuc();
-}
-
 @Override
 public Result<Void> insertAndIgnoreDuplicate(KafkaAclPO kafkaAclPO) {
 try {
@@ -190,34 +181,6 @@ public class OpKafkaAclServiceImpl extends BaseKafkaVersionControlService implem
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
|
||||||
public void batchUpdateAcls(Long clusterPhyId, List<KafkaAclPO> poList) {
|
|
||||||
LambdaQueryWrapper<KafkaAclPO> lambdaQueryWrapper = new LambdaQueryWrapper<>();
|
|
||||||
lambdaQueryWrapper.eq(KafkaAclPO::getClusterPhyId, clusterPhyId);
|
|
||||||
|
|
||||||
Map<String, KafkaAclPO> dbPOMap = kafkaAclDAO.selectList(lambdaQueryWrapper).stream().collect(Collectors.toMap(KafkaAclPO::getUniqueField, Function.identity()));
|
|
||||||
for (KafkaAclPO po: poList) {
|
|
||||||
KafkaAclPO dbPO = dbPOMap.remove(po.getUniqueField());
|
|
||||||
if (dbPO == null) {
|
|
||||||
// 新增的ACL
|
|
||||||
this.insertAndIgnoreDuplicate(po);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// 删除已经不存在的
|
|
||||||
for (KafkaAclPO dbPO: dbPOMap.values()) {
|
|
||||||
kafkaAclDAO.deleteById(dbPO);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public int deleteByUpdateTimeBeforeInDB(Long clusterPhyId, Date beforeTime) {
|
|
||||||
LambdaQueryWrapper<KafkaAclPO> lambdaQueryWrapper = new LambdaQueryWrapper<>();
|
|
||||||
lambdaQueryWrapper.eq(KafkaAclPO::getClusterPhyId, clusterPhyId);
|
|
||||||
lambdaQueryWrapper.le(KafkaAclPO::getUpdateTime, beforeTime);
|
|
||||||
return kafkaAclDAO.delete(lambdaQueryWrapper);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**************************************************** private method ****************************************************/
|
/**************************************************** private method ****************************************************/
|
||||||
|
|
||||||
private Result<Void> deleteInDB(KafkaAclPO kafkaAclPO) {
|
private Result<Void> deleteInDB(KafkaAclPO kafkaAclPO) {
|
||||||
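
The `batchUpdateAcls` method deleted above implemented a classic diff-and-reconcile sync: index the DB rows by a unique key, insert anything newly collected, then delete whatever is left in the index. A minimal self-contained sketch of that pattern (plain collections; `AclRecord` and its `uniqueField` are illustrative stand-ins, not the project's real types):

```java
import java.util.*;
import java.util.function.Function;
import java.util.stream.Collectors;

// Stand-in for KafkaAclPO; uniqueField is the deduplication key.
record AclRecord(String uniqueField, String principal) {}

class AclReconciler {
    /** Splits the freshly collected state into rows to insert and rows to delete. */
    static Map<String, List<AclRecord>> reconcile(List<AclRecord> dbRows, List<AclRecord> collected) {
        Map<String, AclRecord> dbByKey = dbRows.stream()
                .collect(Collectors.toMap(AclRecord::uniqueField, Function.identity()));

        List<AclRecord> toInsert = new ArrayList<>();
        for (AclRecord rec : collected) {
            if (dbByKey.remove(rec.uniqueField()) == null) {
                toInsert.add(rec);                      // not in the DB yet -> insert
            }
        }
        // whatever survived in the index no longer exists in Kafka -> delete
        return Map.of("insert", toInsert, "delete", new ArrayList<>(dbByKey.values()));
    }
}
```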

@@ -8,6 +8,7 @@ import com.xiaojukeji.know.streaming.km.common.bean.entity.cluster.ClusterPhy;
 import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
 import com.xiaojukeji.know.streaming.km.common.bean.entity.result.ResultStatus;
 import com.xiaojukeji.know.streaming.km.common.bean.event.cluster.ClusterPhyAddedEvent;
+import com.xiaojukeji.know.streaming.km.common.bean.event.cluster.connect.ClusterPhyDeletedEvent;
 import com.xiaojukeji.know.streaming.km.common.bean.po.cluster.ClusterPhyPO;
 import com.xiaojukeji.know.streaming.km.common.component.SpringTool;
 import com.xiaojukeji.know.streaming.km.common.constant.MsgConstant;
@@ -146,6 +147,9 @@ public class ClusterPhyServiceImpl implements ClusterPhyService {
                     String.format("删除集群:%s",clusterPhy.toString()));
             opLogWrapService.saveOplogAndIgnoreException(oplogDTO);

+            // publish the cluster-deleted event
+            SpringTool.publish(new ClusterPhyDeletedEvent(this, clusterPhyId));
+
             return Result.buildSuc();
         } catch (Exception e) {
             log.error("method=removeClusterPhyById||clusterPhyId={}||operator={}||msg=remove cluster failed||errMsg=exception!",
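
The new `SpringTool.publish(new ClusterPhyDeletedEvent(this, clusterPhyId))` call hands cluster deletion to Spring's application-event bus, so dependent modules can clean up without `ClusterPhyServiceImpl` knowing about them. A sketch of the consuming side, assuming a standard `ApplicationEvent` subclass (the event's shape is inferred from the constructor call in the diff; the listener itself is hypothetical):

```java
import org.springframework.context.ApplicationEvent;
import org.springframework.context.event.EventListener;
import org.springframework.stereotype.Component;

// Assumed shape, matching the (source, clusterPhyId) constructor seen above.
class ClusterPhyDeletedEvent extends ApplicationEvent {
    private final Long clusterPhyId;

    public ClusterPhyDeletedEvent(Object source, Long clusterPhyId) {
        super(source);
        this.clusterPhyId = clusterPhyId;
    }

    public Long getClusterPhyId() { return clusterPhyId; }
}

@Component
class ClusterPhyCleanupListener {
    @EventListener
    public void onClusterPhyDeleted(ClusterPhyDeletedEvent event) {
        // e.g. drop Connect clusters bound to the deleted Kafka cluster; this is
        // the kind of cleanup the deleteInDBByKafkaClusterId(...) added below enables.
    }
}
```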

@@ -4,14 +4,16 @@ package com.xiaojukeji.know.streaming.km.core.service.connect.cluster;
 import com.xiaojukeji.know.streaming.km.common.bean.dto.connect.cluster.ConnectClusterDTO;
 import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.ConnectCluster;
 import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.ConnectClusterMetadata;
+import com.xiaojukeji.know.streaming.km.common.bean.entity.kafka.KSGroupDescription;
 import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
+import com.xiaojukeji.know.streaming.km.core.service.meta.MetaDataService;

 import java.util.List;

 /**
  * Connect-Cluster
  */
-public interface ConnectClusterService {
+public interface ConnectClusterService extends MetaDataService<KSGroupDescription> {
     Long replaceAndReturnIdInDB(ConnectClusterMetadata metadata);

     List<ConnectCluster> listByKafkaCluster(Long kafkaClusterPhyId);
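
Extending `MetaDataService<KSGroupDescription>` plugs the Connect-cluster service into a generic collect-then-persist pipeline; the `getDataFromKafka(...)` implementation in the `ConnectorServiceImpl` hunks further below follows the same contract for `KSConnector`. A hedged sketch of what such a generic sync contract can look like (the project's real `MetaDataService` may declare different methods and signatures):

```java
import java.util.List;

// Illustrative contract: fetch the live view from Kafka, then reconcile it
// into the local store. Method names here are assumptions, not the real API.
interface MetaDataService<T> {
    // Live snapshot for one physical cluster (Kafka admin / Connect REST / ZK).
    List<T> getDataFromKafka(Long clusterPhyId);

    // Persist the snapshot: typically insert-new / update-changed / delete-stale.
    void writeToDB(Long clusterPhyId, List<T> dataList);
}
```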

@@ -24,9 +24,9 @@ import com.xiaojukeji.know.streaming.km.core.cache.CollectedMetricsLocalCache;
 import com.xiaojukeji.know.streaming.km.core.service.connect.cluster.ConnectClusterMetricService;
 import com.xiaojukeji.know.streaming.km.core.service.connect.cluster.ConnectClusterService;
 import com.xiaojukeji.know.streaming.km.core.service.connect.worker.WorkerService;
-import com.xiaojukeji.know.streaming.km.core.service.version.BaseConnectorMetricService;
+import com.xiaojukeji.know.streaming.km.core.service.version.BaseConnectMetricService;
 import com.xiaojukeji.know.streaming.km.persistence.connect.ConnectJMXClient;
-import com.xiaojukeji.know.streaming.km.persistence.es.dao.connect.ConnectClusterMetricESDAO;
+import com.xiaojukeji.know.streaming.km.persistence.es.dao.connect.cluster.ConnectClusterMetricESDAO;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Service;
 import org.springframework.util.CollectionUtils;
@@ -43,7 +43,7 @@ import static com.xiaojukeji.know.streaming.km.common.bean.entity.result.ResultS
  * @author didi
  */
 @Service
-public class ConnectClusterMetricServiceImpl extends BaseConnectorMetricService implements ConnectClusterMetricService {
+public class ConnectClusterMetricServiceImpl extends BaseConnectMetricService implements ConnectClusterMetricService {
     protected static final ILog LOGGER = LogFactory.getLog(ConnectClusterMetricServiceImpl.class);

     public static final String CONNECT_CLUSTER_METHOD_GET_WORKER_METRIC_AVG = "getWorkerMetricAvg";
@@ -86,8 +86,7 @@ public class ConnectClusterMetricServiceImpl extends BaseConnectorMetricService
         String connectClusterMetricKey = CollectedMetricsLocalCache.genConnectClusterMetricCacheKey(connectClusterPhyId, metric);
         Float keyValue = CollectedMetricsLocalCache.getConnectClusterMetrics(connectClusterMetricKey);
         if (keyValue != null) {
-            ConnectClusterMetrics connectClusterMetrics = ConnectClusterMetrics.initWithMetric(connectClusterPhyId,metric,keyValue);
-            return Result.buildSuc(connectClusterMetrics);
+            return Result.buildSuc(new ConnectClusterMetrics(connectClusterPhyId, metric, keyValue));
         }

         Result<ConnectClusterMetrics> ret = this.collectConnectClusterMetricsFromKafka(connectClusterPhyId, metric);
@@ -209,8 +208,7 @@ public class ConnectClusterMetricServiceImpl extends BaseConnectorMetricService
         try {
             // 2. fetch the JMX metric
             String value = jmxConnectorWrap.getAttribute(new ObjectName(jmxInfo.getJmxObjectName()), jmxInfo.getJmxAttribute()).toString();
-            ConnectWorkerMetrics connectWorkerMetrics = ConnectWorkerMetrics.initWithMetric(connectClusterId, workerId, metric, Float.valueOf(value));
-            return Result.buildSuc(connectWorkerMetrics);
+            return Result.buildSuc(new ConnectWorkerMetrics(connectClusterId, workerId, metric, Float.valueOf(value)));
         } catch (Exception e) {
             LOGGER.error("method=getConnectWorkerMetricsByJMX||connectClusterId={}||workerId={}||metrics={}||jmx={}||msg={}",
                     connectClusterId, workerId, metric, jmxInfo.getJmxObjectName(), e.getClass().getName());
@@ -231,8 +229,8 @@ public class ConnectClusterMetricServiceImpl extends BaseConnectorMetricService
                 .collect(Collectors.toList());
     }

-    protected List<MetricMultiLinesVO> metricMap2VO(Long connectClusterId,
+    private List<MetricMultiLinesVO> metricMap2VO(Long connectClusterId,
                                                     Map<String/*metric*/, Map<Long, List<MetricPointVO>>> map){
         List<MetricMultiLinesVO> multiLinesVOS = new ArrayList<>();
         if (map == null || map.isEmpty()) {
             // return directly if empty
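
Both metric getters above follow the same read-through pattern: probe `CollectedMetricsLocalCache` first and fall back to a live JMX/Kafka collection only on a miss. A generic, self-contained sketch of that flow (the real cache adds entry expiry; names here are illustrative):

```java
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Supplier;

class MetricReadThroughCache {
    private final Map<String, Float> cache = new ConcurrentHashMap<>();

    Float getOrCollect(Long clusterId, String metric, Supplier<Float> collector) {
        String key = clusterId + "@" + metric;   // cf. genConnectClusterMetricCacheKey
        Float cached = cache.get(key);
        if (cached != null) {
            return cached;                       // fast path: recently collected value
        }

        Float fresh = collector.get();           // slow path: JMX / Kafka round trip
        if (fresh != null) {
            cache.put(key, fresh);
        }
        return fresh;
    }
}
```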

@@ -38,6 +38,14 @@ public class ConnectClusterServiceImpl implements ConnectClusterService {
     @Autowired
     private OpLogWrapService opLogWrapService;

+    @Override
+    public int deleteInDBByKafkaClusterId(Long clusterPhyId) {
+        LambdaQueryWrapper<ConnectClusterPO> lambdaQueryWrapper = new LambdaQueryWrapper<>();
+        lambdaQueryWrapper.eq(ConnectClusterPO::getKafkaClusterPhyId, clusterPhyId);
+
+        return connectClusterDAO.deleteById(lambdaQueryWrapper);
+    }
+
     @Override
     public Long replaceAndReturnIdInDB(ConnectClusterMetadata metadata) {
         ConnectClusterPO oldPO = this.getPOFromDB(metadata.getKafkaClusterPhyId(), metadata.getGroupName());
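
The new `deleteInDBByKafkaClusterId` builds a `LambdaQueryWrapper` keyed on the owning Kafka cluster id and hands it to the DAO. For reference, the conventional MyBatis-Plus form of a condition-based removal is `BaseMapper#delete(Wrapper)`; a sketch with illustrative entity and mapper names:

```java
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;

// Illustrative PO carrying just the column the wrapper filters on.
class ConnectClusterRow {
    private Long kafkaClusterPhyId;
    public Long getKafkaClusterPhyId() { return kafkaClusterPhyId; }
}

interface ConnectClusterRowMapper extends BaseMapper<ConnectClusterRow> {}

class ConnectClusterCleanup {
    private final ConnectClusterRowMapper mapper;

    ConnectClusterCleanup(ConnectClusterRowMapper mapper) { this.mapper = mapper; }

    int deleteByKafkaClusterId(Long clusterPhyId) {
        LambdaQueryWrapper<ConnectClusterRow> wrapper = new LambdaQueryWrapper<>();
        wrapper.eq(ConnectClusterRow::getKafkaClusterPhyId, clusterPhyId);
        return mapper.delete(wrapper);   // DELETE ... WHERE kafka_cluster_phy_id = ?
    }
}
```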

@@ -4,49 +4,30 @@ import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.ConnectCluste
 import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.connector.KSConnector;
 import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.connector.KSConnectorInfo;
 import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.connector.KSConnectorStateInfo;
+import com.xiaojukeji.know.streaming.km.core.service.meta.MetaDataService;
 import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
 import com.xiaojukeji.know.streaming.km.common.bean.po.connect.ConnectorPO;
 import com.xiaojukeji.know.streaming.km.common.enums.connect.ConnectorTypeEnum;

 import java.util.List;
-import java.util.Properties;
-import java.util.Set;

 /**
  * View Connector
  */
-public interface ConnectorService {
-    Result<KSConnectorInfo> createConnector(Long connectClusterId, String connectorName, Properties configs, String operator);
-
+public interface ConnectorService extends MetaDataService<KSConnector> {
     /**
      * Get the name list of all connectors
      */
-    Result<List<String>> listConnectorsFromCluster(Long connectClusterId);
+    Result<List<String>> listConnectorsFromCluster(ConnectCluster connectCluster);

     /**
      * Get the info of a single connector
      */
     Result<KSConnectorInfo> getConnectorInfoFromCluster(Long connectClusterId, String connectorName);

-    Result<List<String>> getConnectorTopicsFromCluster(Long connectClusterId, String connectorName);
-
     Result<KSConnectorStateInfo> getConnectorStateInfoFromCluster(Long connectClusterId, String connectorName);

-    Result<KSConnector> getAllConnectorInfoFromCluster(Long connectClusterId, String connectorName);
+    Result<KSConnector> getConnectorFromKafka(Long connectClusterId, String connectorName);

-    Result<Void> resumeConnector(Long connectClusterId, String connectorName, String operator);
-
-    Result<Void> restartConnector(Long connectClusterId, String connectorName, String operator);
-
-    Result<Void> stopConnector(Long connectClusterId, String connectorName, String operator);
-
-    Result<Void> deleteConnector(Long connectClusterId, String connectorName, String operator);
-
-    Result<Void> updateConnectorConfig(Long connectClusterId, String connectorName, Properties configs, String operator);
-
-    void batchReplace(Long kafkaClusterPhyId, Long connectClusterId, List<KSConnector> connectorList, Set<String> allConnectorNameSet);
-
-    void addNewToDB(KSConnector connector);
-
     List<ConnectorPO> listByKafkaClusterIdFromDB(Long kafkaClusterPhyId);

@@ -57,6 +38,4 @@ public interface ConnectorService {
     ConnectorPO getConnectorFromDB(Long connectClusterId, String connectorName);

     ConnectorTypeEnum getConnectorType(Long connectClusterId, String connectorName);
-
-    void completeMirrorMakerInfo(ConnectCluster connectCluster, List<KSConnector> connectorList);
 }

@@ -0,0 +1,26 @@
+package com.xiaojukeji.know.streaming.km.core.service.connect.connector;
+
+import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.connector.KSConnector;
+import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.connector.KSConnectorInfo;
+import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
+
+import java.util.Properties;
+
+/**
+ * View Connector
+ */
+public interface OpConnectorService {
+    Result<KSConnectorInfo> createConnector(Long connectClusterId, String connectorName, Properties configs, String operator);
+
+    Result<Void> resumeConnector(Long connectClusterId, String connectorName, String operator);
+
+    Result<Void> restartConnector(Long connectClusterId, String connectorName, String operator);
+
+    Result<Void> stopConnector(Long connectClusterId, String connectorName, String operator);
+
+    Result<Void> deleteConnector(Long connectClusterId, String connectorName, String operator);
+
+    Result<Void> updateConnectorConfig(Long connectClusterId, String connectorName, Properties configs, String operator);
+
+    void addNewToDB(KSConnector connector);
+}
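
Together with the `ConnectorService` changes above, this completes a read/write split: queries and metadata sync stay on `ConnectorService`, while all mutating calls (create, resume, restart, stop, delete, update-config) move to `OpConnectorService`. A self-contained miniature of why callers benefit from the segregation (stand-in types, not the project's real signatures):

```java
// Read side and write side as separate capabilities.
interface ConnectorReads {
    String getStateFromDB(long connectClusterId, String connectorName);
}

interface ConnectorOps {
    void restartConnector(long connectClusterId, String connectorName, String operator);
}

// A component that only restarts connectors needs only the ops capability,
// so it can never accidentally grow read-model dependencies (and vice versa).
class RestartTask {
    private final ConnectorOps ops;

    RestartTask(ConnectorOps ops) { this.ops = ops; }

    void run(long connectClusterId, String connectorName, String operator) {
        ops.restartConnector(connectClusterId, connectorName, operator);
    }
}
```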

@@ -18,6 +18,7 @@ import com.xiaojukeji.know.streaming.km.common.bean.po.metrice.connect.Connector
 import com.xiaojukeji.know.streaming.km.common.bean.vo.metrics.line.MetricLineVO;
 import com.xiaojukeji.know.streaming.km.common.bean.vo.metrics.line.MetricMultiLinesVO;
 import com.xiaojukeji.know.streaming.km.common.bean.vo.metrics.point.MetricPointVO;
+import com.xiaojukeji.know.streaming.km.common.enums.connect.ConnectStatusEnum;
 import com.xiaojukeji.know.streaming.km.common.enums.connect.ConnectorTypeEnum;
 import com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemTypeEnum;
 import com.xiaojukeji.know.streaming.km.common.exception.VCHandlerNotExistException;
@@ -32,7 +33,7 @@ import com.xiaojukeji.know.streaming.km.core.service.connect.connector.Connector
 import com.xiaojukeji.know.streaming.km.core.service.connect.worker.WorkerConnectorService;
 import com.xiaojukeji.know.streaming.km.core.service.connect.worker.WorkerService;
 import com.xiaojukeji.know.streaming.km.core.service.health.state.HealthStateService;
-import com.xiaojukeji.know.streaming.km.core.service.version.BaseConnectorMetricService;
+import com.xiaojukeji.know.streaming.km.core.service.version.BaseConnectMetricService;
 import com.xiaojukeji.know.streaming.km.persistence.connect.ConnectJMXClient;
 import com.xiaojukeji.know.streaming.km.persistence.es.dao.connect.connector.ConnectorMetricESDAO;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -52,7 +53,7 @@ import static com.xiaojukeji.know.streaming.km.common.bean.entity.result.ResultS
  * @author didi
 */
 @Service
-public class ConnectorMetricServiceImpl extends BaseConnectorMetricService implements ConnectorMetricService {
+public class ConnectorMetricServiceImpl extends BaseConnectMetricService implements ConnectorMetricService {
     protected static final ILog LOGGER = LogFactory.getLog(ConnectorMetricServiceImpl.class);

     public static final String CONNECTOR_METHOD_DO_NOTHING = "doNothing";
@@ -67,6 +68,8 @@ public class ConnectorMetricServiceImpl extends BaseConnectorMetricService imple

     public static final String CONNECTOR_METHOD_GET_METRIC_HEALTH_SCORE = "getMetricHealthScore";

+    public static final String CONNECTOR_METHOD_GET_METRIC_RUNNING_STATUS = "getMetricRunningStatus";
+
     @Autowired
     private ConnectorMetricESDAO connectorMetricESDAO;

@@ -98,11 +101,12 @@ public class ConnectorMetricServiceImpl extends BaseConnectorMetricService imple
     @Override
     protected void initRegisterVCHandler() {
         registerVCHandler(CONNECTOR_METHOD_DO_NOTHING, this::doNothing);
         registerVCHandler(CONNECTOR_METHOD_GET_CONNECT_WORKER_METRIC_SUM, this::getConnectWorkerMetricSum);
         registerVCHandler(CONNECTOR_METHOD_GET_CONNECTOR_TASK_METRICS_AVG, this::getConnectorTaskMetricsAvg);
         registerVCHandler(CONNECTOR_METHOD_GET_CONNECTOR_TASK_METRICS_MAX, this::getConnectorTaskMetricsMax);
         registerVCHandler(CONNECTOR_METHOD_GET_CONNECTOR_TASK_METRICS_SUM, this::getConnectorTaskMetricsSum);
         registerVCHandler(CONNECTOR_METHOD_GET_METRIC_HEALTH_SCORE, this::getMetricHealthScore);
+        registerVCHandler(CONNECTOR_METHOD_GET_METRIC_RUNNING_STATUS, this::getMetricRunningStatus);
     }

     @Override
@@ -111,8 +115,7 @@ public class ConnectorMetricServiceImpl extends BaseConnectorMetricService imple
         Float keyValue = CollectedMetricsLocalCache.getConnectorMetrics(connectorMetricKey);

         if (null != keyValue) {
-            ConnectorMetrics connectorMetrics = ConnectorMetrics.initWithMetric(connectClusterPhyId, connectorName, metric, keyValue);
-            return Result.buildSuc(connectorMetrics);
+            return Result.buildSuc(new ConnectorMetrics(connectClusterPhyId, connectorName, metric, keyValue));
         }

         Result<ConnectorMetrics> ret = this.collectConnectClusterMetricsFromKafka(connectClusterPhyId, connectorName, metric);
@@ -216,6 +219,20 @@ public class ConnectorMetricServiceImpl extends BaseConnectorMetricService imple
         return Result.buildSuc(metrics);
     }

+    private Result<ConnectorMetrics> getMetricRunningStatus(VersionItemParam metricParam) {
+        ConnectorMetricParam param = (ConnectorMetricParam) metricParam;
+        Long connectClusterId = param.getConnectClusterId();
+        String connectorName = param.getConnectorName();
+        String metricName = param.getMetricName();
+
+        ConnectorPO connector = connectorService.getConnectorFromDB(connectClusterId, connectorName);
+        if (connector == null) {
+            return Result.buildSuc(new ConnectorMetrics(connectClusterId, connectorName, metricName, (float)ConnectStatusEnum.UNKNOWN.getStatus()));
+        }
+
+        return Result.buildSuc(new ConnectorMetrics(connectClusterId, connectorName, metricName, (float)ConnectStatusEnum.getByValue(connector.getState()).getStatus()));
+    }
+
     private Result<ConnectorMetrics> getConnectWorkerMetricSum(VersionItemParam metricParam) {
         ConnectorMetricParam param = (ConnectorMetricParam) metricParam;
         Long connectClusterId = param.getConnectClusterId();
@@ -240,12 +257,16 @@ public class ConnectorMetricServiceImpl extends BaseConnectorMetricService imple
         if (!isCollected) {
             return Result.buildFailure(NOT_EXIST);
         }
-        return Result.buildSuc(ConnectorMetrics.initWithMetric(connectClusterId, connectorName, metric, sum));
+
+        return Result.buildSuc(new ConnectorMetrics(connectClusterId, connectorName, metric, sum));
     }

     //kafka.connect:type=connect-worker-metrics,connector="{connector}" metrics
     private Result<ConnectorMetrics> getConnectorMetric(Long connectClusterId, String workerId, String connectorName, String metric, ConnectorTypeEnum connectorType) {
         VersionConnectJmxInfo jmxInfo = getJMXInfo(connectClusterId, metric);
+        if (null == jmxInfo) {
+            return Result.buildFailure(VC_ITEM_JMX_NOT_EXIST);
+        }

         if (jmxInfo.getType() != null) {
             if (connectorType == null) {
@@ -257,9 +278,6 @@ public class ConnectorMetricServiceImpl extends BaseConnectorMetricService imple
             }
         }

-        if (null == jmxInfo) {
-            return Result.buildFailure(VC_ITEM_JMX_NOT_EXIST);
-        }
         String jmxObjectName = String.format(jmxInfo.getJmxObjectName(), connectorName);

         JmxConnectorWrap jmxConnectorWrap = connectJMXClient.getClientWithCheck(connectClusterId, workerId);
@@ -270,8 +288,7 @@ public class ConnectorMetricServiceImpl extends BaseConnectorMetricService imple
         try {
             // 2. fetch the JMX metric
             String value = jmxConnectorWrap.getAttribute(new ObjectName(jmxObjectName), jmxInfo.getJmxAttribute()).toString();
-            ConnectorMetrics connectorMetrics = ConnectorMetrics.initWithMetric(connectClusterId, connectorName, metric, Float.valueOf(value));
-            return Result.buildSuc(connectorMetrics);
+            return Result.buildSuc(new ConnectorMetrics(connectClusterId, connectorName, metric, Float.valueOf(value)));
         } catch (InstanceNotFoundException e) {
             // ignore this error: it occurs when the metric does not exist in JMX
             return Result.buildSuc(new ConnectorMetrics(connectClusterId, connectorName));
@@ -296,8 +313,7 @@ public class ConnectorMetricServiceImpl extends BaseConnectorMetricService imple
         }

         Float sum = ret.getData().stream().map(elem -> elem.getMetric(metric)).reduce(Float::sum).get();
-        ConnectorMetrics connectorMetrics = ConnectorMetrics.initWithMetric(connectClusterId, connectorName, metric, sum / ret.getData().size());
-        return Result.buildSuc(connectorMetrics);
+        return Result.buildSuc(new ConnectorMetrics(connectClusterId, connectorName, metric, sum / ret.getData().size()));
     }

     private Result<ConnectorMetrics> getConnectorTaskMetricsMax(VersionItemParam metricParam){
@@ -313,8 +329,7 @@ public class ConnectorMetricServiceImpl extends BaseConnectorMetricService imple
         }

         Float max = ret.getData().stream().max((a, b) -> a.getMetric(metric).compareTo(b.getMetric(metric))).get().getMetric(metric);
-        ConnectorMetrics connectorMetrics = ConnectorMetrics.initWithMetric(connectClusterId, connectorName, metric, max);
-        return Result.buildSuc(connectorMetrics);
+        return Result.buildSuc(new ConnectorMetrics(connectClusterId, connectorName, metric, max));
     }

     private Result<ConnectorMetrics> getConnectorTaskMetricsSum(VersionItemParam metricParam){
@@ -330,8 +345,7 @@ public class ConnectorMetricServiceImpl extends BaseConnectorMetricService imple
         }

         Float sum = ret.getData().stream().map(elem -> elem.getMetric(metric)).reduce(Float::sum).get();
-        ConnectorMetrics connectorMetrics = ConnectorMetrics.initWithMetric(connectClusterId, connectorName, metric, sum);
-        return Result.buildSuc(connectorMetrics);
+        return Result.buildSuc(new ConnectorMetrics(connectClusterId, connectorName, metric, sum));
     }


@@ -358,6 +372,9 @@ public class ConnectorMetricServiceImpl extends BaseConnectorMetricService imple

     private Result<ConnectorTaskMetrics> getConnectorTaskMetric(Long connectClusterId, String workerId, String connectorName, Integer taskId, String metric, ConnectorTypeEnum connectorType) {
         VersionConnectJmxInfo jmxInfo = getJMXInfo(connectClusterId, metric);
+        if (null == jmxInfo) {
+            return Result.buildFailure(VC_ITEM_JMX_NOT_EXIST);
+        }

         if (jmxInfo.getType() != null) {
             if (connectorType == null) {
@@ -369,9 +386,6 @@ public class ConnectorMetricServiceImpl extends BaseConnectorMetricService imple
             }
         }

-        if (null == jmxInfo) {
-            return Result.buildFailure(VC_ITEM_JMX_NOT_EXIST);
-        }
         String jmxObjectName = String.format(jmxInfo.getJmxObjectName(), connectorName, taskId);

         JmxConnectorWrap jmxConnectorWrap = connectJMXClient.getClientWithCheck(connectClusterId, workerId);
@@ -382,8 +396,7 @@ public class ConnectorMetricServiceImpl extends BaseConnectorMetricService imple
         try {
             // 2. fetch the JMX metric
             String value = jmxConnectorWrap.getAttribute(new ObjectName(jmxObjectName), jmxInfo.getJmxAttribute()).toString();
-            ConnectorTaskMetrics connectorTaskMetrics = ConnectorTaskMetrics.initWithMetric(connectClusterId, connectorName, taskId, metric, Float.valueOf(value));
-            return Result.buildSuc(connectorTaskMetrics);
+            return Result.buildSuc(new ConnectorTaskMetrics(connectClusterId, connectorName, taskId, metric, Float.valueOf(value)));
         } catch (Exception e) {
             LOGGER.error("method=getConnectorTaskMetric||connectClusterId={}||workerId={}||connectorName={}||taskId={}||metrics={}||jmx={}||msg={}",
                     connectClusterId, workerId, connectorName, taskId, metric, jmxObjectName, e.getClass().getName());
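
The new `getMetricRunningStatus` handler converts the connector state string persisted in MySQL into a numeric gauge via `ConnectStatusEnum`, degrading to `UNKNOWN` when no row exists. A plausible shape for such a state-to-number enum (a sketch; the project's actual names and codes may differ):

```java
import java.util.Arrays;

enum ConnectStatusSketch {
    RUNNING("RUNNING", 1),
    PAUSED("PAUSED", 2),
    FAILED("FAILED", 3),
    UNKNOWN("UNKNOWN", -1);

    private final String value;
    private final int status;

    ConnectStatusSketch(String value, int status) {
        this.value = value;
        this.status = status;
    }

    public int getStatus() { return status; }

    public static ConnectStatusSketch getByValue(String value) {
        return Arrays.stream(values())
                .filter(s -> s.value.equals(value))
                .findFirst()
                .orElse(UNKNOWN);   // unknown / new states degrade gracefully
    }
}
```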
|
|||||||
@@ -3,7 +3,6 @@ package com.xiaojukeji.know.streaming.km.core.service.connect.connector.impl;
|
|||||||
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
|
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
|
||||||
import com.didiglobal.logi.log.ILog;
|
import com.didiglobal.logi.log.ILog;
|
||||||
import com.didiglobal.logi.log.LogFactory;
|
import com.didiglobal.logi.log.LogFactory;
|
||||||
import com.didiglobal.logi.security.common.dto.oplog.OplogDTO;
|
|
||||||
import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.ConnectCluster;
|
import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.ConnectCluster;
|
||||||
import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.connector.KSConnector;
|
import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.connector.KSConnector;
|
||||||
import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.connector.KSConnectorInfo;
|
import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.connector.KSConnectorInfo;
|
||||||
@@ -13,19 +12,14 @@ import com.xiaojukeji.know.streaming.km.common.bean.entity.result.ResultStatus;
|
|||||||
import com.xiaojukeji.know.streaming.km.common.bean.po.connect.ConnectorPO;
|
import com.xiaojukeji.know.streaming.km.common.bean.po.connect.ConnectorPO;
|
||||||
import com.xiaojukeji.know.streaming.km.common.component.RestTool;
|
import com.xiaojukeji.know.streaming.km.common.component.RestTool;
|
||||||
import com.xiaojukeji.know.streaming.km.common.constant.MsgConstant;
|
import com.xiaojukeji.know.streaming.km.common.constant.MsgConstant;
|
||||||
import com.xiaojukeji.know.streaming.km.common.constant.connect.KafkaConnectConstant;
|
|
||||||
import com.xiaojukeji.know.streaming.km.common.converter.ConnectConverter;
|
import com.xiaojukeji.know.streaming.km.common.converter.ConnectConverter;
|
||||||
import com.xiaojukeji.know.streaming.km.common.enums.connect.ConnectorTypeEnum;
|
import com.xiaojukeji.know.streaming.km.common.enums.connect.ConnectorTypeEnum;
|
||||||
import com.xiaojukeji.know.streaming.km.common.enums.operaterecord.ModuleEnum;
|
|
||||||
import com.xiaojukeji.know.streaming.km.common.enums.operaterecord.OperationEnum;
|
|
||||||
import com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemTypeEnum;
|
|
||||||
import com.xiaojukeji.know.streaming.km.common.utils.BackoffUtils;
|
|
||||||
import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
|
import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
|
||||||
|
import com.xiaojukeji.know.streaming.km.common.utils.Triple;
|
||||||
|
import com.xiaojukeji.know.streaming.km.common.utils.Tuple;
|
||||||
import com.xiaojukeji.know.streaming.km.common.utils.ValidateUtils;
|
import com.xiaojukeji.know.streaming.km.common.utils.ValidateUtils;
|
||||||
import com.xiaojukeji.know.streaming.km.core.service.connect.cluster.ConnectClusterService;
|
import com.xiaojukeji.know.streaming.km.core.service.connect.cluster.ConnectClusterService;
|
||||||
import com.xiaojukeji.know.streaming.km.core.service.connect.connector.ConnectorService;
|
import com.xiaojukeji.know.streaming.km.core.service.connect.connector.ConnectorService;
|
||||||
import com.xiaojukeji.know.streaming.km.core.service.oprecord.OpLogWrapService;
|
|
||||||
import com.xiaojukeji.know.streaming.km.core.service.version.BaseVersionControlService;
|
|
||||||
import com.xiaojukeji.know.streaming.km.persistence.mysql.connect.ConnectorDAO;
|
import com.xiaojukeji.know.streaming.km.persistence.mysql.connect.ConnectorDAO;
|
||||||
import org.apache.kafka.connect.runtime.rest.entities.ActiveTopicsInfo;
|
import org.apache.kafka.connect.runtime.rest.entities.ActiveTopicsInfo;
|
||||||
import org.apache.kafka.connect.runtime.rest.entities.ConnectorInfo;
|
import org.apache.kafka.connect.runtime.rest.entities.ConnectorInfo;
|
||||||
@@ -34,14 +28,9 @@ import org.springframework.dao.DuplicateKeyException;
|
|||||||
import org.springframework.stereotype.Service;
|
import org.springframework.stereotype.Service;
|
||||||
|
|
||||||
import java.util.*;
|
import java.util.*;
|
||||||
import java.util.stream.Collectors;
|
|
||||||
|
|
||||||
import static com.xiaojukeji.know.streaming.km.common.constant.connect.KafkaConnectConstant.MIRROR_MAKER_SOURCE_CLUSTER_BOOTSTRAP_SERVERS_FIELD_NAME;
|
|
||||||
import static com.xiaojukeji.know.streaming.km.common.constant.connect.KafkaConnectConstant.MIRROR_MAKER_TARGET_CLUSTER_BOOTSTRAP_SERVERS_FIELD_NAME;
|
|
||||||
import static com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemTypeEnum.SERVICE_OP_CONNECT_CONNECTOR;
|
|
||||||
|
|
||||||
@Service
|
@Service
|
||||||
public class ConnectorServiceImpl extends BaseVersionControlService implements ConnectorService {
|
public class ConnectorServiceImpl implements ConnectorService {
|
||||||
private static final ILog LOGGER = LogFactory.getLog(ConnectorServiceImpl.class);
|
private static final ILog LOGGER = LogFactory.getLog(ConnectorServiceImpl.class);
|
||||||
|
|
||||||
@Autowired
|
@Autowired
|
||||||
@@ -53,79 +42,14 @@ public class ConnectorServiceImpl extends BaseVersionControlService implements C
|
|||||||
@Autowired
|
@Autowired
|
||||||
private ConnectClusterService connectClusterService;
|
private ConnectClusterService connectClusterService;
|
||||||
|
|
||||||
@Autowired
|
|
||||||
private OpLogWrapService opLogWrapService;
|
|
||||||
|
|
||||||
private static final String LIST_CONNECTORS_URI = "/connectors";
|
private static final String LIST_CONNECTORS_URI = "/connectors";
|
||||||
private static final String GET_CONNECTOR_INFO_PREFIX_URI = "/connectors";
|
private static final String GET_CONNECTOR_INFO_PREFIX_URI = "/connectors";
|
||||||
private static final String GET_CONNECTOR_TOPICS_URI = "/connectors/%s/topics";
|
private static final String GET_CONNECTOR_TOPICS_URI = "/connectors/%s/topics";
|
||||||
private static final String GET_CONNECTOR_STATUS_URI = "/connectors/%s/status";
|
private static final String GET_CONNECTOR_STATUS_URI = "/connectors/%s/status";
|
||||||
|
|
||||||
private static final String CREATE_CONNECTOR_URI = "/connectors";
|
|
||||||
private static final String RESUME_CONNECTOR_URI = "/connectors/%s/resume";
|
|
||||||
private static final String RESTART_CONNECTOR_URI = "/connectors/%s/restart";
|
|
||||||
private static final String PAUSE_CONNECTOR_URI = "/connectors/%s/pause";
|
|
||||||
private static final String DELETE_CONNECTOR_URI = "/connectors/%s";
|
|
||||||
private static final String UPDATE_CONNECTOR_CONFIG_URI = "/connectors/%s/config";
|
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected VersionItemTypeEnum getVersionItemType() {
|
public Result<List<String>> listConnectorsFromCluster(ConnectCluster connectCluster) {
|
||||||
return SERVICE_OP_CONNECT_CONNECTOR;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public Result<KSConnectorInfo> createConnector(Long connectClusterId, String connectorName, Properties configs, String operator) {
|
|
||||||
try {
|
try {
|
||||||
ConnectCluster connectCluster = connectClusterService.getById(connectClusterId);
|
|
||||||
if (ValidateUtils.isNull(connectCluster)) {
|
|
||||||
return Result.buildFromRSAndMsg(ResultStatus.NOT_EXIST, MsgConstant.getConnectClusterNotExist(connectClusterId));
|
|
||||||
}
|
|
||||||
|
|
||||||
// 构造参数
|
|
||||||
Properties props = new Properties();
|
|
||||||
props.put(KafkaConnectConstant.MIRROR_MAKER_NAME_FIELD_NAME, connectorName);
|
|
||||||
props.put("config", configs);
|
|
||||||
|
|
||||||
ConnectorInfo connectorInfo = restTool.postObjectWithJsonContent(
|
|
||||||
connectCluster.getSuitableRequestUrl() + CREATE_CONNECTOR_URI,
|
|
||||||
props,
|
|
||||||
ConnectorInfo.class
|
|
||||||
);
|
|
||||||
|
|
||||||
opLogWrapService.saveOplogAndIgnoreException(new OplogDTO(
|
|
||||||
operator,
|
|
||||||
OperationEnum.ADD.getDesc(),
|
|
||||||
ModuleEnum.KAFKA_CONNECT_CONNECTOR.getDesc(),
|
|
||||||
MsgConstant.getConnectorBizStr(connectClusterId, connectorName),
|
|
||||||
ConvertUtil.obj2Json(configs)
|
|
||||||
));
|
|
||||||
|
|
||||||
KSConnectorInfo connector = new KSConnectorInfo();
|
|
||||||
connector.setConnectClusterId(connectClusterId);
|
|
||||||
connector.setConfig(connectorInfo.config());
|
|
||||||
connector.setName(connectorInfo.name());
|
|
||||||
connector.setTasks(connectorInfo.tasks());
|
|
||||||
connector.setType(connectorInfo.type());
|
|
||||||
|
|
||||||
return Result.buildSuc(connector);
|
|
||||||
} catch (Exception e) {
|
|
||||||
LOGGER.error(
|
|
||||||
"method=createConnector||connectClusterId={}||connectorName={}||configs={}||operator={}||errMsg=exception",
|
|
||||||
connectClusterId, connectorName, configs, operator, e
|
|
||||||
);
|
|
||||||
|
|
||||||
return Result.buildFromRSAndMsg(ResultStatus.KAFKA_CONNECTOR_READ_FAILED, e.getMessage());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public Result<List<String>> listConnectorsFromCluster(Long connectClusterId) {
|
|
||||||
try {
|
|
||||||
ConnectCluster connectCluster = connectClusterService.getById(connectClusterId);
|
|
||||||
if (ValidateUtils.isNull(connectCluster)) {
|
|
||||||
return Result.buildFromRSAndMsg(ResultStatus.NOT_EXIST, MsgConstant.getConnectClusterNotExist(connectClusterId));
|
|
||||||
}
|
|
||||||
|
|
||||||
List<String> nameList = restTool.getArrayObjectWithJsonContent(
|
List<String> nameList = restTool.getArrayObjectWithJsonContent(
|
||||||
connectCluster.getSuitableRequestUrl() + LIST_CONNECTORS_URI,
|
connectCluster.getSuitableRequestUrl() + LIST_CONNECTORS_URI,
|
||||||
new HashMap<>(),
|
new HashMap<>(),
|
||||||
@@ -135,8 +59,8 @@ public class ConnectorServiceImpl extends BaseVersionControlService implements C
|
|||||||
return Result.buildSuc(nameList);
|
return Result.buildSuc(nameList);
|
||||||
} catch (Exception e) {
|
} catch (Exception e) {
|
||||||
LOGGER.error(
|
LOGGER.error(
|
||||||
"method=listConnectorsFromCluster||connectClusterId={}||errMsg=exception",
|
"method=listConnectorsFromCluster||connectClusterId={}||connectClusterSuitableUrl={}||errMsg=exception",
|
||||||
connectClusterId, e
|
connectCluster.getId(), connectCluster.getSuitableRequestUrl(), e
|
||||||
);
|
);
|
||||||
|
|
||||||
return Result.buildFromRSAndMsg(ResultStatus.KAFKA_CONNECTOR_READ_FAILED, e.getMessage());
|
return Result.buildFromRSAndMsg(ResultStatus.KAFKA_CONNECTOR_READ_FAILED, e.getMessage());
|
||||||
@@ -153,16 +77,6 @@ public class ConnectorServiceImpl extends BaseVersionControlService implements C
|
|||||||
return this.getConnectorInfoFromCluster(connectCluster, connectorName);
|
return this.getConnectorInfoFromCluster(connectCluster, connectorName);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
|
||||||
public Result<List<String>> getConnectorTopicsFromCluster(Long connectClusterId, String connectorName) {
|
|
||||||
ConnectCluster connectCluster = connectClusterService.getById(connectClusterId);
|
|
||||||
if (ValidateUtils.isNull(connectCluster)) {
|
|
||||||
return Result.buildFromRSAndMsg(ResultStatus.NOT_EXIST, MsgConstant.getConnectClusterNotExist(connectClusterId));
|
|
||||||
}
|
|
||||||
|
|
||||||
return this.getConnectorTopicsFromCluster(connectCluster, connectorName);
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public Result<KSConnectorStateInfo> getConnectorStateInfoFromCluster(Long connectClusterId, String connectorName) {
|
public Result<KSConnectorStateInfo> getConnectorStateInfoFromCluster(Long connectClusterId, String connectorName) {
|
||||||
ConnectCluster connectCluster = connectClusterService.getById(connectClusterId);
|
ConnectCluster connectCluster = connectClusterService.getById(connectClusterId);
|
||||||
@@ -174,270 +88,26 @@ public class ConnectorServiceImpl extends BaseVersionControlService implements C
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public Result<KSConnector> getAllConnectorInfoFromCluster(Long connectClusterId, String connectorName) {
|
public Result<KSConnector> getConnectorFromKafka(Long connectClusterId, String connectorName) {
|
||||||
ConnectCluster connectCluster = connectClusterService.getById(connectClusterId);
|
ConnectCluster connectCluster = connectClusterService.getById(connectClusterId);
|
||||||
if (ValidateUtils.isNull(connectCluster)) {
|
if (ValidateUtils.isNull(connectCluster)) {
|
||||||
return Result.buildFromRSAndMsg(ResultStatus.NOT_EXIST, MsgConstant.getConnectClusterNotExist(connectClusterId));
|
return Result.buildFromRSAndMsg(ResultStatus.NOT_EXIST, MsgConstant.getConnectClusterNotExist(connectClusterId));
|
||||||
}
|
}
|
||||||
|
|
||||||
Result<KSConnectorInfo> connectorResult = this.getConnectorInfoFromCluster(connectCluster, connectorName);
|
Result<Triple<KSConnectorInfo, List<String>, KSConnectorStateInfo>> fullInfoResult = this.getConnectorFullInfoFromKafka(connectCluster, connectorName);
|
||||||
if (connectorResult.failed()) {
|
if (fullInfoResult.failed()) {
|
||||||
LOGGER.error(
|
return Result.buildFromIgnoreData(fullInfoResult);
|
||||||
"method=getAllConnectorInfoFromCluster||connectClusterId={}||connectorName={}||result={}",
|
|
||||||
connectClusterId, connectorName, connectorResult
|
|
||||||
);
|
|
||||||
|
|
||||||
return Result.buildFromIgnoreData(connectorResult);
|
|
||||||
}
|
|
||||||
|
|
||||||
Result<List<String>> topicNameListResult = this.getConnectorTopicsFromCluster(connectCluster, connectorName);
|
|
||||||
if (topicNameListResult.failed()) {
|
|
||||||
LOGGER.error(
|
|
||||||
"method=getAllConnectorInfoFromCluster||connectClusterId={}||connectorName={}||result={}",
|
|
||||||
connectClusterId, connectorName, connectorResult
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
Result<KSConnectorStateInfo> stateInfoResult = this.getConnectorStateInfoFromCluster(connectCluster, connectorName);
|
|
||||||
if (stateInfoResult.failed()) {
|
|
||||||
LOGGER.error(
|
|
||||||
"method=getAllConnectorInfoFromCluster||connectClusterId={}||connectorName={}||result={}",
|
|
||||||
connectClusterId, connectorName, connectorResult
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return Result.buildSuc(ConnectConverter.convert2KSConnector(
|
return Result.buildSuc(ConnectConverter.convert2KSConnector(
|
||||||
connectCluster.getKafkaClusterPhyId(),
|
connectCluster.getKafkaClusterPhyId(),
|
||||||
connectCluster.getId(),
|
connectCluster.getId(),
|
||||||
connectorResult.getData(),
|
fullInfoResult.getData().v1(),
|
||||||
stateInfoResult.getData(),
|
fullInfoResult.getData().v3(),
|
||||||
topicNameListResult.getData()
|
fullInfoResult.getData().v2()
|
||||||
));
|
));
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
|
||||||
public Result<Void> resumeConnector(Long connectClusterId, String connectorName, String operator) {
|
|
||||||
try {
|
|
||||||
ConnectCluster connectCluster = connectClusterService.getById(connectClusterId);
|
|
||||||
if (ValidateUtils.isNull(connectCluster)) {
|
|
||||||
return Result.buildFromRSAndMsg(ResultStatus.NOT_EXIST, MsgConstant.getConnectClusterNotExist(connectClusterId));
|
|
||||||
}
|
|
||||||
|
|
||||||
restTool.putJsonForObject(
|
|
||||||
connectCluster.getSuitableRequestUrl() + String.format(RESUME_CONNECTOR_URI, connectorName),
|
|
||||||
new HashMap<>(),
|
|
||||||
String.class
|
|
||||||
);
|
|
||||||
|
|
||||||
this.updateStatus(connectCluster, connectClusterId, connectorName);
|
|
||||||
|
|
||||||
opLogWrapService.saveOplogAndIgnoreException(new OplogDTO(
|
|
||||||
operator,
|
|
||||||
OperationEnum.ENABLE.getDesc(),
|
|
||||||
ModuleEnum.KAFKA_CONNECT_CONNECTOR.getDesc(),
|
|
||||||
MsgConstant.getConnectorBizStr(connectClusterId, connectorName),
|
|
||||||
""
|
|
||||||
));
|
|
||||||
|
|
||||||
return Result.buildSuc();
|
|
||||||
} catch (Exception e) {
|
|
||||||
LOGGER.error(
|
|
||||||
"class=ConnectorServiceImpl||method=resumeConnector||connectClusterId={}||errMsg=exception",
|
|
||||||
connectClusterId, e
|
|
||||||
);
|
|
||||||
|
|
||||||
return Result.buildFromRSAndMsg(ResultStatus.KAFKA_CONNECTOR_READ_FAILED, e.getMessage());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public Result<Void> restartConnector(Long connectClusterId, String connectorName, String operator) {
|
|
||||||
try {
|
|
||||||
ConnectCluster connectCluster = connectClusterService.getById(connectClusterId);
|
|
||||||
if (ValidateUtils.isNull(connectCluster)) {
|
|
||||||
return Result.buildFromRSAndMsg(ResultStatus.NOT_EXIST, MsgConstant.getConnectClusterNotExist(connectClusterId));
|
|
||||||
}
|
|
||||||
|
|
||||||
restTool.postObjectWithJsonContent(
|
|
||||||
connectCluster.getSuitableRequestUrl() + String.format(RESTART_CONNECTOR_URI, connectorName),
|
|
||||||
new HashMap<>(),
|
|
||||||
String.class
|
|
||||||
);
|
|
||||||
|
|
||||||
this.updateStatus(connectCluster, connectClusterId, connectorName);
|
|
||||||
|
|
||||||
opLogWrapService.saveOplogAndIgnoreException(new OplogDTO(
|
|
||||||
operator,
|
|
||||||
OperationEnum.RESTART.getDesc(),
|
|
||||||
ModuleEnum.KAFKA_CONNECT_CONNECTOR.getDesc(),
|
|
||||||
MsgConstant.getConnectorBizStr(connectClusterId, connectorName),
|
|
||||||
""
|
|
||||||
));
|
|
||||||
|
|
||||||
return Result.buildSuc();
|
|
||||||
} catch (Exception e) {
|
|
||||||
LOGGER.error(
|
|
||||||
"method=restartConnector||connectClusterId={}||errMsg=exception",
|
|
||||||
connectClusterId, e
|
|
||||||
);
|
|
||||||
|
|
||||||
return Result.buildFromRSAndMsg(ResultStatus.KAFKA_CONNECTOR_READ_FAILED, e.getMessage());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public Result<Void> stopConnector(Long connectClusterId, String connectorName, String operator) {
|
|
||||||
try {
|
|
||||||
ConnectCluster connectCluster = connectClusterService.getById(connectClusterId);
|
|
||||||
if (ValidateUtils.isNull(connectCluster)) {
|
|
||||||
return Result.buildFromRSAndMsg(ResultStatus.NOT_EXIST, MsgConstant.getConnectClusterNotExist(connectClusterId));
|
|
||||||
}
|
|
||||||
|
|
||||||
restTool.putJsonForObject(
|
|
||||||
connectCluster.getSuitableRequestUrl() + String.format(PAUSE_CONNECTOR_URI, connectorName),
|
|
||||||
new HashMap<>(),
|
|
||||||
String.class
|
|
||||||
);
|
|
||||||
|
|
||||||
this.updateStatus(connectCluster, connectClusterId, connectorName);
|
|
||||||
|
|
||||||
opLogWrapService.saveOplogAndIgnoreException(new OplogDTO(
|
|
||||||
operator,
|
|
||||||
OperationEnum.DISABLE.getDesc(),
|
|
||||||
ModuleEnum.KAFKA_CONNECT_CONNECTOR.getDesc(),
|
|
||||||
MsgConstant.getConnectorBizStr(connectClusterId, connectorName),
|
|
||||||
""
|
|
||||||
));
|
|
||||||
|
|
||||||
return Result.buildSuc();
|
|
||||||
} catch (Exception e) {
|
|
||||||
LOGGER.error(
|
|
||||||
"method=stopConnector||connectClusterId={}||errMsg=exception",
|
|
||||||
connectClusterId, e
|
|
||||||
);
|
|
||||||
|
|
||||||
return Result.buildFromRSAndMsg(ResultStatus.KAFKA_CONNECTOR_READ_FAILED, e.getMessage());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public Result<Void> deleteConnector(Long connectClusterId, String connectorName, String operator) {
|
|
||||||
try {
|
|
||||||
ConnectCluster connectCluster = connectClusterService.getById(connectClusterId);
|
|
||||||
-            if (ValidateUtils.isNull(connectCluster)) {
-                return Result.buildFromRSAndMsg(ResultStatus.NOT_EXIST, MsgConstant.getConnectClusterNotExist(connectClusterId));
-            }
-
-            restTool.deleteWithParamsAndHeader(
-                    connectCluster.getSuitableRequestUrl() + String.format(DELETE_CONNECTOR_URI, connectorName),
-                    new HashMap<>(),
-                    new HashMap<>(),
-                    String.class
-            );
-
-            opLogWrapService.saveOplogAndIgnoreException(new OplogDTO(
-                    operator,
-                    OperationEnum.DELETE.getDesc(),
-                    ModuleEnum.KAFKA_CONNECT_CONNECTOR.getDesc(),
-                    MsgConstant.getConnectorBizStr(connectClusterId, connectorName),
-                    ""
-            ));
-
-            this.deleteConnectorInDB(connectClusterId, connectorName);
-
-            return Result.buildSuc();
-        } catch (Exception e) {
-            LOGGER.error(
-                    "method=deleteConnector||connectClusterId={}||errMsg=exception",
-                    connectClusterId, e
-            );
-
-            return Result.buildFromRSAndMsg(ResultStatus.KAFKA_CONNECTOR_READ_FAILED, e.getMessage());
-        }
-    }
-
-    @Override
-    public Result<Void> updateConnectorConfig(Long connectClusterId, String connectorName, Properties configs, String operator) {
-        try {
-            ConnectCluster connectCluster = connectClusterService.getById(connectClusterId);
-            if (ValidateUtils.isNull(connectCluster)) {
-                return Result.buildFromRSAndMsg(ResultStatus.NOT_EXIST, MsgConstant.getConnectClusterNotExist(connectClusterId));
-            }
-
-            ConnectorInfo connectorInfo = restTool.putJsonForObject(
-                    connectCluster.getSuitableRequestUrl() + String.format(UPDATE_CONNECTOR_CONFIG_URI, connectorName),
-                    configs,
-                    org.apache.kafka.connect.runtime.rest.entities.ConnectorInfo.class
-            );
-
-            this.updateStatus(connectCluster, connectClusterId, connectorName);
-
-            opLogWrapService.saveOplogAndIgnoreException(new OplogDTO(
-                    operator,
-                    OperationEnum.EDIT.getDesc(),
-                    ModuleEnum.KAFKA_CONNECT_CONNECTOR.getDesc(),
-                    MsgConstant.getConnectorBizStr(connectClusterId, connectorName),
-                    ConvertUtil.obj2Json(configs)
-            ));
-
-            return Result.buildSuc();
-        } catch (Exception e) {
-            LOGGER.error(
-                    "method=updateConnectorConfig||connectClusterId={}||errMsg=exception",
-                    connectClusterId, e
-            );
-
-            return Result.buildFromRSAndMsg(ResultStatus.KAFKA_CONNECTOR_READ_FAILED, e.getMessage());
-        }
-    }
-
-    @Override
-    public void batchReplace(Long kafkaClusterPhyId, Long connectClusterId, List<KSConnector> connectorList, Set<String> allConnectorNameSet) {
-        List<ConnectorPO> poList = this.listByConnectClusterIdFromDB(connectClusterId);
-
-        Map<String, ConnectorPO> oldPOMap = new HashMap<>();
-        poList.forEach(elem -> oldPOMap.put(elem.getConnectorName(), elem));
-
-        for (KSConnector connector: connectorList) {
-            try {
-                ConnectorPO oldPO = oldPOMap.remove(connector.getConnectorName());
-                if (oldPO == null) {
-                    oldPO = ConvertUtil.obj2Obj(connector, ConnectorPO.class);
-                    connectorDAO.insert(oldPO);
-                } else {
-                    ConnectorPO newPO = ConvertUtil.obj2Obj(connector, ConnectorPO.class);
-                    newPO.setId(oldPO.getId());
-                    connectorDAO.updateById(newPO);
-                }
-            } catch (DuplicateKeyException dke) {
-                // ignore
-            }
-        }
-
-        try {
-            oldPOMap.values().forEach(elem -> {
-                if (allConnectorNameSet.contains(elem.getConnectorName())) {
-                    // the connector still exists on the cluster
-                    return;
-                }
-
-                // the connector is gone from the cluster, so delete its row
-                connectorDAO.deleteById(elem.getId());
-            });
-        } catch (Exception e) {
-            // ignore
-        }
-    }
-
-    @Override
-    public void addNewToDB(KSConnector connector) {
-        try {
-            connectorDAO.insert(ConvertUtil.obj2Obj(connector, ConnectorPO.class));
-        } catch (DuplicateKeyException dke) {
-            // ignore
-        }
-    }
-
     @Override
     public List<ConnectorPO> listByKafkaClusterIdFromDB(Long kafkaClusterPhyId) {
         LambdaQueryWrapper<ConnectorPO> lambdaQueryWrapper = new LambdaQueryWrapper<>();
@@ -482,53 +152,98 @@ public class ConnectorServiceImpl extends BaseVersionControlService implements C
     }
 
     @Override
-    public void completeMirrorMakerInfo(ConnectCluster connectCluster, List<KSConnector> connectorList) {
-        List<KSConnector> sourceConnectorList = connectorList.stream().filter(elem -> elem.getConnectorClassName().equals(KafkaConnectConstant.MIRROR_MAKER_SOURCE_CONNECTOR_TYPE)).collect(Collectors.toList());
-        if (sourceConnectorList.isEmpty()) {
-            return;
+    public Result<Tuple<Set<String>, List<KSConnector>>> getDataFromKafka(ConnectCluster connectCluster) {
+        Result<List<String>> nameListResult = this.listConnectorsFromCluster(connectCluster);
+        if (nameListResult.failed()) {
+            return Result.buildFromIgnoreData(nameListResult);
         }
 
-        List<KSConnector> heartBeatConnectorList = connectorList.stream().filter(elem -> elem.getConnectorClassName().equals(KafkaConnectConstant.MIRROR_MAKER_HEARTBEAT_CONNECTOR_TYPE)).collect(Collectors.toList());
-        List<KSConnector> checkpointConnectorList = connectorList.stream().filter(elem -> elem.getConnectorClassName().equals(KafkaConnectConstant.MIRROR_MAKER_CHECKPOINT_CONNECTOR_TYPE)).collect(Collectors.toList());
-
-        Map<String, String> heartbeatMap = this.buildMirrorMakerMap(connectCluster, heartBeatConnectorList);
-        Map<String, String> checkpointMap = this.buildMirrorMakerMap(connectCluster, checkpointConnectorList);
-
-        for (KSConnector sourceConnector : sourceConnectorList) {
-            Result<KSConnectorInfo> ret = this.getConnectorInfoFromCluster(connectCluster, sourceConnector.getConnectorName());
-
-            if (!ret.hasData()) {
-                LOGGER.error(
-                        "method=completeMirrorMakerInfo||connectClusterId={}||connectorName={}||get connectorInfo fail!",
-                        connectCluster.getId(), sourceConnector.getConnectorName()
-                );
-                continue;
-            }
-            KSConnectorInfo ksConnectorInfo = ret.getData();
-            String targetServers = ksConnectorInfo.getConfig().get(MIRROR_MAKER_TARGET_CLUSTER_BOOTSTRAP_SERVERS_FIELD_NAME);
-            String sourceServers = ksConnectorInfo.getConfig().get(MIRROR_MAKER_SOURCE_CLUSTER_BOOTSTRAP_SERVERS_FIELD_NAME);
-
-            if (ValidateUtils.anyBlank(targetServers, sourceServers)) {
+        // fetch the connectors one by one
+        List<Triple<KSConnectorInfo, List<String>, KSConnectorStateInfo>> connectorFullInfoList = new ArrayList<>();
+        for (String connectorName: nameListResult.getData()) {
+            Result<Triple<KSConnectorInfo, List<String>, KSConnectorStateInfo>> ksConnectorResult = this.getConnectorFullInfoFromKafka(connectCluster, connectorName);
+            if (ksConnectorResult.failed()) {
                 continue;
             }
 
-            String[] targetBrokerList = getBrokerList(targetServers);
-            String[] sourceBrokerList = getBrokerList(sourceServers);
-            sourceConnector.setHeartbeatConnectorName(this.findBindConnector(targetBrokerList, sourceBrokerList, heartbeatMap));
-            sourceConnector.setCheckpointConnectorName(this.findBindConnector(targetBrokerList, sourceBrokerList, checkpointMap));
+            connectorFullInfoList.add(ksConnectorResult.getData());
         }
+
+        // assemble the result
+        return Result.buildSuc(new Tuple<>(
+                new HashSet<>(nameListResult.getData()),
+                ConnectConverter.convertAndSupplyMirrorMakerInfo(connectCluster, connectorFullInfoList)) // convert and supply the MM2-related info
+        );
     }
 
-    /**************************************************** private method ****************************************************/
-    private int deleteConnectorInDB(Long connectClusterId, String connectorName) {
+    @Override
+    public void writeToDB(Long connectClusterId, Set<String> fullNameSet, List<KSConnector> dataList) {
+        List<ConnectorPO> poList = this.listByConnectClusterIdFromDB(connectClusterId);
+
+        Map<String, ConnectorPO> oldPOMap = new HashMap<>();
+        poList.forEach(elem -> oldPOMap.put(elem.getConnectorName(), elem));
+
+        for (KSConnector connector: dataList) {
+            try {
+                ConnectorPO oldPO = oldPOMap.remove(connector.getConnectorName());
+                if (oldPO == null) {
+                    oldPO = ConvertUtil.obj2Obj(connector, ConnectorPO.class);
+                    connectorDAO.insert(oldPO);
+                    continue;
+                }
+
+                ConnectorPO newPO = ConvertUtil.obj2Obj(connector, ConnectorPO.class);
+                newPO.setId(oldPO.getId());
+                if (!ValidateUtils.isBlank(oldPO.getCheckpointConnectorName())
+                        && ValidateUtils.isBlank(newPO.getCheckpointConnectorName())
+                        && fullNameSet.contains(oldPO.getCheckpointConnectorName())) {
+                    // the new po carries no checkpoint info, but the db record has it and the connector still exists on the cluster, so keep the checkpoint data
+                    newPO.setCheckpointConnectorName(oldPO.getCheckpointConnectorName());
+                }
+
+                if (!ValidateUtils.isBlank(oldPO.getHeartbeatConnectorName())
+                        && ValidateUtils.isBlank(newPO.getHeartbeatConnectorName())
+                        && fullNameSet.contains(oldPO.getHeartbeatConnectorName())) {
+                    // same rule for the heartbeat connector name
+                    newPO.setHeartbeatConnectorName(oldPO.getHeartbeatConnectorName());
+                }
+
+                connectorDAO.updateById(newPO);
+            } catch (DuplicateKeyException dke) {
+                // ignore
+            } catch (Exception e) {
+                LOGGER.error(
+                        "method=writeToDB||connectClusterId={}||connectorName={}||errMsg=exception",
+                        connector.getConnectClusterId(), connector.getConnectorName(), e
+                );
+            }
+        }
+
+        try {
+            oldPOMap.values().forEach(elem -> {
+                if (fullNameSet.contains(elem.getConnectorName())) {
+                    // the connector still exists on the cluster
+                    return;
+                }
+
+                // the connector is gone from the cluster, so delete its row
+                connectorDAO.deleteById(elem.getId());
+            });
+        } catch (Exception e) {
+            // ignore
+        }
+    }
+
+    @Override
+    public int deleteInDBByKafkaClusterId(Long clusterPhyId) {
         LambdaQueryWrapper<ConnectorPO> lambdaQueryWrapper = new LambdaQueryWrapper<>();
-        lambdaQueryWrapper.eq(ConnectorPO::getConnectClusterId, connectClusterId);
-        lambdaQueryWrapper.eq(ConnectorPO::getConnectorName, connectorName);
+        lambdaQueryWrapper.eq(ConnectorPO::getKafkaClusterPhyId, clusterPhyId);
 
         return connectorDAO.delete(lambdaQueryWrapper);
     }
 
+    /**************************************************** private method ****************************************************/
+
     private Result<KSConnectorInfo> getConnectorInfoFromCluster(ConnectCluster connectCluster, String connectorName) {
         try {
             ConnectorInfo connectorInfo = restTool.getForObject(
@@ -594,90 +309,37 @@ public class ConnectorServiceImpl extends BaseVersionControlService implements C
         }
     }
 
-    private void updateStatus(ConnectCluster connectCluster, Long connectClusterId, String connectorName) {
-        try {
-            // back off 2 seconds so the connect cluster can apply the change first
-            BackoffUtils.backoff(2000);
-
-            Result<KSConnectorStateInfo> stateInfoResult = this.getConnectorStateInfoFromCluster(connectCluster, connectorName);
-            if (stateInfoResult.failed()) {
-                return;
-            }
-
-            ConnectorPO po = new ConnectorPO();
-            po.setConnectClusterId(connectClusterId);
-            po.setConnectorName(connectorName);
-            po.setState(stateInfoResult.getData().getConnector().getState());
-
-            LambdaQueryWrapper<ConnectorPO> lambdaQueryWrapper = new LambdaQueryWrapper<>();
-            lambdaQueryWrapper.eq(ConnectorPO::getConnectClusterId, connectClusterId);
-            lambdaQueryWrapper.eq(ConnectorPO::getConnectorName, connectorName);
-
-            connectorDAO.update(po, lambdaQueryWrapper);
-        } catch (Exception e) {
+    private Result<Triple<KSConnectorInfo, List<String>, KSConnectorStateInfo>> getConnectorFullInfoFromKafka(ConnectCluster connectCluster, String connectorName) {
+        Result<KSConnectorInfo> connectorResult = this.getConnectorInfoFromCluster(connectCluster, connectorName);
+        if (connectorResult.failed()) {
             LOGGER.error(
-                    "method=updateStatus||connectClusterId={}||connectorName={}||errMsg=exception",
-                    connectClusterId, connectorName, e
+                    "method=getConnectorAllInfoFromKafka||connectClusterId={}||connectClusterSuitableUrl={}||result={}||errMsg=get connectors info from cluster failed",
+                    connectCluster.getId(), connectCluster.getSuitableRequestUrl(), connectorResult
             );
+
+            return Result.buildFromIgnoreData(connectorResult);
+        }
+
+        Result<List<String>> topicNameListResult = this.getConnectorTopicsFromCluster(connectCluster, connectorName);
+        if (topicNameListResult.failed()) {
+            LOGGER.error(
+                    "method=getConnectorAllInfoFromKafka||connectClusterId={}||connectClusterSuitableUrl={}||result={}||errMsg=get connectors topics from cluster failed",
+                    connectCluster.getId(), connectCluster.getSuitableRequestUrl(), topicNameListResult
+            );
         }
-    }
-
-    private Map<String, String> buildMirrorMakerMap(ConnectCluster connectCluster, List<KSConnector> ksConnectorList) {
-        Map<String, String> bindMap = new HashMap<>();
-
-        for (KSConnector ksConnector : ksConnectorList) {
-            Result<KSConnectorInfo> ret = this.getConnectorInfoFromCluster(connectCluster, ksConnector.getConnectorName());
-
-            if (!ret.hasData()) {
-                LOGGER.error(
-                        "method=buildMirrorMakerMap||connectClusterId={}||connectorName={}||get connectorInfo fail!",
-                        connectCluster.getId(), ksConnector.getConnectorName()
-                );
-                continue;
-            }
-
-            KSConnectorInfo ksConnectorInfo = ret.getData();
-            String targetServers = ksConnectorInfo.getConfig().get(MIRROR_MAKER_TARGET_CLUSTER_BOOTSTRAP_SERVERS_FIELD_NAME);
-            String sourceServers = ksConnectorInfo.getConfig().get(MIRROR_MAKER_SOURCE_CLUSTER_BOOTSTRAP_SERVERS_FIELD_NAME);
-
-            if (ValidateUtils.anyBlank(targetServers, sourceServers)) {
-                continue;
-            }
-
-            String[] targetBrokerList = getBrokerList(targetServers);
-            String[] sourceBrokerList = getBrokerList(sourceServers);
-            for (String targetBroker : targetBrokerList) {
-                for (String sourceBroker : sourceBrokerList) {
-                    bindMap.put(targetBroker + "@" + sourceBroker, ksConnector.getConnectorName());
-                }
-            }
+
+        Result<KSConnectorStateInfo> stateInfoResult = this.getConnectorStateInfoFromCluster(connectCluster, connectorName);
+        if (stateInfoResult.failed()) {
+            LOGGER.error(
+                    "method=getConnectorAllInfoFromKafka||connectClusterId={}||connectClusterSuitableUrl={}||result={}||errMsg=get connectors state from cluster failed",
+                    connectCluster.getId(), connectCluster.getSuitableRequestUrl(), stateInfoResult
+            );
         }
-        return bindMap;
-    }
-
-    private String findBindConnector(String[] targetBrokerList, String[] sourceBrokerList, Map<String, String> connectorBindMap) {
-        for (String targetBroker : targetBrokerList) {
-            for (String sourceBroker : sourceBrokerList) {
-                String connectorName = connectorBindMap.get(targetBroker + "@" + sourceBroker);
-                if (connectorName != null) {
-                    return connectorName;
-                }
-            }
-        }
-        return "";
-    }
-
-    private String[] getBrokerList(String str) {
-        if (ValidateUtils.isBlank(str)) {
-            return new String[0];
-        }
-        if (str.contains(";")) {
-            return str.split(";");
-        }
-        if (str.contains(",")) {
-            return str.split(",");
-        }
-        return new String[]{str};
-    }
+
+        return Result.buildSuc(new Triple<>(
+                connectorResult.getData(),
+                topicNameListResult.getData(),
+                stateInfoResult.getData()
+        ));
     }
 }
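Taken together, the removals above and the new `getDataFromKafka`/`writeToDB` pair split connector synchronization into a pure fetch step and a pure DB-reconciliation step. Below is a minimal, self-contained sketch of that reconcile pattern; the `Row` record and the print statements are hypothetical stand-ins for the project's PO classes and DAO calls, not the actual implementation:

```java
import java.util.*;

// Sketch of the fetch-then-diff sync pattern: rows seen on the cluster are
// inserted or updated, rows left over in the DB map are deleted.
public class SyncSketch {
    record Row(long id, String name) {}

    static List<Long> sync(List<Row> dbRows, Set<String> liveNames) {
        Map<String, Row> oldByName = new HashMap<>();
        dbRows.forEach(r -> oldByName.put(r.name(), r));

        for (String name : liveNames) {
            Row old = oldByName.remove(name);
            if (old == null) {
                System.out.println("insert " + name);      // new on the cluster
            } else {
                System.out.println("update " + old.id());  // still alive, refresh the row
            }
        }

        // whatever is left in the map no longer exists on the cluster
        return oldByName.values().stream().map(Row::id).toList();
    }

    public static void main(String[] args) {
        List<Row> db = List.of(new Row(1, "mm2-source"), new Row(2, "gone"));
        System.out.println("delete ids: " + sync(db, Set.of("mm2-source", "brand-new")));
    }
}
```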
@@ -0,0 +1,352 @@
+package com.xiaojukeji.know.streaming.km.core.service.connect.connector.impl;
+
+import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
+import com.didiglobal.logi.log.ILog;
+import com.didiglobal.logi.log.LogFactory;
+import com.didiglobal.logi.security.common.dto.oplog.OplogDTO;
+import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.ConnectCluster;
+import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.connector.KSConnector;
+import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.connector.KSConnectorInfo;
+import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.connector.KSConnectorStateInfo;
+import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
+import com.xiaojukeji.know.streaming.km.common.bean.entity.result.ResultStatus;
+import com.xiaojukeji.know.streaming.km.common.bean.po.connect.ConnectorPO;
+import com.xiaojukeji.know.streaming.km.common.component.RestTool;
+import com.xiaojukeji.know.streaming.km.common.constant.MsgConstant;
+import com.xiaojukeji.know.streaming.km.common.constant.connect.KafkaConnectConstant;
+import com.xiaojukeji.know.streaming.km.common.enums.operaterecord.ModuleEnum;
+import com.xiaojukeji.know.streaming.km.common.enums.operaterecord.OperationEnum;
+import com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemTypeEnum;
+import com.xiaojukeji.know.streaming.km.common.utils.BackoffUtils;
+import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
+import com.xiaojukeji.know.streaming.km.common.utils.ValidateUtils;
+import com.xiaojukeji.know.streaming.km.core.service.connect.cluster.ConnectClusterService;
+import com.xiaojukeji.know.streaming.km.core.service.connect.connector.OpConnectorService;
+import com.xiaojukeji.know.streaming.km.core.service.oprecord.OpLogWrapService;
+import com.xiaojukeji.know.streaming.km.core.service.version.BaseVersionControlService;
+import com.xiaojukeji.know.streaming.km.persistence.mysql.connect.ConnectorDAO;
+import org.apache.kafka.connect.runtime.rest.entities.ConnectorInfo;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.dao.DuplicateKeyException;
+import org.springframework.stereotype.Service;
+
+import java.util.*;
+
+import static com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemTypeEnum.SERVICE_OP_CONNECT_CONNECTOR;
+
+@Service
+public class OpConnectorServiceImpl extends BaseVersionControlService implements OpConnectorService {
+    private static final ILog LOGGER = LogFactory.getLog(OpConnectorServiceImpl.class);
+
+    @Autowired
+    private RestTool restTool;
+
+    @Autowired
+    private ConnectorDAO connectorDAO;
+
+    @Autowired
+    private ConnectClusterService connectClusterService;
+
+    @Autowired
+    private OpLogWrapService opLogWrapService;
+
+    private static final String GET_CONNECTOR_STATUS_URI = "/connectors/%s/status";
+
+    private static final String CREATE_CONNECTOR_URI = "/connectors";
+    private static final String RESUME_CONNECTOR_URI = "/connectors/%s/resume";
+    private static final String RESTART_CONNECTOR_URI = "/connectors/%s/restart";
+    private static final String PAUSE_CONNECTOR_URI = "/connectors/%s/pause";
+    private static final String DELETE_CONNECTOR_URI = "/connectors/%s";
+    private static final String UPDATE_CONNECTOR_CONFIG_URI = "/connectors/%s/config";
+
+    @Override
+    protected VersionItemTypeEnum getVersionItemType() {
+        return SERVICE_OP_CONNECT_CONNECTOR;
+    }
+
+    @Override
+    public Result<KSConnectorInfo> createConnector(Long connectClusterId, String connectorName, Properties configs, String operator) {
+        try {
+            ConnectCluster connectCluster = connectClusterService.getById(connectClusterId);
+            if (ValidateUtils.isNull(connectCluster)) {
+                return Result.buildFromRSAndMsg(ResultStatus.NOT_EXIST, MsgConstant.getConnectClusterNotExist(connectClusterId));
+            }
+
+            // build the request body
+            Properties props = new Properties();
+            props.put(KafkaConnectConstant.MIRROR_MAKER_NAME_FIELD_NAME, connectorName);
+            props.put("config", configs);
+
+            ConnectorInfo connectorInfo = restTool.postObjectWithJsonContent(
+                    connectCluster.getSuitableRequestUrl() + CREATE_CONNECTOR_URI,
+                    props,
+                    ConnectorInfo.class
+            );
+
+            opLogWrapService.saveOplogAndIgnoreException(new OplogDTO(
+                    operator,
+                    OperationEnum.ADD.getDesc(),
+                    ModuleEnum.KAFKA_CONNECT_CONNECTOR.getDesc(),
+                    MsgConstant.getConnectorBizStr(connectClusterId, connectorName),
+                    ConvertUtil.obj2Json(configs)
+            ));
+
+            KSConnectorInfo connector = new KSConnectorInfo();
+            connector.setConnectClusterId(connectClusterId);
+            connector.setConfig(connectorInfo.config());
+            connector.setName(connectorInfo.name());
+            connector.setTasks(connectorInfo.tasks());
+            connector.setType(connectorInfo.type());
+
+            return Result.buildSuc(connector);
+        } catch (Exception e) {
+            LOGGER.error(
+                    "method=createConnector||connectClusterId={}||connectorName={}||configs={}||operator={}||errMsg=exception",
+                    connectClusterId, connectorName, configs, operator, e
+            );
+
+            return Result.buildFromRSAndMsg(ResultStatus.KAFKA_CONNECTOR_READ_FAILED, e.getMessage());
+        }
+    }
+
+    @Override
+    public Result<Void> resumeConnector(Long connectClusterId, String connectorName, String operator) {
+        try {
+            ConnectCluster connectCluster = connectClusterService.getById(connectClusterId);
+            if (ValidateUtils.isNull(connectCluster)) {
+                return Result.buildFromRSAndMsg(ResultStatus.NOT_EXIST, MsgConstant.getConnectClusterNotExist(connectClusterId));
+            }
+
+            restTool.putJsonForObject(
+                    connectCluster.getSuitableRequestUrl() + String.format(RESUME_CONNECTOR_URI, connectorName),
+                    new HashMap<>(),
+                    String.class
+            );
+
+            this.updateStatus(connectCluster, connectClusterId, connectorName);
+
+            opLogWrapService.saveOplogAndIgnoreException(new OplogDTO(
+                    operator,
+                    OperationEnum.ENABLE.getDesc(),
+                    ModuleEnum.KAFKA_CONNECT_CONNECTOR.getDesc(),
+                    MsgConstant.getConnectorBizStr(connectClusterId, connectorName),
+                    ""
+            ));
+
+            return Result.buildSuc();
+        } catch (Exception e) {
+            LOGGER.error(
+                    "method=resumeConnector||connectClusterId={}||errMsg=exception",
+                    connectClusterId, e
+            );
+
+            return Result.buildFromRSAndMsg(ResultStatus.KAFKA_CONNECTOR_READ_FAILED, e.getMessage());
+        }
+    }
+
+    @Override
+    public Result<Void> restartConnector(Long connectClusterId, String connectorName, String operator) {
+        try {
+            ConnectCluster connectCluster = connectClusterService.getById(connectClusterId);
+            if (ValidateUtils.isNull(connectCluster)) {
+                return Result.buildFromRSAndMsg(ResultStatus.NOT_EXIST, MsgConstant.getConnectClusterNotExist(connectClusterId));
+            }
+
+            restTool.postObjectWithJsonContent(
+                    connectCluster.getSuitableRequestUrl() + String.format(RESTART_CONNECTOR_URI, connectorName),
+                    new HashMap<>(),
+                    String.class
+            );
+
+            this.updateStatus(connectCluster, connectClusterId, connectorName);
+
+            opLogWrapService.saveOplogAndIgnoreException(new OplogDTO(
+                    operator,
+                    OperationEnum.RESTART.getDesc(),
+                    ModuleEnum.KAFKA_CONNECT_CONNECTOR.getDesc(),
+                    MsgConstant.getConnectorBizStr(connectClusterId, connectorName),
+                    ""
+            ));
+
+            return Result.buildSuc();
+        } catch (Exception e) {
+            LOGGER.error(
+                    "method=restartConnector||connectClusterId={}||errMsg=exception",
+                    connectClusterId, e
+            );
+
+            return Result.buildFromRSAndMsg(ResultStatus.KAFKA_CONNECTOR_READ_FAILED, e.getMessage());
+        }
+    }
+
+    @Override
+    public Result<Void> stopConnector(Long connectClusterId, String connectorName, String operator) {
+        try {
+            ConnectCluster connectCluster = connectClusterService.getById(connectClusterId);
+            if (ValidateUtils.isNull(connectCluster)) {
+                return Result.buildFromRSAndMsg(ResultStatus.NOT_EXIST, MsgConstant.getConnectClusterNotExist(connectClusterId));
+            }
+
+            restTool.putJsonForObject(
+                    connectCluster.getSuitableRequestUrl() + String.format(PAUSE_CONNECTOR_URI, connectorName),
+                    new HashMap<>(),
+                    String.class
+            );
+
+            this.updateStatus(connectCluster, connectClusterId, connectorName);
+
+            opLogWrapService.saveOplogAndIgnoreException(new OplogDTO(
+                    operator,
+                    OperationEnum.DISABLE.getDesc(),
+                    ModuleEnum.KAFKA_CONNECT_CONNECTOR.getDesc(),
+                    MsgConstant.getConnectorBizStr(connectClusterId, connectorName),
+                    ""
+            ));
+
+            return Result.buildSuc();
+        } catch (Exception e) {
+            LOGGER.error(
+                    "method=stopConnector||connectClusterId={}||errMsg=exception",
+                    connectClusterId, e
+            );
+
+            return Result.buildFromRSAndMsg(ResultStatus.KAFKA_CONNECTOR_READ_FAILED, e.getMessage());
+        }
+    }
+
+    @Override
+    public Result<Void> deleteConnector(Long connectClusterId, String connectorName, String operator) {
+        try {
+            ConnectCluster connectCluster = connectClusterService.getById(connectClusterId);
+            if (ValidateUtils.isNull(connectCluster)) {
+                return Result.buildFromRSAndMsg(ResultStatus.NOT_EXIST, MsgConstant.getConnectClusterNotExist(connectClusterId));
+            }
+
+            restTool.deleteWithParamsAndHeader(
+                    connectCluster.getSuitableRequestUrl() + String.format(DELETE_CONNECTOR_URI, connectorName),
+                    new HashMap<>(),
+                    new HashMap<>(),
+                    String.class
+            );
+
+            opLogWrapService.saveOplogAndIgnoreException(new OplogDTO(
+                    operator,
+                    OperationEnum.DELETE.getDesc(),
+                    ModuleEnum.KAFKA_CONNECT_CONNECTOR.getDesc(),
+                    MsgConstant.getConnectorBizStr(connectClusterId, connectorName),
+                    ""
+            ));
+
+            this.deleteConnectorInDB(connectClusterId, connectorName);
+
+            return Result.buildSuc();
+        } catch (Exception e) {
+            LOGGER.error(
+                    "method=deleteConnector||connectClusterId={}||errMsg=exception",
+                    connectClusterId, e
+            );
+
+            return Result.buildFromRSAndMsg(ResultStatus.KAFKA_CONNECTOR_READ_FAILED, e.getMessage());
+        }
+    }
+
+    @Override
+    public Result<Void> updateConnectorConfig(Long connectClusterId, String connectorName, Properties configs, String operator) {
+        try {
+            ConnectCluster connectCluster = connectClusterService.getById(connectClusterId);
+            if (ValidateUtils.isNull(connectCluster)) {
+                return Result.buildFromRSAndMsg(ResultStatus.NOT_EXIST, MsgConstant.getConnectClusterNotExist(connectClusterId));
+            }
+
+            ConnectorInfo connectorInfo = restTool.putJsonForObject(
+                    connectCluster.getSuitableRequestUrl() + String.format(UPDATE_CONNECTOR_CONFIG_URI, connectorName),
+                    configs,
+                    ConnectorInfo.class
+            );
+
+            this.updateStatus(connectCluster, connectClusterId, connectorName);
+
+            opLogWrapService.saveOplogAndIgnoreException(new OplogDTO(
+                    operator,
+                    OperationEnum.EDIT.getDesc(),
+                    ModuleEnum.KAFKA_CONNECT_CONNECTOR.getDesc(),
+                    MsgConstant.getConnectorBizStr(connectClusterId, connectorName),
+                    ConvertUtil.obj2Json(configs)
+            ));
+
+            return Result.buildSuc();
+        } catch (Exception e) {
+            LOGGER.error(
+                    "method=updateConnectorConfig||connectClusterId={}||errMsg=exception",
+                    connectClusterId, e
+            );
+
+            return Result.buildFromRSAndMsg(ResultStatus.KAFKA_CONNECTOR_READ_FAILED, e.getMessage());
+        }
+    }
+
+    @Override
+    public void addNewToDB(KSConnector connector) {
+        try {
+            connectorDAO.insert(ConvertUtil.obj2Obj(connector, ConnectorPO.class));
+        } catch (DuplicateKeyException dke) {
+            // ignore
+        }
+    }
+
+    /**************************************************** private method ****************************************************/
+    private int deleteConnectorInDB(Long connectClusterId, String connectorName) {
+        LambdaQueryWrapper<ConnectorPO> lambdaQueryWrapper = new LambdaQueryWrapper<>();
+        lambdaQueryWrapper.eq(ConnectorPO::getConnectClusterId, connectClusterId);
+        lambdaQueryWrapper.eq(ConnectorPO::getConnectorName, connectorName);
+
+        return connectorDAO.delete(lambdaQueryWrapper);
+    }
+
+    private Result<KSConnectorStateInfo> getConnectorStateInfoFromCluster(ConnectCluster connectCluster, String connectorName) {
+        try {
+            KSConnectorStateInfo connectorStateInfo = restTool.getForObject(
+                    connectCluster.getSuitableRequestUrl() + String.format(GET_CONNECTOR_STATUS_URI, connectorName),
+                    new HashMap<>(),
+                    KSConnectorStateInfo.class
+            );
+
+            return Result.buildSuc(connectorStateInfo);
+        } catch (Exception e) {
+            LOGGER.error(
+                    "method=getConnectorStateInfoFromCluster||connectClusterId={}||connectorName={}||errMsg=exception",
+                    connectCluster.getId(), connectorName, e
+            );
+
+            return Result.buildFromRSAndMsg(ResultStatus.KAFKA_CONNECTOR_READ_FAILED, e.getMessage());
+        }
+    }
+
+    private void updateStatus(ConnectCluster connectCluster, Long connectClusterId, String connectorName) {
+        try {
+            // back off 2 seconds so the connect cluster can apply the change before we read the status back
+            BackoffUtils.backoff(2000);
+
+            Result<KSConnectorStateInfo> stateInfoResult = this.getConnectorStateInfoFromCluster(connectCluster, connectorName);
+            if (stateInfoResult.failed()) {
+                return;
+            }
+
+            ConnectorPO po = new ConnectorPO();
+            po.setConnectClusterId(connectClusterId);
+            po.setConnectorName(connectorName);
+            po.setState(stateInfoResult.getData().getConnector().getState());
+
+            LambdaQueryWrapper<ConnectorPO> lambdaQueryWrapper = new LambdaQueryWrapper<>();
+            lambdaQueryWrapper.eq(ConnectorPO::getConnectClusterId, connectClusterId);
+            lambdaQueryWrapper.eq(ConnectorPO::getConnectorName, connectorName);
+
+            connectorDAO.update(po, lambdaQueryWrapper);
+        } catch (Exception e) {
+            LOGGER.error(
+                    "method=updateStatus||connectClusterId={}||connectorName={}||errMsg=exception",
+                    connectClusterId, connectorName, e
+            );
+        }
+    }
+}
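The new `OpConnectorServiceImpl` above is a thin wrapper over the standard Kafka Connect REST API: `POST /connectors`, `PUT /connectors/{name}/pause`, `PUT /connectors/{name}/resume`, `POST /connectors/{name}/restart`, `DELETE /connectors/{name}`, `PUT /connectors/{name}/config`, and `GET /connectors/{name}/status`. For reference, here is a sketch hitting one of those endpoints directly with the JDK HTTP client; the base URL and connector name are placeholders:

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

// Sketch: the same Kafka Connect REST endpoint the service above wraps for
// stopConnector, called directly. http://connect:8083 and "my-connector" are placeholders.
public class ConnectRestSketch {
    public static void main(String[] args) throws Exception {
        HttpClient client = HttpClient.newHttpClient();
        String base = "http://connect:8083";

        // PUT /connectors/{name}/pause -- pause ("stop" in the service above)
        HttpRequest pause = HttpRequest.newBuilder(URI.create(base + "/connectors/my-connector/pause"))
                .PUT(HttpRequest.BodyPublishers.noBody())
                .build();

        HttpResponse<String> resp = client.send(pause, HttpResponse.BodyHandlers.ofString());
        System.out.println(resp.statusCode()); // Connect answers 202 Accepted on success
    }
}
```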
@@ -27,7 +27,7 @@ import com.xiaojukeji.know.streaming.km.common.utils.Tuple;
 import com.xiaojukeji.know.streaming.km.core.service.connect.connector.ConnectorService;
 import com.xiaojukeji.know.streaming.km.core.service.connect.mm2.MirrorMakerMetricService;
 import com.xiaojukeji.know.streaming.km.core.service.health.state.HealthStateService;
-import com.xiaojukeji.know.streaming.km.core.service.version.BaseConnectorMetricService;
+import com.xiaojukeji.know.streaming.km.core.service.version.BaseConnectMetricService;
 import com.xiaojukeji.know.streaming.km.persistence.connect.ConnectJMXClient;
 import org.springframework.beans.factory.annotation.Autowired;
 import com.xiaojukeji.know.streaming.km.persistence.es.dao.connect.mm2.MirrorMakerMetricESDAO;
@@ -49,7 +49,7 @@ import static com.xiaojukeji.know.streaming.km.common.enums.version.VersionItemT
  * @date 2022/12/15
  */
 @Service
-public class MirrorMakerMetricServiceImpl extends BaseConnectorMetricService implements MirrorMakerMetricService {
+public class MirrorMakerMetricServiceImpl extends BaseConnectMetricService implements MirrorMakerMetricService {
     protected static final ILog LOGGER = LogFactory.getLog(MirrorMakerMetricServiceImpl.class);
 
     public static final String MIRROR_MAKER_METHOD_DO_NOTHING = "doNothing";
@@ -190,7 +190,7 @@ public class MirrorMakerMetricServiceImpl extends BaseConnectMetricService imp
 
                 multiLinesVO.setMetricLines(metricLines);
                 multiLinesVOS.add(multiLinesVO);
-            }catch (Exception e){
+            } catch (Exception e){
                 LOGGER.error("method=metricMap2VO||connectClusterId={}||msg=exception!", connectClusterId, e);
             }
         }
@@ -78,6 +78,7 @@ public class GroupServiceImpl extends BaseKafkaVersionControlService implements
             }
 
             props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, clusterPhy.getBootstrapServers());
+            props.put(AdminClientConfig.CLIENT_ID_CONFIG, String.format("KSPartialAdminClient||clusterPhyId=%d||timestamp=%d", clusterPhy.getId(), System.currentTimeMillis()));
 
             adminClient = KSPartialKafkaAdminClient.create(props);
             KSListGroupsResult listConsumerGroupsResult = adminClient.listConsumerGroups(
@@ -178,6 +179,7 @@ public class GroupServiceImpl extends BaseKafkaVersionControlService implements
             }
 
             props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, clusterPhy.getBootstrapServers());
+            props.put(AdminClientConfig.CLIENT_ID_CONFIG, String.format("KSPartialAdminClient||clusterPhyId=%d||timestamp=%d", clusterPhy.getId(), System.currentTimeMillis()));
 
             adminClient = KSPartialKafkaAdminClient.create(props);
@@ -0,0 +1,51 @@
+package com.xiaojukeji.know.streaming.km.core.service.meta;
+
+import com.xiaojukeji.know.streaming.km.common.bean.entity.cluster.ClusterPhy;
+import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.ConnectCluster;
+import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
+import com.xiaojukeji.know.streaming.km.common.utils.Tuple;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+/**
+ * Kafka metadata service interface
+ */
+public interface MetaDataService<T> {
+    /**
+     * fetch data from Kafka
+     * @param connectCluster the connect cluster
+     * @return the full resource name set plus the successfully fetched resources
+     */
+    default Result<Tuple<Set<String>, List<T>>> getDataFromKafka(ConnectCluster connectCluster) { return Result.buildSuc(new Tuple<>(new HashSet<>(), new ArrayList<>())); }
+
+    /**
+     * fetch data from Kafka
+     * @param clusterPhy the kafka cluster
+     * @return the successfully fetched resources
+     */
+    default Result<List<T>> getDataFromKafka(ClusterPhy clusterPhy) { return Result.buildSuc(new ArrayList<>()); }
+
+    /**
+     * sync the metadata to the DB
+     * @param clusterId cluster id
+     * @param fullResSet full resource name set
+     * @param dataList successfully fetched resources
+     */
+    default void writeToDB(Long clusterId, Set<String> fullResSet, List<T> dataList) {}
+
+    /**
+     * sync the metadata to the DB
+     * @param clusterId cluster id
+     * @param dataList successfully fetched resources
+     */
+    default void writeToDB(Long clusterId, List<T> dataList) {}
+
+    /**
+     * delete data by kafka cluster id
+     * @param clusterPhyId kafka cluster id
+     */
+    int deleteInDBByKafkaClusterId(Long clusterPhyId);
+}
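The default methods above mean an implementor only overrides the overloads that apply to its resource type; `deleteInDBByKafkaClusterId` is the one mandatory operation. A stripped-down sketch of the same contract, with the project-specific parameter and result types replaced by plain JDK types (an assumption made for illustration only):

```java
import java.util.Collections;
import java.util.List;

// Minimal analogue of the MetaDataService<T> shape: no-op defaults for the
// fetch/write steps, one abstract cleanup method every service must provide.
interface SimpleMetaService<T> {
    // fetch step: default no-op so implementors only override what they support
    default List<T> fetchFromSource(String sourceId) { return Collections.emptyList(); }

    // write step: default no-op
    default void writeToDB(String sourceId, List<T> dataList) {}

    // cleanup is the only mandatory operation
    int deleteBySourceId(String sourceId);
}

class ZkLikeService implements SimpleMetaService<String> {
    @Override
    public List<String> fetchFromSource(String sourceId) {
        return List.of(sourceId + "-node-1", sourceId + "-node-2");
    }

    @Override
    public int deleteBySourceId(String sourceId) {
        System.out.println("deleting rows for " + sourceId);
        return 0;
    }
}
```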
@@ -9,7 +9,7 @@ import java.util.stream.Collectors;
  * @author wyb
  * @date 2022/11/9
  */
-public abstract class BaseConnectorMetricService extends BaseConnectorVersionControlService{
+public abstract class BaseConnectMetricService extends BaseConnectVersionControlService {
     private List<String> metricNames = new ArrayList<>();
 
     @PostConstruct
@@ -14,7 +14,7 @@ import javax.annotation.Nullable;
  * @author wyb
 * @date 2022/11/8
  */
-public abstract class BaseConnectorVersionControlService extends BaseVersionControlService {
+public abstract class BaseConnectVersionControlService extends BaseVersionControlService {
 
     @Autowired
     ConnectClusterService connectClusterService;
@@ -24,6 +24,8 @@ public class ConnectorMetricVersionItems extends BaseMetricVersionMetric {
 
     public static final String CONNECTOR_METRIC_HEALTH_STATE = "HealthState";
 
+    public static final String CONNECTOR_METRIC_RUNNING_STATUS = "RunningStatus";
+
     public static final String CONNECTOR_METRIC_CONNECTOR_TOTAL_TASK_COUNT = "ConnectorTotalTaskCount";
 
     public static final String CONNECTOR_METRIC_HEALTH_CHECK_PASSED = "HealthCheckPassed";
@@ -128,6 +130,9 @@ public class ConnectorMetricVersionItems extends BaseMetricVersionMetric {
         items.add(buildAllVersionsItem()
                 .name(CONNECTOR_METRIC_HEALTH_STATE).unit("0:好 1:中 2:差 3:宕机").desc("健康状态(0:好 1:中 2:差 3:宕机)").category(CATEGORY_HEALTH)
                 .extendMethod(CONNECTOR_METHOD_GET_METRIC_HEALTH_SCORE));
 
+        items.add(buildAllVersionsItem()
+                .name(CONNECTOR_METRIC_RUNNING_STATUS).unit("0:UNASSIGNED 1:RUNNING 2:PAUSED 3:FAILED 4:DESTROYED -1:UNKNOWN").desc("运行状态(0:UNASSIGNED 1:RUNNING 2:PAUSED 3:FAILED 4:DESTROYED -1:UNKNOWN)").category(CATEGORY_PERFORMANCE)
+                .extendMethod(CONNECTOR_METHOD_GET_METRIC_RUNNING_STATUS));
+
         items.add(buildAllVersionsItem()
                 .name(CONNECTOR_METRIC_HEALTH_CHECK_PASSED).unit("个").desc("健康项检查通过数").category(CATEGORY_HEALTH)
                 .extendMethod(CONNECTOR_METHOD_GET_METRIC_HEALTH_SCORE));
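The new `RunningStatus` metric encodes the Kafka Connect connector state string as a number, following the codes spelled out in the item's `unit`/`desc` text above. A small sketch of that mapping as the text documents it (the project's actual extend method may be implemented differently):

```java
// Sketch: Kafka Connect reports connector state as a string; dashboards want a
// number. The codes below mirror the unit/desc text of the RunningStatus item.
public class RunningStatusSketch {
    static int toCode(String state) {
        switch (state) {
            case "UNASSIGNED": return 0;
            case "RUNNING":    return 1;
            case "PAUSED":     return 2;
            case "FAILED":     return 3;
            case "DESTROYED":  return 4;
            default:           return -1; // UNKNOWN
        }
    }

    public static void main(String[] args) {
        System.out.println(toCode("RUNNING")); // 1
    }
}
```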
@@ -1,19 +1,11 @@
 package com.xiaojukeji.know.streaming.km.core.service.zookeeper;
 
-import com.xiaojukeji.know.streaming.km.common.bean.entity.config.ZKConfig;
-import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
 import com.xiaojukeji.know.streaming.km.common.bean.entity.zookeeper.ZookeeperInfo;
+import com.xiaojukeji.know.streaming.km.core.service.meta.MetaDataService;
 
 import java.util.List;
 
-public interface ZookeeperService {
-    /**
-     * fetch ZK info from the ZK cluster
-     */
-    Result<List<ZookeeperInfo>> listFromZookeeper(Long clusterPhyId, String zookeeperAddress, ZKConfig zkConfig);
-
-    void batchReplaceDataInDB(Long clusterPhyId, List<ZookeeperInfo> infoList);
-
+public interface ZookeeperService extends MetaDataService<ZookeeperInfo> {
     List<ZookeeperInfo> listFromDBByCluster(Long clusterPhyId);
 
     /**
@@ -3,6 +3,7 @@ package com.xiaojukeji.know.streaming.km.core.service.zookeeper.impl;
 import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
 import com.didiglobal.logi.log.ILog;
 import com.didiglobal.logi.log.LogFactory;
+import com.xiaojukeji.know.streaming.km.common.bean.entity.cluster.ClusterPhy;
 import com.xiaojukeji.know.streaming.km.common.bean.entity.config.ZKConfig;
 import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
 import com.xiaojukeji.know.streaming.km.common.bean.entity.result.ResultStatus;
@@ -22,10 +23,8 @@ import com.xiaojukeji.know.streaming.km.persistence.mysql.zookeeper.ZookeeperDAO
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Service;
 
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
+import java.util.stream.Collectors;
 
 @Service
 public class ZookeeperServiceImpl implements ZookeeperService {
@@ -35,14 +34,14 @@ public class ZookeeperServiceImpl implements ZookeeperService {
     private ZookeeperDAO zookeeperDAO;
 
     @Override
-    public Result<List<ZookeeperInfo>> listFromZookeeper(Long clusterPhyId, String zookeeperAddress, ZKConfig zkConfig) {
+    public Result<List<ZookeeperInfo>> getDataFromKafka(ClusterPhy clusterPhy) {
         List<Tuple<String, Integer>> addressList = null;
         try {
-            addressList = ZookeeperUtils.connectStringParser(zookeeperAddress);
+            addressList = ZookeeperUtils.connectStringParser(clusterPhy.getZookeeper());
         } catch (Exception e) {
             LOGGER.error(
-                    "method=listFromZookeeperCluster||clusterPhyId={}||zookeeperAddress={}||errMsg=exception!",
-                    clusterPhyId, zookeeperAddress, e
+                    "method=getDataFromKafka||clusterPhyId={}||zookeeperAddress={}||errMsg=exception!",
+                    clusterPhy.getId(), clusterPhy.getZookeeper(), e
             );
 
             return Result.buildFromRSAndMsg(ResultStatus.PARAM_ILLEGAL, e.getMessage());
@@ -51,24 +50,25 @@ public class ZookeeperServiceImpl implements ZookeeperService {
         List<ZookeeperInfo> aliveZKList = new ArrayList<>();
         for (Tuple<String, Integer> hostPort: addressList) {
             aliveZKList.add(this.getFromZookeeperCluster(
-                    clusterPhyId,
+                    clusterPhy.getId(),
                     hostPort.getV1(),
                     hostPort.getV2(),
-                    zkConfig
+                    ConvertUtil.str2ObjByJson(clusterPhy.getZkProperties(), ZKConfig.class)
             ));
         }
 
         return Result.buildSuc(aliveZKList);
     }
 
     @Override
-    public void batchReplaceDataInDB(Long clusterPhyId, List<ZookeeperInfo> infoList) {
+    public void writeToDB(Long clusterId, List<ZookeeperInfo> dataList) {
         // info currently in the DB
-        List<ZookeeperInfoPO> dbInfoList = this.listRawFromDBByCluster(clusterPhyId);
-        Map<String, ZookeeperInfoPO> dbMap = new HashMap<>();
-        dbInfoList.stream().forEach(elem -> dbMap.put(elem.getHost() + elem.getPort(), elem));
+        Map<String, ZookeeperInfoPO> dbMap = this.listRawFromDBByCluster(clusterId)
+                .stream()
+                .collect(Collectors.toMap(elem -> elem.getHost() + elem.getPort(), elem -> elem, (oldValue, newValue) -> newValue));
 
         // the freshly fetched info
-        List<ZookeeperInfoPO> newInfoList = ConvertUtil.list2List(infoList, ZookeeperInfoPO.class);
+        List<ZookeeperInfoPO> newInfoList = ConvertUtil.list2List(dataList, ZookeeperInfoPO.class);
         for (ZookeeperInfoPO newInfo: newInfoList) {
             try {
                 ZookeeperInfoPO oldInfo = dbMap.remove(newInfo.getHost() + newInfo.getPort());
@@ -87,7 +87,7 @@ public class ZookeeperServiceImpl implements ZookeeperService {
                     zookeeperDAO.updateById(newInfo);
                 }
             } catch (Exception e) {
-                LOGGER.error("method=batchReplaceDataInDB||clusterPhyId={}||newInfo={}||errMsg=exception", clusterPhyId, newInfo, e);
+                LOGGER.error("method=writeToDB||clusterPhyId={}||newInfo={}||errMsg=exception", clusterId, newInfo, e);
             }
         }
 
@@ -96,11 +96,19 @@ public class ZookeeperServiceImpl implements ZookeeperService {
             try {
                 zookeeperDAO.deleteById(entry.getValue().getId());
             } catch (Exception e) {
-                LOGGER.error("method=batchReplaceDataInDB||clusterPhyId={}||expiredInfo={}||errMsg=exception", clusterPhyId, entry.getValue(), e);
+                LOGGER.error("method=writeToDB||clusterPhyId={}||expiredInfo={}||errMsg=exception", clusterId, entry.getValue(), e);
             }
         });
     }
 
+    @Override
+    public int deleteInDBByKafkaClusterId(Long clusterPhyId) {
+        LambdaQueryWrapper<ZookeeperInfoPO> lambdaQueryWrapper = new LambdaQueryWrapper<>();
+        lambdaQueryWrapper.eq(ZookeeperInfoPO::getClusterPhyId, clusterPhyId);
+
+        return zookeeperDAO.delete(lambdaQueryWrapper);
+    }
+
     @Override
     public List<ZookeeperInfo> listFromDBByCluster(Long clusterPhyId) {
         return ConvertUtil.list2List(this.listRawFromDBByCluster(clusterPhyId), ZookeeperInfo.class);
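The `writeToDB` rewrite above replaces a hand-built `HashMap` with `Collectors.toMap`. The third argument is the part that matters: without a merge function, `toMap` throws `IllegalStateException` on duplicate keys, and two ZK entries could in principle collide on the `host + port` key. A runnable sketch of the same collector, with placeholder host strings:

```java
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

// Why the (oldValue, newValue) -> newValue merge function above matters:
// Collectors.toMap without it throws IllegalStateException on duplicate keys.
public class ToMapSketch {
    public static void main(String[] args) {
        List<String> hosts = List.of("zk1:2181", "zk2:2181", "zk1:2181"); // note the duplicate

        Map<String, String> byHost = hosts.stream()
                .collect(Collectors.toMap(
                        h -> h.split(":")[0],               // key: host
                        h -> h,                             // value: host:port
                        (oldValue, newValue) -> newValue)); // keep the latest entry on collision

        System.out.println(byHost); // e.g. {zk1=zk1:2181, zk2=zk2:2181}
    }
}
```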
@@ -16,8 +16,8 @@ import com.xiaojukeji.know.streaming.km.account.KmAccountConfig;
 import com.xiaojukeji.know.streaming.km.account.common.bizenum.LoginServiceNameEnum;
 import com.xiaojukeji.know.streaming.km.account.common.ldap.LdapPrincipal;
 import com.xiaojukeji.know.streaming.km.account.login.ldap.remote.LdapAuthentication;
+import com.xiaojukeji.know.streaming.km.common.constant.Constant;
 import com.xiaojukeji.know.streaming.km.common.utils.CommonUtils;
-import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
 import com.xiaojukeji.know.streaming.km.common.utils.ValidateUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -79,7 +79,11 @@ public class LdapLoginServiceImpl implements LoginExtend {
             userService.addUser(userDTO, ldapAttrsInfo.getSAMAccountName());
 
             // assign the user
-            user = ConvertUtil.obj2Obj(userDTO, User.class);
+            user = userService.getUserByUserName(ldapAttrsInfo.getSAMAccountName());
+        } else if (ValidateUtils.isNull(user)) {
+            // user is null and auto-registration is disabled, so give the temporary user the default id
+            user = new User();
+            user.setId(Constant.INVALID_CODE);
         }
 
         // record the login state
@@ -16,6 +16,11 @@ public enum MonitorSinkTagEnum {
     CONSUMER_GROUP("consumerGroup"),
 
     REPLICATION("replication"),
 
+    CONNECT_CLUSTER_ID("connectClusterId"),
+
+    CONNECT_CONNECTOR("connectConnector"),
+
     ;
 
     private final String name;
@@ -3,7 +3,9 @@ package com.xiaojukeji.know.streaming.km.monitor.component;
 import com.didiglobal.logi.log.ILog;
 import com.didiglobal.logi.log.LogFactory;
 import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.*;
+import com.xiaojukeji.know.streaming.km.common.bean.entity.metrics.connect.ConnectorMetrics;
 import com.xiaojukeji.know.streaming.km.common.bean.event.metric.*;
+import com.xiaojukeji.know.streaming.km.common.bean.event.metric.connect.ConnectorMetricEvent;
 import com.xiaojukeji.know.streaming.km.common.utils.FutureUtil;
 import com.xiaojukeji.know.streaming.km.monitor.common.MetricSinkPoint;
 import org.springframework.context.ApplicationListener;
@@ -59,6 +61,10 @@ public abstract class AbstractMonitorSinkService implements ApplicationListener<
             } else if(event instanceof ZookeeperMetricEvent) {
                 ZookeeperMetricEvent zookeeperMetricEvent = (ZookeeperMetricEvent)event;
                 sinkMetrics(zookeeperMetric2SinkPoint(zookeeperMetricEvent.getZookeeperMetrics()));
+
+            } else if (event instanceof ConnectorMetricEvent) {
+                ConnectorMetricEvent connectorMetricEvent = (ConnectorMetricEvent)event;
+                sinkMetrics(connectConnectorMetric2SinkPoint(connectorMetricEvent.getConnectorMetricsList()));
             }
         } );
     }
@@ -170,6 +176,21 @@ public abstract class AbstractMonitorSinkService implements ApplicationListener<
         return pointList;
     }
 
+    private List<MetricSinkPoint> connectConnectorMetric2SinkPoint(List<ConnectorMetrics> connectorMetricsList){
+        List<MetricSinkPoint> pointList = new ArrayList<>();
+
+        for(ConnectorMetrics metrics : connectorMetricsList){
+            Map<String, Object> tagsMap = new HashMap<>();
+            tagsMap.put(CLUSTER_ID.getName(), metrics.getClusterPhyId());
+            tagsMap.put(CONNECT_CLUSTER_ID.getName(), metrics.getConnectClusterId());
+            tagsMap.put(CONNECT_CONNECTOR.getName(), metrics.getConnectorName());
+
+            pointList.addAll(genSinkPoint("ConnectConnector", metrics.getMetrics(), metrics.getTimestamp(), tagsMap));
+        }
+
+        return pointList;
+    }
+
     private List<MetricSinkPoint> genSinkPoint(String metricPre,
                                                Map<String, Float> metrics,
                                                long timeStamp,
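`connectConnectorMetric2SinkPoint` above follows the same fan-out shape as the other `*2SinkPoint` helpers: one tag map per metrics object, one point per metric name. A self-contained sketch of that shape, with a hypothetical `Point` record standing in for the project's `MetricSinkPoint`:

```java
import java.util.*;

// Sketch of the metric -> point fan-out: every metric map becomes one point
// per metric name, all points sharing the same tag set.
public class SinkPointSketch {
    record Point(String name, float value, long ts, Map<String, Object> tags) {}

    static List<Point> toPoints(String prefix, Map<String, Float> metrics, long ts, Map<String, Object> tags) {
        List<Point> points = new ArrayList<>();
        metrics.forEach((name, value) -> points.add(new Point(prefix + "." + name, value, ts, tags)));
        return points;
    }

    public static void main(String[] args) {
        Map<String, Object> tags = Map.of("clusterPhyId", 1L, "connectClusterId", 2L, "connectConnector", "mm2-source");
        System.out.println(toPoints("ConnectConnector", Map.of("RunningStatus", 1f), System.currentTimeMillis(), tags));
    }
}
```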
@@ -1,4 +1,4 @@
-package com.xiaojukeji.know.streaming.km.persistence.es.dao.connect;
+package com.xiaojukeji.know.streaming.km.persistence.es.dao.connect.cluster;
 
 import com.didiglobal.logi.elasticsearch.client.response.query.query.ESQueryResponse;
 import com.didiglobal.logi.elasticsearch.client.response.query.query.aggs.ESAggr;
@@ -12,6 +12,7 @@ import org.apache.kafka.clients.admin.AdminClientConfig;
|
|||||||
import org.springframework.beans.factory.annotation.Value;
|
import org.springframework.beans.factory.annotation.Value;
|
||||||
import org.springframework.stereotype.Component;
|
import org.springframework.stereotype.Component;
|
||||||
|
|
||||||
|
import java.time.Duration;
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
@@ -76,10 +77,12 @@ public class KafkaAdminClient extends AbstractClusterLoadedChangedHandler {
|
|||||||
|
|
||||||
LOGGER.info("close kafka AdminClient starting, clusterPhyId:{}", clusterPhyId);
|
LOGGER.info("close kafka AdminClient starting, clusterPhyId:{}", clusterPhyId);
|
||||||
|
|
||||||
boolean allSuccess = this.closeAdminClientList(adminClientList);
|
boolean allSuccess = this.closeAdminClientList(clusterPhyId, adminClientList);
|
||||||
|
|
||||||
if (allSuccess) {
|
if (allSuccess) {
|
||||||
LOGGER.info("close kafka AdminClient success, clusterPhyId:{}", clusterPhyId);
|
LOGGER.info("close kafka AdminClient success, clusterPhyId:{}", clusterPhyId);
|
||||||
|
} else {
|
||||||
|
LOGGER.error("close kafka AdminClient exist failed and can ignore this error, clusterPhyId:{}", clusterPhyId);
|
||||||
}
|
}
|
||||||
} catch (Exception e) {
|
} catch (Exception e) {
|
||||||
LOGGER.error("close kafka AdminClient failed, clusterPhyId:{}", clusterPhyId, e);
|
LOGGER.error("close kafka AdminClient failed, clusterPhyId:{}", clusterPhyId, e);
|
||||||
@@ -116,6 +119,7 @@ public class KafkaAdminClient extends AbstractClusterLoadedChangedHandler {
|
|||||||
|
|
||||||
adminClientList = new ArrayList<>();
|
adminClientList = new ArrayList<>();
|
||||||
for (int i = 0; i < clientCnt; ++i) {
|
for (int i = 0; i < clientCnt; ++i) {
|
||||||
|
props.put(AdminClientConfig.CLIENT_ID_CONFIG, String.format("ApacheAdminClient||clusterPhyId=%d||Cnt=%d", clusterPhyId, i));
|
||||||
adminClientList.add(AdminClient.create(props));
|
adminClientList.add(AdminClient.create(props));
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -125,7 +129,7 @@ public class KafkaAdminClient extends AbstractClusterLoadedChangedHandler {
         } catch (Exception e) {
             LOGGER.error("create kafka AdminClient failed, clusterPhyId:{} props:{}", clusterPhyId, props, e);
 
-            this.closeAdminClientList(adminClientList);
+            this.closeAdminClientList(clusterPhyId, adminClientList);
         } finally {
             modifyClientMapLock.unlock();
         }
@@ -133,7 +137,7 @@ public class KafkaAdminClient extends AbstractClusterLoadedChangedHandler {
         return KAFKA_ADMIN_CLIENT_MAP.get(clusterPhyId).get((int)(System.currentTimeMillis() % clientCnt));
     }
 
-    private boolean closeAdminClientList(List<AdminClient> adminClientList) {
+    private boolean closeAdminClientList(Long clusterPhyId, List<AdminClient> adminClientList) {
         if (adminClientList == null) {
             return true;
         }
@@ -141,9 +145,11 @@ public class KafkaAdminClient extends AbstractClusterLoadedChangedHandler {
         boolean allSuccess = true;
         for (AdminClient adminClient: adminClientList) {
             try {
-                adminClient.close();
+                // Close the client with a 30-second timeout
+                adminClient.close(Duration.ofSeconds(30));
             } catch (Exception e) {
                 // ignore
+                LOGGER.error("close kafka AdminClient exist failed, clusterPhyId:{}", clusterPhyId, e);
                 allSuccess = false;
             }
         }
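The no-argument `AdminClient.close()` can block for a long time while in-flight requests drain; the `close(Duration)` overload bounds the wait and fails still-pending requests once the deadline passes, which is why the diff pairs it with best-effort error logging. A minimal sketch, with the broker address assumed for illustration:

```java
import java.time.Duration;
import java.util.Properties;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;

public class BoundedCloseDemo {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // illustrative address
        AdminClient client = AdminClient.create(props);
        try {
            // Wait at most 30 seconds for in-flight requests; anything still
            // pending after the deadline fails instead of blocking shutdown.
            client.close(Duration.ofSeconds(30));
        } catch (Exception e) {
            // A failed close usually just leaks resources briefly; log and move on,
            // as the diff above does.
            System.err.println("close failed: " + e.getMessage());
        }
    }
}
```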
@@ -157,3 +157,7 @@ INSERT INTO `logi_security_permission` (`id`, `permission_name`, `parent_id`, `l
 INSERT INTO `logi_security_role_permission` (`role_id`, `permission_id`, `is_delete`, `app_name`) VALUES ('1677', '2046', '0', 'know-streaming');
 INSERT INTO `logi_security_role_permission` (`role_id`, `permission_id`, `is_delete`, `app_name`) VALUES ('1677', '2048', '0', 'know-streaming');
 INSERT INTO `logi_security_role_permission` (`role_id`, `permission_id`, `is_delete`, `app_name`) VALUES ('1677', '2050', '0', 'know-streaming');
+
+-- Multi-cluster management permissions, added 2023-07-18
+INSERT INTO `logi_security_permission` (`id`, `permission_name`, `parent_id`, `leaf`, `level`, `description`, `is_delete`, `app_name`) VALUES ('2052', 'Security-User查看密码', '1593', '1', '2', 'Security-User查看密码', '0', 'know-streaming');
+INSERT INTO `logi_security_role_permission` (`role_id`, `permission_id`, `is_delete`, `app_name`) VALUES ('1677', '2052', '0', 'know-streaming');
@@ -15,7 +15,7 @@ import com.xiaojukeji.know.streaming.km.common.constant.Constant;
 import com.xiaojukeji.know.streaming.km.common.enums.connect.ConnectActionEnum;
 import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
 import com.xiaojukeji.know.streaming.km.common.utils.ValidateUtils;
-import com.xiaojukeji.know.streaming.km.core.service.connect.connector.ConnectorService;
+import com.xiaojukeji.know.streaming.km.core.service.connect.connector.OpConnectorService;
 import com.xiaojukeji.know.streaming.km.core.service.connect.plugin.PluginService;
 import io.swagger.annotations.Api;
 import io.swagger.annotations.ApiOperation;
@@ -31,9 +31,8 @@ import org.springframework.web.bind.annotation.*;
 @RestController
 @RequestMapping(ApiPrefix.API_V3_CONNECT_PREFIX)
 public class KafkaConnectorController {
-
     @Autowired
-    private ConnectorService connectorService;
+    private OpConnectorService opConnectorService;
 
     @Autowired
     private ConnectorManager connectorManager;
@@ -56,7 +55,7 @@ public class KafkaConnectorController {
     @DeleteMapping(value ="connectors")
     @ResponseBody
     public Result<Void> deleteConnectors(@Validated @RequestBody ConnectorDeleteDTO dto) {
-        return connectorService.deleteConnector(dto.getConnectClusterId(), dto.getConnectorName(), HttpRequestUtil.getOperator());
+        return opConnectorService.deleteConnector(dto.getConnectClusterId(), dto.getConnectorName(), HttpRequestUtil.getOperator());
     }
 
     @ApiOperation(value = "操作Connector", notes = "")
@@ -64,11 +63,11 @@ public class KafkaConnectorController {
     @ResponseBody
     public Result<Void> operateConnectors(@Validated @RequestBody ConnectorActionDTO dto) {
         if (ConnectActionEnum.RESTART.getValue().equals(dto.getAction())) {
-            return connectorService.restartConnector(dto.getConnectClusterId(), dto.getConnectorName(), HttpRequestUtil.getOperator());
+            return opConnectorService.restartConnector(dto.getConnectClusterId(), dto.getConnectorName(), HttpRequestUtil.getOperator());
         } else if (ConnectActionEnum.STOP.getValue().equals(dto.getAction())) {
-            return connectorService.stopConnector(dto.getConnectClusterId(), dto.getConnectorName(), HttpRequestUtil.getOperator());
+            return opConnectorService.stopConnector(dto.getConnectClusterId(), dto.getConnectorName(), HttpRequestUtil.getOperator());
         } else if (ConnectActionEnum.RESUME.getValue().equals(dto.getAction())) {
-            return connectorService.resumeConnector(dto.getConnectClusterId(), dto.getConnectorName(), HttpRequestUtil.getOperator());
+            return opConnectorService.resumeConnector(dto.getConnectClusterId(), dto.getConnectorName(), HttpRequestUtil.getOperator());
         }
 
         return Result.buildFailure(ResultStatus.PARAM_ILLEGAL);
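The handler above dispatches on the action's raw value through an if/else chain. Where the action is already an enum, a switch expresses the same routing with explicit exhaustiveness; the snippet below is a self-contained, hypothetical illustration of that shape with stand-in types, not code from this commit:

```java
// Stand-in for ConnectActionEnum, reduced to the three actions seen in the diff.
enum ConnectAction { RESTART, STOP, RESUME }

public class ActionDispatchDemo {
    // Route each action to a handler; the default arm mirrors PARAM_ILLEGAL above.
    static String operate(ConnectAction action) {
        switch (action) {
            case RESTART: return "restartConnector";
            case STOP:    return "stopConnector";
            case RESUME:  return "resumeConnector";
            default:      return "PARAM_ILLEGAL";
        }
    }

    public static void main(String[] args) {
        System.out.println(operate(ConnectAction.STOP)); // -> stopConnector
    }
}
```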
@@ -3,17 +3,15 @@ package com.xiaojukeji.know.streaming.km.task.connect.metadata;
 import com.didiglobal.logi.job.annotation.Task;
 import com.didiglobal.logi.job.common.TaskResult;
 import com.didiglobal.logi.job.core.consensual.ConsensualEnum;
-import com.didiglobal.logi.log.ILog;
-import com.didiglobal.logi.log.LogFactory;
 import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.ConnectCluster;
 import com.xiaojukeji.know.streaming.km.common.bean.entity.connect.connector.KSConnector;
 import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
+import com.xiaojukeji.know.streaming.km.common.utils.Tuple;
 import com.xiaojukeji.know.streaming.km.core.service.connect.connector.ConnectorService;
 import org.springframework.beans.factory.annotation.Autowired;
 
-import java.util.ArrayList;
-import java.util.HashSet;
 import java.util.List;
+import java.util.Set;
 
 
 @Task(name = "SyncConnectorTask",
@@ -23,40 +21,21 @@ import java.util.List;
       consensual = ConsensualEnum.BROADCAST,
       timeout = 2 * 60)
 public class SyncConnectorTask extends AbstractAsyncMetadataDispatchTask {
-    private static final ILog LOGGER = LogFactory.getLog(SyncConnectorTask.class);
-
     @Autowired
     private ConnectorService connectorService;
 
     @Override
     public TaskResult processClusterTask(ConnectCluster connectCluster, long triggerTimeUnitMs) {
-        Result<List<String>> nameListResult = connectorService.listConnectorsFromCluster(connectCluster.getId());
-        if (nameListResult.failed()) {
-            return TaskResult.FAIL;
+        // Fetch the data
+        Result<Tuple<Set<String>, List<KSConnector>>> dataResult = connectorService.getDataFromKafka(connectCluster);
+        if (dataResult.failed()) {
+            return new TaskResult(TaskResult.FAIL_CODE, dataResult.getMessage());
         }
 
-        boolean allSuccess = true;
-
-        List<KSConnector> connectorList = new ArrayList<>();
-        for (String connectorName: nameListResult.getData()) {
-            Result<KSConnector> ksConnectorResult = connectorService.getAllConnectorInfoFromCluster(connectCluster.getId(), connectorName);
-            if (ksConnectorResult.failed()) {
-                LOGGER.error(
-                        "method=processClusterTask||connectClusterId={}||connectorName={}||result={}",
-                        connectCluster.getId(), connectorName, ksConnectorResult
-                );
-
-                allSuccess = false;
-                continue;
-            }
-
-            connectorList.add(ksConnectorResult.getData());
-        }
-
-        // add MM2-related information
-        connectorService.completeMirrorMakerInfo(connectCluster, connectorList);
-
-        connectorService.batchReplace(connectCluster.getKafkaClusterPhyId(), connectCluster.getId(), connectorList, new HashSet<>(nameListResult.getData()));
-
-        return allSuccess? TaskResult.SUCCESS: TaskResult.FAIL;
+        // Write to DB
+        connectorService.writeToDB(connectCluster.getId(), dataResult.getData().v1(), dataResult.getData().v2());
+
+        // Return the result
+        return dataResult.getData().v1().size() == dataResult.getData().v2().size()? TaskResult.SUCCESS: TaskResult.FAIL;
     }
 }
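After this refactor, the metadata sync tasks all share one shape: snapshot remote state through a single `getDataFromKafka` call, persist it with `writeToDB`, and report success or failure to the scheduler, with the per-item fetch loop and its logging pushed down into the service layer. A self-contained sketch of that shape with stand-in types (none of these names are the project's real API):

```java
import java.util.List;

// Stand-in result wrapper, loosely modeled on the Result type seen in the diff.
record FetchResult<T>(boolean failed, String message, T data) {}

// Stand-in for the common fetch/persist contract the tasks now rely on.
interface MetadataService<S, T> {
    FetchResult<T> getDataFromKafka(S source); // snapshot remote state
    void writeToDB(long clusterId, T data);    // replace the stored copy
}

class SyncTaskShapeDemo {
    static <S, T> String process(MetadataService<S, T> svc, S source, long clusterId) {
        FetchResult<T> result = svc.getDataFromKafka(source);
        if (result.failed()) {
            return "FAIL: " + result.message(); // surface the fetch error to the scheduler
        }
        svc.writeToDB(clusterId, result.data());
        return "SUCCESS";
    }
}
```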
@@ -3,19 +3,13 @@ package com.xiaojukeji.know.streaming.km.task.kafka.metadata;
 import com.didiglobal.logi.job.annotation.Task;
 import com.didiglobal.logi.job.common.TaskResult;
 import com.didiglobal.logi.job.core.consensual.ConsensualEnum;
-import com.didiglobal.logi.log.ILog;
-import com.didiglobal.logi.log.LogFactory;
 import com.xiaojukeji.know.streaming.km.common.bean.entity.cluster.ClusterPhy;
 import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
-import com.xiaojukeji.know.streaming.km.common.bean.po.KafkaAclPO;
-import com.xiaojukeji.know.streaming.km.common.converter.KafkaAclConverter;
 import com.xiaojukeji.know.streaming.km.core.service.acl.KafkaAclService;
-import com.xiaojukeji.know.streaming.km.core.service.acl.OpKafkaAclService;
 import org.apache.kafka.common.acl.AclBinding;
 import org.springframework.beans.factory.annotation.Autowired;
 
 import java.util.List;
-import java.util.stream.Collectors;
 
 @Task(name = "SyncKafkaAclTask",
       description = "KafkaAcl信息同步到DB",
@@ -24,32 +18,18 @@ import java.util.stream.Collectors;
       consensual = ConsensualEnum.BROADCAST,
       timeout = 2 * 60)
 public class SyncKafkaAclTask extends AbstractAsyncMetadataDispatchTask {
-    private static final ILog log = LogFactory.getLog(SyncKafkaAclTask.class);
-
     @Autowired
     private KafkaAclService kafkaAclService;
 
-    @Autowired
-    private OpKafkaAclService opKafkaAclService;
-
     @Override
     public TaskResult processClusterTask(ClusterPhy clusterPhy, long triggerTimeUnitMs) {
-        Result<List<AclBinding>> aclBindingListResult = kafkaAclService.getAclFromKafka(clusterPhy.getId());
+        Result<List<AclBinding>> aclBindingListResult = kafkaAclService.getDataFromKafka(clusterPhy);
         if (aclBindingListResult.failed()) {
             return TaskResult.FAIL;
         }
 
-        if (!aclBindingListResult.hasData()) {
-            return TaskResult.SUCCESS;
-        }
-
-        // Update the DB data
-        List<KafkaAclPO> poList = aclBindingListResult.getData()
-                .stream()
-                .map(elem -> KafkaAclConverter.convert2KafkaAclPO(clusterPhy.getId(), elem, triggerTimeUnitMs))
-                .collect(Collectors.toList());
-
-        opKafkaAclService.batchUpdateAcls(clusterPhy.getId(), poList);
+        kafkaAclService.writeToDB(clusterPhy.getId(), aclBindingListResult.getData());
 
         return TaskResult.SUCCESS;
     }
 }
@@ -3,12 +3,8 @@ package com.xiaojukeji.know.streaming.km.task.kafka.metadata;
 import com.didiglobal.logi.job.annotation.Task;
 import com.didiglobal.logi.job.common.TaskResult;
 import com.didiglobal.logi.job.core.consensual.ConsensualEnum;
-import com.didiglobal.logi.log.ILog;
-import com.didiglobal.logi.log.LogFactory;
 import com.xiaojukeji.know.streaming.km.common.bean.entity.cluster.ClusterPhy;
-import com.xiaojukeji.know.streaming.km.common.bean.entity.config.ZKConfig;
 import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
-import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
 import com.xiaojukeji.know.streaming.km.common.bean.entity.zookeeper.ZookeeperInfo;
 import com.xiaojukeji.know.streaming.km.core.service.zookeeper.ZookeeperService;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -23,24 +19,17 @@ import java.util.List;
       consensual = ConsensualEnum.BROADCAST,
       timeout = 2 * 60)
 public class SyncZookeeperTask extends AbstractAsyncMetadataDispatchTask {
-    private static final ILog log = LogFactory.getLog(SyncZookeeperTask.class);
-
     @Autowired
     private ZookeeperService zookeeperService;
 
     @Override
     public TaskResult processClusterTask(ClusterPhy clusterPhy, long triggerTimeUnitMs) {
-        Result<List<ZookeeperInfo>> infoResult = zookeeperService.listFromZookeeper(
-                clusterPhy.getId(),
-                clusterPhy.getZookeeper(),
-                ConvertUtil.str2ObjByJson(clusterPhy.getZkProperties(), ZKConfig.class)
-        );
-
+        Result<List<ZookeeperInfo>> infoResult = zookeeperService.getDataFromKafka(clusterPhy);
         if (infoResult.failed()) {
             return new TaskResult(TaskResult.FAIL_CODE, infoResult.getMessage());
         }
 
-        zookeeperService.batchReplaceDataInDB(clusterPhy.getId(), infoResult.getData());
+        zookeeperService.writeToDB(clusterPhy.getId(), infoResult.getData());
 
         return TaskResult.SUCCESS;
     }
@@ -0,0 +1,53 @@
+package com.xiaojukeji.know.streaming.km.task.service.listener;
+
+import com.didiglobal.logi.log.ILog;
+import com.didiglobal.logi.log.LogFactory;
+import com.xiaojukeji.know.streaming.km.common.bean.event.cluster.connect.ClusterPhyDeletedEvent;
+import com.xiaojukeji.know.streaming.km.common.component.SpringTool;
+import com.xiaojukeji.know.streaming.km.common.utils.BackoffUtils;
+import com.xiaojukeji.know.streaming.km.common.utils.FutureUtil;
+import com.xiaojukeji.know.streaming.km.core.service.meta.MetaDataService;
+import org.springframework.context.ApplicationListener;
+import org.springframework.stereotype.Service;
+
+@Service
+public class TaskClusterDeletedListener implements ApplicationListener<ClusterPhyDeletedEvent> {
+    private static final ILog LOGGER = LogFactory.getLog(TaskClusterDeletedListener.class);
+
+    @Override
+    public void onApplicationEvent(ClusterPhyDeletedEvent event) {
+        LOGGER.info("method=onApplicationEvent||clusterPhyId={}||msg=listened delete cluster", event.getClusterPhyId());
+
+        // Hand the task off to KS's own thread pool for asynchronous execution
+        FutureUtil.quickStartupFutureUtil.submitTask(
+                () -> {
+                    // Wait 60 seconds so tasks that are still running don't write data back into the DB
+                    BackoffUtils.backoff(60000);
+
+                    for (MetaDataService metaDataService: SpringTool.getBeansOfType(MetaDataService.class).values()) {
+                        LOGGER.info(
+                                "method=onApplicationEvent||clusterPhyId={}||className={}||msg=delete cluster data in db starting",
+                                event.getClusterPhyId(), metaDataService.getClass().getSimpleName()
+                        );
+
+                        try {
+                            // Delete the data
+                            metaDataService.deleteInDBByKafkaClusterId(event.getClusterPhyId());
+
+                            LOGGER.info(
+                                    "method=onApplicationEvent||clusterPhyId={}||className={}||msg=delete cluster data in db finished",
+                                    event.getClusterPhyId(), metaDataService.getClass().getSimpleName()
+                            );
+                        } catch (Exception e) {
+                            LOGGER.error(
+                                    "method=onApplicationEvent||clusterPhyId={}||className={}||msg=delete cluster data in db failed||errMsg=exception",
+                                    event.getClusterPhyId(), metaDataService.getClass().getSimpleName(), e
+                            );
+                        }
+                    }
+                }
+        );
+
+
+    }
+}
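`ClusterPhyDeletedEvent` rides Spring's standard application-event mechanism, so the code that deletes a cluster only publishes one event and every registered listener, like the new `TaskClusterDeletedListener` above, cleans up independently. A self-contained sketch of the publish/listen wiring with stand-in types; the real event class and its payload live in the KnowStreaming codebase:

```java
import org.springframework.context.ApplicationEvent;
import org.springframework.context.ApplicationEventPublisher;
import org.springframework.context.ApplicationListener;
import org.springframework.stereotype.Component;
import org.springframework.stereotype.Service;

// Stand-in for the project's ClusterPhyDeletedEvent.
class ClusterDeletedDemoEvent extends ApplicationEvent {
    private final Long clusterPhyId;
    ClusterDeletedDemoEvent(Object source, Long clusterPhyId) {
        super(source);
        this.clusterPhyId = clusterPhyId;
    }
    Long getClusterPhyId() { return clusterPhyId; }
}

@Service
class ClusterDeleterDemo {
    private final ApplicationEventPublisher publisher;
    ClusterDeleterDemo(ApplicationEventPublisher publisher) { this.publisher = publisher; }

    void deleteCluster(Long clusterPhyId) {
        // ... remove the cluster record itself, then notify all interested beans:
        publisher.publishEvent(new ClusterDeletedDemoEvent(this, clusterPhyId));
    }
}

@Component
class DemoListener implements ApplicationListener<ClusterDeletedDemoEvent> {
    @Override
    public void onApplicationEvent(ClusterDeletedDemoEvent event) {
        // Each listener reacts on its own; the publisher never has to know it exists.
        System.out.println("cleaning up cluster " + event.getClusterPhyId());
    }
}
```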