add v2.2.0 feature & fix

This commit is contained in:
zengqiao
2021-01-23 13:19:29 +08:00
parent fc109fd1b1
commit 850d43df63
34 changed files with 429 additions and 156 deletions

View File

@@ -0,0 +1,27 @@
---
![kafka-manager-logo](../../assets/images/common/logo_name.png)
**一站式`Apache Kafka`集群指标监控与运维管控平台**
---
# 升级至`2.2.0`版本
`2.2.0`版本在`cluster`表及`logical_cluster`表中各增加了一个字段,因此需要执行下面的SQL进行字段的增加。
```sql
# cluster表中增加jmx_properties字段, 这个字段会用于存储jmx相关的认证以及配置信息
ALTER TABLE `cluster` ADD COLUMN `jmx_properties` TEXT NULL COMMENT 'JMX配置' AFTER `security_properties`;
# logical_cluster中增加identification字段, 同时数据和原先name数据相同, 最后增加一个唯一键.
# 此后, name字段还是表示集群名称, identification字段表示的是集群标识, 只能是字母数字及下划线组成,
# 数据上报到监控系统时, 集群这个标识采用的字段就是identification字段, 之前使用的是name字段.
ALTER TABLE `logical_cluster` ADD COLUMN `identification` VARCHAR(192) NOT NULL DEFAULT '' COMMENT '逻辑集群标识' AFTER `name`;
UPDATE `logical_cluster` SET `identification`=`name` WHERE id>=0;
ALTER TABLE `logical_cluster` ADD UNIQUE INDEX `uniq_identification` (`identification` ASC);
```

View File

@@ -1,3 +1,8 @@
-- create database
CREATE DATABASE logi_kafka_manager;
USE logi_kafka_manager;
-- --
-- Table structure for table `account` -- Table structure for table `account`
-- --
@@ -104,7 +109,8 @@ CREATE TABLE `cluster` (
`zookeeper` varchar(512) NOT NULL DEFAULT '' COMMENT 'zk地址', `zookeeper` varchar(512) NOT NULL DEFAULT '' COMMENT 'zk地址',
`bootstrap_servers` varchar(512) NOT NULL DEFAULT '' COMMENT 'server地址', `bootstrap_servers` varchar(512) NOT NULL DEFAULT '' COMMENT 'server地址',
`kafka_version` varchar(32) NOT NULL DEFAULT '' COMMENT 'kafka版本', `kafka_version` varchar(32) NOT NULL DEFAULT '' COMMENT 'kafka版本',
`security_properties` text COMMENT '安全认证参数', `security_properties` text COMMENT 'Kafka安全认证参数',
`jmx_properties` text COMMENT 'JMX配置',
`status` tinyint(4) NOT NULL DEFAULT '1' COMMENT ' 监控标记, 0表示未监控, 1表示监控中', `status` tinyint(4) NOT NULL DEFAULT '1' COMMENT ' 监控标记, 0表示未监控, 1表示监控中',
`gmt_create` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', `gmt_create` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间',
`gmt_modify` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '修改时间', `gmt_modify` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '修改时间',
@@ -302,20 +308,22 @@ INSERT INTO kafka_user(app_id, password, user_type, operation) VALUES ('dkm_admi
-- Table structure for table `logical_cluster` -- Table structure for table `logical_cluster`
-- --
-- DROP TABLE IF EXISTS `logical_cluster`;
CREATE TABLE `logical_cluster` ( CREATE TABLE `logical_cluster` (
`id` bigint(20) unsigned NOT NULL AUTO_INCREMENT COMMENT 'id', `id` bigint(20) unsigned NOT NULL AUTO_INCREMENT COMMENT 'id',
`name` varchar(192) NOT NULL DEFAULT '' COMMENT '逻辑集群名称', `name` varchar(192) NOT NULL DEFAULT '' COMMENT '逻辑集群名称',
`mode` int(16) NOT NULL DEFAULT '0' COMMENT '逻辑集群类型, 0:共享集群, 1:独享集群, 2:独立集群', `identification` varchar(192) NOT NULL DEFAULT '' COMMENT '逻辑集群标识',
`app_id` varchar(64) NOT NULL DEFAULT '' COMMENT '所属应用', `mode` int(16) NOT NULL DEFAULT '0' COMMENT '逻辑集群类型, 0:共享集群, 1:独享集群, 2:独立集群',
`cluster_id` bigint(20) NOT NULL DEFAULT '-1' COMMENT '集群id', `app_id` varchar(64) NOT NULL DEFAULT '' COMMENT '所属应用',
`region_list` varchar(256) NOT NULL DEFAULT '' COMMENT 'regionid列表', `cluster_id` bigint(20) NOT NULL DEFAULT '-1' COMMENT '集群id',
`description` text COMMENT '备注说明', `region_list` varchar(256) NOT NULL DEFAULT '' COMMENT 'regionid列表',
`gmt_create` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', `description` text COMMENT '备注说明',
`gmt_modify` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '修改时间', `gmt_create` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间',
PRIMARY KEY (`id`), `gmt_modify` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '修改时间',
UNIQUE KEY `uniq_name` (`name`) PRIMARY KEY (`id`),
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COMMENT='逻辑集群信息表'; UNIQUE KEY `uniq_name` (`name`),
UNIQUE KEY `uniq_identification` (`identification`)
) ENGINE=InnoDB AUTO_INCREMENT=7 DEFAULT CHARSET=utf8 COMMENT='逻辑集群信息表';
-- --
-- Table structure for table `monitor_rule` -- Table structure for table `monitor_rule`

View File

@@ -9,19 +9,39 @@
# 安装手册 # 安装手册
## 1、环境依赖
## 环境依赖 如果是以Release包进行安装的则仅安装`Java``MySQL`即可。如果是要先进行源码包进行打包,然后再使用,则需要安装`Maven``Node`环境。
- `Maven 3.5+`(后端打包依赖)
- `node v12+`(前端打包依赖)
- `Java 8+`(运行环境需要) - `Java 8+`(运行环境需要)
- `MySQL 5.7`(数据存储) - `MySQL 5.7`(数据存储)
- `Maven 3.5+`(后端打包依赖)
- `Node 10+`(前端打包依赖)
--- ---
## 环境初始化 ## 2、获取安装包
执行[create_mysql_table.sql](create_mysql_table.sql)中的SQL命令从而创建所需的MySQL库及表默认创建的库名是`kafka_manager` **1、Release直接下载**
这里如果觉得麻烦然后也不想进行二次开发则可以直接下载Release包下载地址[Github Release包下载地址](https://github.com/didi/Logi-KafkaManager/releases)
如果觉得Github的下载地址太慢了也可以进入`Logi-KafkaManager`的用户群获取群地址在README中。
**2、源代码进行打包**
下载好代码之后,进入`Logi-KafkaManager`的主目录,执行`sh build.sh`命令即可,执行完成之后会在`output/kafka-manager-xxx`目录下面生成一个jar包。
对于`windows`环境的用户,估计执行不了`sh build.sh`命令,因此可以直接执行`mvn install`,然后在`kafka-manager-web/target`目录下生成一个kafka-manager-web-xxx.jar的包。
获取到jar包之后我们继续下面的步骤。
---
## 3、MySQL-DB初始化
执行[create_mysql_table.sql](create_mysql_table.sql)中的SQL命令从而创建所需的MySQL库及表默认创建的库名是`logi_kafka_manager`
``` ```
# 示例: # 示例:
@@ -30,29 +50,15 @@ mysql -uXXXX -pXXX -h XXX.XXX.XXX.XXX -PXXXX < ./create_mysql_table.sql
--- ---
## 打包 ## 4、启动
```bash
# 一次性打包
cd ..
mvn install
``` ```
# application.yml 是配置文件最简单的是仅修改MySQL相关的配置即可启动
--- nohup java -jar kafka-manager.jar --spring.config.location=./application.yml > /dev/null 2>&1 &
## 启动
```
# application.yml 是配置文件
cp kafka-manager-web/src/main/resources/application.yml kafka-manager-web/target/
cd kafka-manager-web/target/
nohup java -jar kafka-manager-web-2.1.0-SNAPSHOT.jar --spring.config.location=./application.yml > /dev/null 2>&1 &
``` ```
## 使用 ### 5、使用
本地启动的话,访问`http://localhost:8080`,输入帐号及密码(默认`admin/admin`)进行登录。更多参考:[kafka-manager 用户使用手册](../user_guide/user_guide_cn.md) 本地启动的话,访问`http://localhost:8080`,输入帐号及密码(默认`admin/admin`)进行登录。更多参考:[kafka-manager 用户使用手册](../user_guide/user_guide_cn.md)

View File

@@ -9,6 +9,8 @@ public class LogicalCluster {
private String logicalClusterName; private String logicalClusterName;
private String logicalClusterIdentification;
private Integer mode; private Integer mode;
private Integer topicNum; private Integer topicNum;
@@ -41,6 +43,14 @@ public class LogicalCluster {
this.logicalClusterName = logicalClusterName; this.logicalClusterName = logicalClusterName;
} }
public String getLogicalClusterIdentification() {
return logicalClusterIdentification;
}
public void setLogicalClusterIdentification(String logicalClusterIdentification) {
this.logicalClusterIdentification = logicalClusterIdentification;
}
public Integer getMode() { public Integer getMode() {
return mode; return mode;
} }
@@ -81,6 +91,14 @@ public class LogicalCluster {
this.bootstrapServers = bootstrapServers; this.bootstrapServers = bootstrapServers;
} }
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public Long getGmtCreate() { public Long getGmtCreate() {
return gmtCreate; return gmtCreate;
} }
@@ -97,19 +115,12 @@ public class LogicalCluster {
this.gmtModify = gmtModify; this.gmtModify = gmtModify;
} }
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
@Override @Override
public String toString() { public String toString() {
return "LogicalCluster{" + return "LogicalCluster{" +
"logicalClusterId=" + logicalClusterId + "logicalClusterId=" + logicalClusterId +
", logicalClusterName='" + logicalClusterName + '\'' + ", logicalClusterName='" + logicalClusterName + '\'' +
", logicalClusterIdentification='" + logicalClusterIdentification + '\'' +
", mode=" + mode + ", mode=" + mode +
", topicNum=" + topicNum + ", topicNum=" + topicNum +
", clusterVersion='" + clusterVersion + '\'' + ", clusterVersion='" + clusterVersion + '\'' +

View File

@@ -27,9 +27,12 @@ public class ClusterDTO {
@ApiModelProperty(value="数据中心") @ApiModelProperty(value="数据中心")
private String idc; private String idc;
@ApiModelProperty(value="安全配置参数") @ApiModelProperty(value="Kafka安全配置")
private String securityProperties; private String securityProperties;
@ApiModelProperty(value="Jmx配置")
private String jmxProperties;
public Long getClusterId() { public Long getClusterId() {
return clusterId; return clusterId;
} }
@@ -78,6 +81,14 @@ public class ClusterDTO {
this.securityProperties = securityProperties; this.securityProperties = securityProperties;
} }
public String getJmxProperties() {
return jmxProperties;
}
public void setJmxProperties(String jmxProperties) {
this.jmxProperties = jmxProperties;
}
@Override @Override
public String toString() { public String toString() {
return "ClusterDTO{" + return "ClusterDTO{" +
@@ -87,6 +98,7 @@ public class ClusterDTO {
", bootstrapServers='" + bootstrapServers + '\'' + ", bootstrapServers='" + bootstrapServers + '\'' +
", idc='" + idc + '\'' + ", idc='" + idc + '\'' +
", securityProperties='" + securityProperties + '\'' + ", securityProperties='" + securityProperties + '\'' +
", jmxProperties='" + jmxProperties + '\'' +
'}'; '}';
} }

View File

@@ -21,6 +21,9 @@ public class LogicalClusterDTO {
@ApiModelProperty(value = "名称") @ApiModelProperty(value = "名称")
private String name; private String name;
@ApiModelProperty(value = "集群标识, 用于告警的上报")
private String identification;
@ApiModelProperty(value = "集群模式") @ApiModelProperty(value = "集群模式")
private Integer mode; private Integer mode;
@@ -52,6 +55,14 @@ public class LogicalClusterDTO {
this.name = name; this.name = name;
} }
public String getIdentification() {
return identification;
}
public void setIdentification(String identification) {
this.identification = identification;
}
public Integer getMode() { public Integer getMode() {
return mode; return mode;
} }
@@ -97,6 +108,7 @@ public class LogicalClusterDTO {
return "LogicalClusterDTO{" + return "LogicalClusterDTO{" +
"id=" + id + "id=" + id +
", name='" + name + '\'' + ", name='" + name + '\'' +
", identification='" + identification + '\'' +
", mode=" + mode + ", mode=" + mode +
", clusterId=" + clusterId + ", clusterId=" + clusterId +
", regionIdList=" + regionIdList + ", regionIdList=" + regionIdList +
@@ -117,6 +129,7 @@ public class LogicalClusterDTO {
} }
appId = ValidateUtils.isNull(appId)? "": appId; appId = ValidateUtils.isNull(appId)? "": appId;
description = ValidateUtils.isNull(description)? "": description; description = ValidateUtils.isNull(description)? "": description;
identification = ValidateUtils.isNull(identification)? name: identification;
return true; return true;
} }
} }

View File

@@ -17,6 +17,8 @@ public class ClusterDO implements Comparable<ClusterDO> {
private String securityProperties; private String securityProperties;
private String jmxProperties;
private Integer status; private Integer status;
private Date gmtCreate; private Date gmtCreate;
@@ -31,30 +33,6 @@ public class ClusterDO implements Comparable<ClusterDO> {
this.id = id; this.id = id;
} }
public Integer getStatus() {
return status;
}
public void setStatus(Integer status) {
this.status = status;
}
public Date getGmtCreate() {
return gmtCreate;
}
public void setGmtCreate(Date gmtCreate) {
this.gmtCreate = gmtCreate;
}
public Date getGmtModify() {
return gmtModify;
}
public void setGmtModify(Date gmtModify) {
this.gmtModify = gmtModify;
}
public String getClusterName() { public String getClusterName() {
return clusterName; return clusterName;
} }
@@ -87,6 +65,38 @@ public class ClusterDO implements Comparable<ClusterDO> {
this.securityProperties = securityProperties; this.securityProperties = securityProperties;
} }
public String getJmxProperties() {
return jmxProperties;
}
public void setJmxProperties(String jmxProperties) {
this.jmxProperties = jmxProperties;
}
public Integer getStatus() {
return status;
}
public void setStatus(Integer status) {
this.status = status;
}
public Date getGmtCreate() {
return gmtCreate;
}
public void setGmtCreate(Date gmtCreate) {
this.gmtCreate = gmtCreate;
}
public Date getGmtModify() {
return gmtModify;
}
public void setGmtModify(Date gmtModify) {
this.gmtModify = gmtModify;
}
@Override @Override
public String toString() { public String toString() {
return "ClusterDO{" + return "ClusterDO{" +
@@ -95,6 +105,7 @@ public class ClusterDO implements Comparable<ClusterDO> {
", zookeeper='" + zookeeper + '\'' + ", zookeeper='" + zookeeper + '\'' +
", bootstrapServers='" + bootstrapServers + '\'' + ", bootstrapServers='" + bootstrapServers + '\'' +
", securityProperties='" + securityProperties + '\'' + ", securityProperties='" + securityProperties + '\'' +
", jmxProperties='" + jmxProperties + '\'' +
", status=" + status + ", status=" + status +
", gmtCreate=" + gmtCreate + ", gmtCreate=" + gmtCreate +
", gmtModify=" + gmtModify + ", gmtModify=" + gmtModify +

View File

@@ -11,6 +11,8 @@ public class LogicalClusterDO {
private String name; private String name;
private String identification;
private Integer mode; private Integer mode;
private String appId; private String appId;
@@ -41,6 +43,14 @@ public class LogicalClusterDO {
this.name = name; this.name = name;
} }
public String getIdentification() {
return identification;
}
public void setIdentification(String identification) {
this.identification = identification;
}
public Integer getMode() { public Integer getMode() {
return mode; return mode;
} }
@@ -102,6 +112,7 @@ public class LogicalClusterDO {
return "LogicalClusterDO{" + return "LogicalClusterDO{" +
"id=" + id + "id=" + id +
", name='" + name + '\'' + ", name='" + name + '\'' +
", identification='" + identification + '\'' +
", mode=" + mode + ", mode=" + mode +
", appId='" + appId + '\'' + ", appId='" + appId + '\'' +
", clusterId=" + clusterId + ", clusterId=" + clusterId +

View File

@@ -15,6 +15,9 @@ public class LogicClusterVO {
@ApiModelProperty(value="逻辑集群名称") @ApiModelProperty(value="逻辑集群名称")
private String clusterName; private String clusterName;
@ApiModelProperty(value="逻辑标识")
private String clusterIdentification;
@ApiModelProperty(value="逻辑集群类型, 0:共享集群, 1:独享集群, 2:独立集群") @ApiModelProperty(value="逻辑集群类型, 0:共享集群, 1:独享集群, 2:独立集群")
private Integer mode; private Integer mode;
@@ -24,9 +27,6 @@ public class LogicClusterVO {
@ApiModelProperty(value="集群版本") @ApiModelProperty(value="集群版本")
private String clusterVersion; private String clusterVersion;
@ApiModelProperty(value="物理集群ID")
private Long physicalClusterId;
@ApiModelProperty(value="集群服务地址") @ApiModelProperty(value="集群服务地址")
private String bootstrapServers; private String bootstrapServers;
@@ -55,6 +55,22 @@ public class LogicClusterVO {
this.clusterName = clusterName; this.clusterName = clusterName;
} }
public String getClusterIdentification() {
return clusterIdentification;
}
public void setClusterIdentification(String clusterIdentification) {
this.clusterIdentification = clusterIdentification;
}
public Integer getMode() {
return mode;
}
public void setMode(Integer mode) {
this.mode = mode;
}
public Integer getTopicNum() { public Integer getTopicNum() {
return topicNum; return topicNum;
} }
@@ -71,14 +87,6 @@ public class LogicClusterVO {
this.clusterVersion = clusterVersion; this.clusterVersion = clusterVersion;
} }
public Long getPhysicalClusterId() {
return physicalClusterId;
}
public void setPhysicalClusterId(Long physicalClusterId) {
this.physicalClusterId = physicalClusterId;
}
public String getBootstrapServers() { public String getBootstrapServers() {
return bootstrapServers; return bootstrapServers;
} }
@@ -87,6 +95,14 @@ public class LogicClusterVO {
this.bootstrapServers = bootstrapServers; this.bootstrapServers = bootstrapServers;
} }
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public Long getGmtCreate() { public Long getGmtCreate() {
return gmtCreate; return gmtCreate;
} }
@@ -103,32 +119,15 @@ public class LogicClusterVO {
this.gmtModify = gmtModify; this.gmtModify = gmtModify;
} }
public Integer getMode() {
return mode;
}
public void setMode(Integer mode) {
this.mode = mode;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
@Override @Override
public String toString() { public String toString() {
return "LogicClusterVO{" + return "LogicClusterVO{" +
"clusterId=" + clusterId + "clusterId=" + clusterId +
", clusterName='" + clusterName + '\'' + ", clusterName='" + clusterName + '\'' +
", clusterIdentification='" + clusterIdentification + '\'' +
", mode=" + mode + ", mode=" + mode +
", topicNum=" + topicNum + ", topicNum=" + topicNum +
", clusterVersion='" + clusterVersion + '\'' + ", clusterVersion='" + clusterVersion + '\'' +
", physicalClusterId=" + physicalClusterId +
", bootstrapServers='" + bootstrapServers + '\'' + ", bootstrapServers='" + bootstrapServers + '\'' +
", description='" + description + '\'' + ", description='" + description + '\'' +
", gmtCreate=" + gmtCreate + ", gmtCreate=" + gmtCreate +

View File

@@ -32,9 +32,12 @@ public class ClusterBaseVO {
@ApiModelProperty(value="集群类型") @ApiModelProperty(value="集群类型")
private Integer mode; private Integer mode;
@ApiModelProperty(value="安全配置参数") @ApiModelProperty(value="Kafka安全配置")
private String securityProperties; private String securityProperties;
@ApiModelProperty(value="Jmx配置")
private String jmxProperties;
@ApiModelProperty(value="1:监控中, 0:暂停监控") @ApiModelProperty(value="1:监控中, 0:暂停监控")
private Integer status; private Integer status;
@@ -108,6 +111,14 @@ public class ClusterBaseVO {
this.securityProperties = securityProperties; this.securityProperties = securityProperties;
} }
public String getJmxProperties() {
return jmxProperties;
}
public void setJmxProperties(String jmxProperties) {
this.jmxProperties = jmxProperties;
}
public Integer getStatus() { public Integer getStatus() {
return status; return status;
} }
@@ -141,8 +152,9 @@ public class ClusterBaseVO {
", bootstrapServers='" + bootstrapServers + '\'' + ", bootstrapServers='" + bootstrapServers + '\'' +
", kafkaVersion='" + kafkaVersion + '\'' + ", kafkaVersion='" + kafkaVersion + '\'' +
", idc='" + idc + '\'' + ", idc='" + idc + '\'' +
", mode='" + mode + '\'' + ", mode=" + mode +
", securityProperties='" + securityProperties + '\'' + ", securityProperties='" + securityProperties + '\'' +
", jmxProperties='" + jmxProperties + '\'' +
", status=" + status + ", status=" + status +
", gmtCreate=" + gmtCreate + ", gmtCreate=" + gmtCreate +
", gmtModify=" + gmtModify + ", gmtModify=" + gmtModify +

View File

@@ -18,6 +18,9 @@ public class LogicalClusterVO {
@ApiModelProperty(value = "逻辑集群名称") @ApiModelProperty(value = "逻辑集群名称")
private String logicalClusterName; private String logicalClusterName;
@ApiModelProperty(value = "逻辑集群标识")
private String logicalClusterIdentification;
@ApiModelProperty(value = "物理集群ID") @ApiModelProperty(value = "物理集群ID")
private Long physicalClusterId; private Long physicalClusterId;
@@ -55,6 +58,14 @@ public class LogicalClusterVO {
this.logicalClusterName = logicalClusterName; this.logicalClusterName = logicalClusterName;
} }
public String getLogicalClusterIdentification() {
return logicalClusterIdentification;
}
public void setLogicalClusterIdentification(String logicalClusterIdentification) {
this.logicalClusterIdentification = logicalClusterIdentification;
}
public Long getPhysicalClusterId() { public Long getPhysicalClusterId() {
return physicalClusterId; return physicalClusterId;
} }
@@ -116,6 +127,7 @@ public class LogicalClusterVO {
return "LogicalClusterVO{" + return "LogicalClusterVO{" +
"logicalClusterId=" + logicalClusterId + "logicalClusterId=" + logicalClusterId +
", logicalClusterName='" + logicalClusterName + '\'' + ", logicalClusterName='" + logicalClusterName + '\'' +
", logicalClusterIdentification='" + logicalClusterIdentification + '\'' +
", physicalClusterId=" + physicalClusterId + ", physicalClusterId=" + physicalClusterId +
", regionIdList=" + regionIdList + ", regionIdList=" + regionIdList +
", mode=" + mode + ", mode=" + mode +

View File

@@ -53,6 +53,13 @@ public class JsonUtils {
return JSON.toJSONString(obj); return JSON.toJSONString(obj);
} }
/**
 * Deserializes a JSON string into an instance of the given class via fastjson.
 *
 * @param src   JSON text; may be null or blank
 * @param clazz target type to deserialize into
 * @return the parsed object, or {@code null} when {@code src} is blank
 *         (callers such as the JMX-config parsing treat null as "not configured");
 *         note: {@code JSON.parseObject} may throw on malformed JSON — callers
 *         are expected to catch (see PhysicalClusterMetadataManager usage)
 */
public static <T> T stringToObj(String src, Class<T> clazz) {
// Blank input is a normal "absent" case, not an error — return null instead of parsing.
if (ValidateUtils.isBlank(src)) {
return null;
}
return JSON.parseObject(src, clazz);
}
public static List<TopicConnectionDO> parseTopicConnections(Long clusterId, JSONObject jsonObject, long postTime) { public static List<TopicConnectionDO> parseTopicConnections(Long clusterId, JSONObject jsonObject, long postTime) {
List<TopicConnectionDO> connectionDOList = new ArrayList<>(); List<TopicConnectionDO> connectionDOList = new ArrayList<>();
for (String clientType: jsonObject.keySet()) { for (String clientType: jsonObject.keySet()) {

View File

@@ -0,0 +1,65 @@
package com.xiaojukeji.kafka.manager.common.utils.jmx;
/**
 * Per-cluster JMX connection settings, deserialized from the
 * {@code cluster.jmx_properties} JSON column. All fields are nullable:
 * a null field means "not configured" and callers apply their own defaults
 * (see JmxConnectorWrap, which defaults maxConn to 1 and treats a null
 * openSSL as disabled).
 */
public class JmxConfig {
    // Maximum number of concurrent JMX connections per broker host.
    private Integer maxConn;

    // JMX authentication user name; null/blank means no authentication.
    private String username;

    // JMX authentication password; null/blank means no authentication.
    private String password;

    // Whether the JMX endpoint requires SSL; null is treated as "off" by callers.
    private Boolean openSSL;

    public Integer getMaxConn() {
        return maxConn;
    }

    public void setMaxConn(Integer maxConn) {
        this.maxConn = maxConn;
    }

    public String getUsername() {
        return username;
    }

    public void setUsername(String username) {
        this.username = username;
    }

    public String getPassword() {
        return password;
    }

    public void setPassword(String password) {
        this.password = password;
    }

    // NOTE: kept as isOpenSSL() (returning the nullable wrapper) because existing
    // callers invoke isOpenSSL() and null-check the result.
    public Boolean isOpenSSL() {
        return openSSL;
    }

    public void setOpenSSL(Boolean openSSL) {
        this.openSSL = openSSL;
    }

    @Override
    public String toString() {
        // SECURITY: mask the password — this toString is used in log statements,
        // and credentials must never be written to log files.
        return "JmxConfig{" +
                "maxConn=" + maxConn +
                ", username='" + username + '\'' +
                ", password='" + (password == null ? null : "******") + '\'' +
                ", openSSL=" + openSSL +
                '}';
    }
}

View File

@@ -1,5 +1,6 @@
package com.xiaojukeji.kafka.manager.common.utils.jmx; package com.xiaojukeji.kafka.manager.common.utils.jmx;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
@@ -7,8 +8,14 @@ import javax.management.*;
import javax.management.remote.JMXConnector; import javax.management.remote.JMXConnector;
import javax.management.remote.JMXConnectorFactory; import javax.management.remote.JMXConnectorFactory;
import javax.management.remote.JMXServiceURL; import javax.management.remote.JMXServiceURL;
import javax.management.remote.rmi.RMIConnectorServer;
import javax.naming.Context;
import javax.rmi.ssl.SslRMIClientSocketFactory;
import java.io.IOException; import java.io.IOException;
import java.net.MalformedURLException; import java.net.MalformedURLException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.Set; import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicInteger;
@@ -28,13 +35,19 @@ public class JmxConnectorWrap {
private AtomicInteger atomicInteger; private AtomicInteger atomicInteger;
public JmxConnectorWrap(String host, int port, int maxConn) { private JmxConfig jmxConfig;
public JmxConnectorWrap(String host, int port, JmxConfig jmxConfig) {
this.host = host; this.host = host;
this.port = port; this.port = port;
if (maxConn <= 0) { this.jmxConfig = jmxConfig;
maxConn = 1; if (ValidateUtils.isNull(this.jmxConfig)) {
this.jmxConfig = new JmxConfig();
} }
this.atomicInteger = new AtomicInteger(maxConn); if (ValidateUtils.isNullOrLessThanZero(this.jmxConfig.getMaxConn())) {
this.jmxConfig.setMaxConn(1);
}
this.atomicInteger = new AtomicInteger(this.jmxConfig.getMaxConn());
} }
public boolean checkJmxConnectionAndInitIfNeed() { public boolean checkJmxConnectionAndInitIfNeed() {
@@ -64,8 +77,18 @@ public class JmxConnectorWrap {
} }
String jmxUrl = String.format("service:jmx:rmi:///jndi/rmi://%s:%d/jmxrmi", host, port); String jmxUrl = String.format("service:jmx:rmi:///jndi/rmi://%s:%d/jmxrmi", host, port);
try { try {
JMXServiceURL url = new JMXServiceURL(jmxUrl); Map<String, Object> environment = new HashMap<String, Object>();
jmxConnector = JMXConnectorFactory.connect(url, null); if (!ValidateUtils.isBlank(this.jmxConfig.getUsername()) && !ValidateUtils.isBlank(this.jmxConfig.getPassword())) {
environment.put(javax.management.remote.JMXConnector.CREDENTIALS, Arrays.asList(this.jmxConfig.getUsername(), this.jmxConfig.getPassword()));
}
if (jmxConfig.isOpenSSL() != null && this.jmxConfig.isOpenSSL()) {
environment.put(Context.SECURITY_PROTOCOL, "ssl");
SslRMIClientSocketFactory clientSocketFactory = new SslRMIClientSocketFactory();
environment.put(RMIConnectorServer.RMI_CLIENT_SOCKET_FACTORY_ATTRIBUTE, clientSocketFactory);
environment.put("com.sun.jndi.rmi.factory.socket", clientSocketFactory);
}
jmxConnector = JMXConnectorFactory.connect(new JMXServiceURL(jmxUrl), environment);
LOGGER.info("JMX connect success, host:{} port:{}.", host, port); LOGGER.info("JMX connect success, host:{} port:{}.", host, port);
return true; return true;
} catch (MalformedURLException e) { } catch (MalformedURLException e) {

View File

@@ -69,6 +69,19 @@ public class LogicalClusterMetadataManager {
return LOGICAL_CLUSTER_ID_BROKER_ID_MAP.getOrDefault(logicClusterId, new HashSet<>()); return LOGICAL_CLUSTER_ID_BROKER_ID_MAP.getOrDefault(logicClusterId, new HashSet<>());
} }
/**
 * Looks up the logical-cluster id that a topic belongs to.
 *
 * @param physicalClusterId id of the physical Kafka cluster the topic lives on
 * @param topicName         topic name to resolve
 * @return the logical cluster id, or {@code null} when the physical cluster is
 *         unknown or the topic is not mapped to any logical cluster
 */
public Long getTopicLogicalClusterId(Long physicalClusterId, String topicName) {
// Lazily populate the in-memory mapping on first access.
if (!LOADED.get()) {
flush();
}
Map<String, Long> logicalClusterIdMap = TOPIC_LOGICAL_MAP.get(physicalClusterId);
if (ValidateUtils.isNull(logicalClusterIdMap)) {
return null;
}
// Map#get returns null for unmapped topics, which is the documented "not found" result.
return logicalClusterIdMap.get(topicName);
}
public LogicalClusterDO getTopicLogicalCluster(Long physicalClusterId, String topicName) { public LogicalClusterDO getTopicLogicalCluster(Long physicalClusterId, String topicName) {
if (!LOADED.get()) { if (!LOADED.get()) {
flush(); flush();

View File

@@ -4,9 +4,11 @@ import com.xiaojukeji.kafka.manager.common.bizenum.KafkaBrokerRoleEnum;
import com.xiaojukeji.kafka.manager.common.constant.Constant; import com.xiaojukeji.kafka.manager.common.constant.Constant;
import com.xiaojukeji.kafka.manager.common.constant.KafkaConstant; import com.xiaojukeji.kafka.manager.common.constant.KafkaConstant;
import com.xiaojukeji.kafka.manager.common.entity.KafkaVersion; import com.xiaojukeji.kafka.manager.common.entity.KafkaVersion;
import com.xiaojukeji.kafka.manager.common.utils.JsonUtils;
import com.xiaojukeji.kafka.manager.common.utils.ListUtils; import com.xiaojukeji.kafka.manager.common.utils.ListUtils;
import com.xiaojukeji.kafka.manager.common.entity.pojo.ClusterDO; import com.xiaojukeji.kafka.manager.common.entity.pojo.ClusterDO;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils; import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.common.utils.jmx.JmxConfig;
import com.xiaojukeji.kafka.manager.common.zookeeper.znode.brokers.BrokerMetadata; import com.xiaojukeji.kafka.manager.common.zookeeper.znode.brokers.BrokerMetadata;
import com.xiaojukeji.kafka.manager.common.zookeeper.znode.ControllerData; import com.xiaojukeji.kafka.manager.common.zookeeper.znode.ControllerData;
import com.xiaojukeji.kafka.manager.common.zookeeper.znode.brokers.TopicMetadata; import com.xiaojukeji.kafka.manager.common.zookeeper.znode.brokers.TopicMetadata;
@@ -118,8 +120,15 @@ public class PhysicalClusterMetadataManager {
return; return;
} }
JmxConfig jmxConfig = null;
try {
jmxConfig = JsonUtils.stringToObj(clusterDO.getJmxProperties(), JmxConfig.class);
} catch (Exception e) {
LOGGER.error("class=PhysicalClusterMetadataManager||method=addNew||clusterDO={}||msg=parse jmx properties failed", JsonUtils.toJSONString(clusterDO));
}
//增加Broker监控 //增加Broker监控
BrokerStateListener brokerListener = new BrokerStateListener(clusterDO.getId(), zkConfig, configUtils.getJmxMaxConn()); BrokerStateListener brokerListener = new BrokerStateListener(clusterDO.getId(), zkConfig, jmxConfig);
brokerListener.init(); brokerListener.init();
zkConfig.watchChildren(ZkPathUtil.BROKER_IDS_ROOT, brokerListener); zkConfig.watchChildren(ZkPathUtil.BROKER_IDS_ROOT, brokerListener);
@@ -280,7 +289,7 @@ public class PhysicalClusterMetadataManager {
//---------------------------Broker元信息相关-------------- //---------------------------Broker元信息相关--------------
public static void putBrokerMetadata(Long clusterId, Integer brokerId, BrokerMetadata brokerMetadata, Integer jmxMaxConn) { public static void putBrokerMetadata(Long clusterId, Integer brokerId, BrokerMetadata brokerMetadata, JmxConfig jmxConfig) {
Map<Integer, BrokerMetadata> metadataMap = BROKER_METADATA_MAP.get(clusterId); Map<Integer, BrokerMetadata> metadataMap = BROKER_METADATA_MAP.get(clusterId);
if (metadataMap == null) { if (metadataMap == null) {
return; return;
@@ -288,7 +297,7 @@ public class PhysicalClusterMetadataManager {
metadataMap.put(brokerId, brokerMetadata); metadataMap.put(brokerId, brokerMetadata);
Map<Integer, JmxConnectorWrap> jmxMap = JMX_CONNECTOR_MAP.getOrDefault(clusterId, new ConcurrentHashMap<>()); Map<Integer, JmxConnectorWrap> jmxMap = JMX_CONNECTOR_MAP.getOrDefault(clusterId, new ConcurrentHashMap<>());
jmxMap.put(brokerId, new JmxConnectorWrap(brokerMetadata.getHost(), brokerMetadata.getJmxPort(), jmxMaxConn)); jmxMap.put(brokerId, new JmxConnectorWrap(brokerMetadata.getHost(), brokerMetadata.getJmxPort(), jmxConfig));
JMX_CONNECTOR_MAP.put(clusterId, jmxMap); JMX_CONNECTOR_MAP.put(clusterId, jmxMap);
Map<Integer, KafkaVersion> versionMap = KAFKA_VERSION_MAP.getOrDefault(clusterId, new ConcurrentHashMap<>()); Map<Integer, KafkaVersion> versionMap = KAFKA_VERSION_MAP.getOrDefault(clusterId, new ConcurrentHashMap<>());

View File

@@ -203,6 +203,7 @@ public class ClusterServiceImpl implements ClusterService {
zk.close(); zk.close();
} }
} catch (Throwable t) { } catch (Throwable t) {
return false;
} }
} }
return true; return true;

View File

@@ -113,6 +113,7 @@ public class LogicalClusterServiceImpl implements LogicalClusterService {
LogicalCluster logicalCluster = new LogicalCluster(); LogicalCluster logicalCluster = new LogicalCluster();
logicalCluster.setLogicalClusterId(logicalClusterDO.getId()); logicalCluster.setLogicalClusterId(logicalClusterDO.getId());
logicalCluster.setLogicalClusterName(logicalClusterDO.getName()); logicalCluster.setLogicalClusterName(logicalClusterDO.getName());
logicalCluster.setLogicalClusterIdentification(logicalClusterDO.getIdentification());
logicalCluster.setClusterVersion( logicalCluster.setClusterVersion(
physicalClusterMetadataManager.getKafkaVersion( physicalClusterMetadataManager.getKafkaVersion(
logicalClusterDO.getClusterId(), logicalClusterDO.getClusterId(),

View File

@@ -13,9 +13,6 @@ public class ConfigUtils {
@Value(value = "${custom.idc}") @Value(value = "${custom.idc}")
private String idc; private String idc;
@Value("${custom.jmx.max-conn}")
private Integer jmxMaxConn;
@Value(value = "${spring.profiles.active}") @Value(value = "${spring.profiles.active}")
private String kafkaManagerEnv; private String kafkaManagerEnv;
@@ -30,14 +27,6 @@ public class ConfigUtils {
this.idc = idc; this.idc = idc;
} }
public Integer getJmxMaxConn() {
return jmxMaxConn;
}
public void setJmxMaxConn(Integer jmxMaxConn) {
this.jmxMaxConn = jmxMaxConn;
}
public String getKafkaManagerEnv() { public String getKafkaManagerEnv() {
return kafkaManagerEnv; return kafkaManagerEnv;
} }

View File

@@ -1,5 +1,6 @@
package com.xiaojukeji.kafka.manager.service.zookeeper; package com.xiaojukeji.kafka.manager.service.zookeeper;
import com.xiaojukeji.kafka.manager.common.utils.jmx.JmxConfig;
import com.xiaojukeji.kafka.manager.common.zookeeper.znode.brokers.BrokerMetadata; import com.xiaojukeji.kafka.manager.common.zookeeper.znode.brokers.BrokerMetadata;
import com.xiaojukeji.kafka.manager.common.zookeeper.StateChangeListener; import com.xiaojukeji.kafka.manager.common.zookeeper.StateChangeListener;
import com.xiaojukeji.kafka.manager.common.zookeeper.ZkConfigImpl; import com.xiaojukeji.kafka.manager.common.zookeeper.ZkConfigImpl;
@@ -22,12 +23,12 @@ public class BrokerStateListener implements StateChangeListener {
private ZkConfigImpl zkConfig; private ZkConfigImpl zkConfig;
private Integer jmxMaxConn; private JmxConfig jmxConfig;
public BrokerStateListener(Long clusterId, ZkConfigImpl zkConfig, Integer jmxMaxConn) { public BrokerStateListener(Long clusterId, ZkConfigImpl zkConfig, JmxConfig jmxConfig) {
this.clusterId = clusterId; this.clusterId = clusterId;
this.zkConfig = zkConfig; this.zkConfig = zkConfig;
this.jmxMaxConn = jmxMaxConn; this.jmxConfig = jmxConfig;
} }
@Override @Override
@@ -84,7 +85,7 @@ public class BrokerStateListener implements StateChangeListener {
} }
brokerMetadata.setClusterId(clusterId); brokerMetadata.setClusterId(clusterId);
brokerMetadata.setBrokerId(brokerId); brokerMetadata.setBrokerId(brokerId);
PhysicalClusterMetadataManager.putBrokerMetadata(clusterId, brokerId, brokerMetadata, jmxMaxConn); PhysicalClusterMetadataManager.putBrokerMetadata(clusterId, brokerId, brokerMetadata, jmxConfig);
} catch (Exception e) { } catch (Exception e) {
LOGGER.error("add broker failed, clusterId:{} brokerMetadata:{}.", clusterId, brokerMetadata, e); LOGGER.error("add broker failed, clusterId:{} brokerMetadata:{}.", clusterId, brokerMetadata, e);
} }

View File

@@ -12,6 +12,7 @@
<result column="zookeeper" property="zookeeper" /> <result column="zookeeper" property="zookeeper" />
<result column="bootstrap_servers" property="bootstrapServers" /> <result column="bootstrap_servers" property="bootstrapServers" />
<result column="security_properties" property="securityProperties" /> <result column="security_properties" property="securityProperties" />
<result column="jmx_properties" property="jmxProperties" />
</resultMap> </resultMap>
<insert id="insert" <insert id="insert"
@@ -19,9 +20,9 @@
useGeneratedKeys="true" useGeneratedKeys="true"
keyProperty="id"> keyProperty="id">
INSERT INTO cluster ( INSERT INTO cluster (
cluster_name, zookeeper, bootstrap_servers, security_properties cluster_name, zookeeper, bootstrap_servers, security_properties, jmx_properties
) VALUES ( ) VALUES (
#{clusterName}, #{zookeeper}, #{bootstrapServers}, #{securityProperties} #{clusterName}, #{zookeeper}, #{bootstrapServers}, #{securityProperties}, #{jmxProperties}
) )
</insert> </insert>
@@ -30,6 +31,7 @@
cluster_name=#{clusterName}, cluster_name=#{clusterName},
bootstrap_servers=#{bootstrapServers}, bootstrap_servers=#{bootstrapServers},
security_properties=#{securityProperties}, security_properties=#{securityProperties},
jmx_properties=#{jmxProperties},
status=#{status} status=#{status}
WHERE id = #{id} WHERE id = #{id}
</update> </update>

View File

@@ -1,24 +1,25 @@
<?xml version="1.0" encoding="UTF-8"?> <?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd"> <!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="LogicalClusterDao"> <mapper namespace="LogicalClusterDao">
<resultMap id="LogicalClusterMap" type="com.xiaojukeji.kafka.manager.common.entity.pojo.LogicalClusterDO"> <resultMap id="LogicalClusterMap" type="com.xiaojukeji.kafka.manager.common.entity.pojo.LogicalClusterDO">
<id column="id" property="id" /> <id column="id" property="id" />
<result column="gmt_create" property="gmtCreate" /> <result column="gmt_create" property="gmtCreate" />
<result column="gmt_modify" property="gmtModify" /> <result column="gmt_modify" property="gmtModify" />
<result column="name" property="name" /> <result column="name" property="name" />
<result column="app_id" property="appId" /> <result column="identification" property="identification" />
<result column="cluster_id" property="clusterId" /> <result column="app_id" property="appId" />
<result column="region_list" property="regionList" /> <result column="cluster_id" property="clusterId" />
<result column="mode" property="mode" /> <result column="region_list" property="regionList" />
<result column="description" property="description" /> <result column="mode" property="mode" />
<result column="description" property="description" />
</resultMap> </resultMap>
<insert id="insert" parameterType="com.xiaojukeji.kafka.manager.common.entity.pojo.LogicalClusterDO"> <insert id="insert" parameterType="com.xiaojukeji.kafka.manager.common.entity.pojo.LogicalClusterDO">
INSERT INTO logical_cluster INSERT INTO logical_cluster
(name, app_id, cluster_id, region_list, mode, description) (name, identification, app_id, cluster_id, region_list, mode, description)
VALUES VALUES
(#{name}, #{appId}, #{clusterId}, #{regionList}, #{mode}, #{description}) (#{name}, #{identification}, #{appId}, #{clusterId}, #{regionList}, #{mode}, #{description})
</insert> </insert>
<delete id="deleteById" parameterType="java.lang.Long"> <delete id="deleteById" parameterType="java.lang.Long">
@@ -27,7 +28,8 @@
<update id="updateById" parameterType="com.xiaojukeji.kafka.manager.common.entity.pojo.LogicalClusterDO"> <update id="updateById" parameterType="com.xiaojukeji.kafka.manager.common.entity.pojo.LogicalClusterDO">
UPDATE logical_cluster SET UPDATE logical_cluster SET
<!-- name=#{name}, 不允许修改 name, 会影响到上报的数据 --> name=#{name},
<!-- identification=#{identification}, 不允许修改 identification, 会影响到上报的数据 -->
cluster_id=#{clusterId}, cluster_id=#{clusterId},
region_list=#{regionList}, region_list=#{regionList},
description=#{description}, description=#{description},

View File

@@ -4,6 +4,7 @@ import com.xiaojukeji.kafka.manager.common.utils.ListUtils;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils; import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.monitor.common.entry.*; import com.xiaojukeji.kafka.manager.monitor.common.entry.*;
import com.xiaojukeji.kafka.manager.monitor.component.n9e.entry.*; import com.xiaojukeji.kafka.manager.monitor.component.n9e.entry.*;
import com.xiaojukeji.kafka.manager.monitor.component.n9e.entry.bizenum.CategoryEnum;
import java.util.*; import java.util.*;
@@ -44,7 +45,7 @@ public class N9eConverter {
if (!ValidateUtils.isNull(strategy.getId())) { if (!ValidateUtils.isNull(strategy.getId())) {
n9eStrategy.setId(strategy.getId().intValue()); n9eStrategy.setId(strategy.getId().intValue());
} }
n9eStrategy.setCategory(1); n9eStrategy.setCategory(CategoryEnum.DEVICE_INDEPENDENT.getCode());
n9eStrategy.setName(strategy.getName()); n9eStrategy.setName(strategy.getName());
n9eStrategy.setNid(monitorN9eNid); n9eStrategy.setNid(monitorN9eNid);
n9eStrategy.setExcl_nid(new ArrayList<>()); n9eStrategy.setExcl_nid(new ArrayList<>());
@@ -77,7 +78,13 @@ public class N9eConverter {
n9eStrategy.setRecovery_notify(0); n9eStrategy.setRecovery_notify(0);
StrategyAction strategyAction = strategy.getStrategyActionList().get(0); StrategyAction strategyAction = strategy.getStrategyActionList().get(0);
n9eStrategy.setConverge(ListUtils.string2IntList(strategyAction.getConverge()));
// 单位转换, 夜莺的单位是秒, KM前端的单位是分钟
List<Integer> convergeList = ListUtils.string2IntList(strategyAction.getConverge());
if (!ValidateUtils.isEmptyList(convergeList)) {
convergeList.set(0, convergeList.get(0) * 60);
}
n9eStrategy.setConverge(convergeList);
List<Integer> notifyGroups = new ArrayList<>(); List<Integer> notifyGroups = new ArrayList<>();
for (String name: ListUtils.string2StrList(strategyAction.getNotifyGroup())) { for (String name: ListUtils.string2StrList(strategyAction.getNotifyGroup())) {
@@ -167,7 +174,13 @@ public class N9eConverter {
} }
strategyAction.setNotifyGroup(ListUtils.strList2String(notifyGroups)); strategyAction.setNotifyGroup(ListUtils.strList2String(notifyGroups));
strategyAction.setConverge(ListUtils.intList2String(n9eStrategy.getConverge())); // 单位转换, 夜莺的单位是秒, KM前端的单位是分钟
List<Integer> convergeList = n9eStrategy.getConverge();
if (!ValidateUtils.isEmptyList(convergeList)) {
convergeList.set(0, convergeList.get(0) / 60);
}
strategyAction.setConverge(ListUtils.intList2String(convergeList));
strategyAction.setCallback(n9eStrategy.getCallback()); strategyAction.setCallback(n9eStrategy.getCallback());
strategy.setStrategyActionList(Arrays.asList(strategyAction)); strategy.setStrategyActionList(Arrays.asList(strategyAction));

View File

@@ -0,0 +1,23 @@
package com.xiaojukeji.kafka.manager.monitor.component.n9e.entry.bizenum;
public enum CategoryEnum {
DEVICE_RELATED(1, "设备相关"),
DEVICE_INDEPENDENT(2, "设备无关"),
;
private int code;
private String msg;
CategoryEnum(int code, String msg) {
this.code = code;
this.msg = msg;
}
public int getCode() {
return code;
}
public String getMsg() {
return msg;
}
}

View File

@@ -73,7 +73,7 @@ public class SinkCommunityTopicMetrics2Monitor extends AbstractScheduledTask<Clu
continue; continue;
} }
metricSinkPoints.addAll(recordTopics(now, logicalClusterDO.getName(), metrics)); metricSinkPoints.addAll(recordTopics(now, logicalClusterDO.getIdentification(), metrics));
if (metricSinkPoints.size() > MonitorSinkConstant.MONITOR_SYSTEM_SINK_THRESHOLD) { if (metricSinkPoints.size() > MonitorSinkConstant.MONITOR_SYSTEM_SINK_THRESHOLD) {
abstractMonitor.sinkMetrics(metricSinkPoints); abstractMonitor.sinkMetrics(metricSinkPoints);
metricSinkPoints.clear(); metricSinkPoints.clear();

View File

@@ -64,7 +64,7 @@ public class SinkConsumerMetrics2Monitor implements ApplicationListener<Consumer
continue; continue;
} }
metricSinkPoints.addAll(recordConsumer(elem.getTimestampUnitMs() / 1000, logicalClusterDO.getName(), elem)); metricSinkPoints.addAll(recordConsumer(elem.getTimestampUnitMs() / 1000, logicalClusterDO.getIdentification(), elem));
if (metricSinkPoints.size() > MonitorSinkConstant.MONITOR_SYSTEM_SINK_THRESHOLD) { if (metricSinkPoints.size() > MonitorSinkConstant.MONITOR_SYSTEM_SINK_THRESHOLD) {
abstractMonitor.sinkMetrics(metricSinkPoints); abstractMonitor.sinkMetrics(metricSinkPoints);
metricSinkPoints.clear(); metricSinkPoints.clear();

View File

@@ -57,7 +57,7 @@ public class SinkTopicThrottledMetrics2Monitor implements ApplicationListener<To
continue; continue;
} }
MetricSinkPoint point = recordTopicThrottled(startTime, logicalClusterDO.getName(), elem); MetricSinkPoint point = recordTopicThrottled(startTime, logicalClusterDO.getIdentification(), elem);
if (ValidateUtils.isNull(point)) { if (ValidateUtils.isNull(point)) {
continue; continue;
} }

View File

@@ -40,8 +40,7 @@ public class NormalAccountController {
public Result<List<AccountSummaryVO>> searchOnJobStaffByKeyWord(@RequestParam("keyWord") String keyWord) { public Result<List<AccountSummaryVO>> searchOnJobStaffByKeyWord(@RequestParam("keyWord") String keyWord) {
List<EnterpriseStaff> staffList = accountService.searchAccountByPrefix(keyWord); List<EnterpriseStaff> staffList = accountService.searchAccountByPrefix(keyWord);
if (ValidateUtils.isEmptyList(staffList)) { if (ValidateUtils.isEmptyList(staffList)) {
LOGGER.info("class=NormalAccountController||method=searchOnJobStaffByKeyWord||keyWord={}||msg=staffList is empty!" LOGGER.info("class=NormalAccountController||method=searchOnJobStaffByKeyWord||keyWord={}||msg=staffList is empty!", keyWord);
,keyWord);
return new Result<>(); return new Result<>();
} }
List<AccountSummaryVO> voList = new ArrayList<>(); List<AccountSummaryVO> voList = new ArrayList<>();

View File

@@ -69,7 +69,8 @@ public class NormalTopicController {
} }
return new Result<>(TopicModelConverter.convert2TopicBasicVO( return new Result<>(TopicModelConverter.convert2TopicBasicVO(
topicService.getTopicBasicDTO(physicalClusterId, topicName), topicService.getTopicBasicDTO(physicalClusterId, topicName),
clusterService.getById(physicalClusterId) clusterService.getById(physicalClusterId),
logicalClusterMetadataManager.getTopicLogicalClusterId(physicalClusterId, topicName)
)); ));
} }

View File

@@ -166,7 +166,7 @@ public class OpUtilsController {
if (!ResultStatus.SUCCESS.equals(rs)) { if (!ResultStatus.SUCCESS.equals(rs)) {
return Result.buildFrom(rs); return Result.buildFrom(rs);
} }
topicManagerService.modifyTopic(dto.getClusterId(), dto.getTopicName(), dto.getDescription(), operator); topicManagerService.modifyTopicByOp(dto.getClusterId(), dto.getTopicName(), dto.getAppId(), dto.getDescription(), operator);
return new Result(); return new Result();
} }

View File

@@ -55,6 +55,7 @@ public class ClusterModelConverter {
CopyUtils.copyProperties(vo, logicalCluster); CopyUtils.copyProperties(vo, logicalCluster);
vo.setClusterId(logicalCluster.getLogicalClusterId()); vo.setClusterId(logicalCluster.getLogicalClusterId());
vo.setClusterName(logicalCluster.getLogicalClusterName()); vo.setClusterName(logicalCluster.getLogicalClusterName());
vo.setClusterIdentification(logicalCluster.getLogicalClusterIdentification());
return vo; return vo;
} }
@@ -78,9 +79,8 @@ public class ClusterModelConverter {
ClusterDO clusterDO = new ClusterDO(); ClusterDO clusterDO = new ClusterDO();
CopyUtils.copyProperties(clusterDO, reqObj); CopyUtils.copyProperties(clusterDO, reqObj);
clusterDO.setId(reqObj.getClusterId()); clusterDO.setId(reqObj.getClusterId());
clusterDO.setSecurityProperties( clusterDO.setSecurityProperties(ValidateUtils.isNull(reqObj.getSecurityProperties())? "": reqObj.getSecurityProperties());
ValidateUtils.isNull(clusterDO.getSecurityProperties())? "": clusterDO.getSecurityProperties() clusterDO.setJmxProperties(ValidateUtils.isNull(reqObj.getJmxProperties())? "": reqObj.getJmxProperties());
);
return clusterDO; return clusterDO;
} }

View File

@@ -21,6 +21,7 @@ public class LogicalClusterModelConverter {
LogicalClusterVO vo = new LogicalClusterVO(); LogicalClusterVO vo = new LogicalClusterVO();
vo.setLogicalClusterId(logicalClusterDO.getId()); vo.setLogicalClusterId(logicalClusterDO.getId());
vo.setLogicalClusterName(logicalClusterDO.getName()); vo.setLogicalClusterName(logicalClusterDO.getName());
vo.setLogicalClusterIdentification(logicalClusterDO.getIdentification());
vo.setPhysicalClusterId(logicalClusterDO.getClusterId()); vo.setPhysicalClusterId(logicalClusterDO.getClusterId());
vo.setMode(logicalClusterDO.getMode()); vo.setMode(logicalClusterDO.getMode());
vo.setRegionIdList(ListUtils.string2LongList(logicalClusterDO.getRegionList())); vo.setRegionIdList(ListUtils.string2LongList(logicalClusterDO.getRegionList()));
@@ -45,6 +46,7 @@ public class LogicalClusterModelConverter {
public static LogicalClusterDO convert2LogicalClusterDO(LogicalClusterDTO dto) { public static LogicalClusterDO convert2LogicalClusterDO(LogicalClusterDTO dto) {
LogicalClusterDO logicalClusterDO = new LogicalClusterDO(); LogicalClusterDO logicalClusterDO = new LogicalClusterDO();
logicalClusterDO.setName(dto.getName()); logicalClusterDO.setName(dto.getName());
logicalClusterDO.setIdentification(dto.getIdentification());
logicalClusterDO.setClusterId(dto.getClusterId()); logicalClusterDO.setClusterId(dto.getClusterId());
logicalClusterDO.setRegionList(ListUtils.longList2String(dto.getRegionIdList())); logicalClusterDO.setRegionList(ListUtils.longList2String(dto.getRegionIdList()));
logicalClusterDO.setMode(dto.getMode()); logicalClusterDO.setMode(dto.getMode());

View File

@@ -22,9 +22,9 @@ import java.util.List;
* @date 2017/6/1. * @date 2017/6/1.
*/ */
public class TopicModelConverter { public class TopicModelConverter {
public static TopicBasicVO convert2TopicBasicVO(TopicBasicDTO dto, ClusterDO clusterDO) { public static TopicBasicVO convert2TopicBasicVO(TopicBasicDTO dto, ClusterDO clusterDO, Long logicalClusterId) {
TopicBasicVO vo = new TopicBasicVO(); TopicBasicVO vo = new TopicBasicVO();
vo.setClusterId(dto.getClusterId()); vo.setClusterId(logicalClusterId);
vo.setAppId(dto.getAppId()); vo.setAppId(dto.getAppId());
vo.setAppName(dto.getAppName()); vo.setAppName(dto.getAppName());
vo.setPartitionNum(dto.getPartitionNum()); vo.setPartitionNum(dto.getPartitionNum());

View File

@@ -11,7 +11,7 @@ spring:
name: kafkamanager name: kafkamanager
datasource: datasource:
kafka-manager: kafka-manager:
jdbc-url: jdbc:mysql://127.0.0.1:3306/kafka_manager?characterEncoding=UTF-8&serverTimezone=GMT%2B8 jdbc-url: jdbc:mysql://127.0.0.1:3306/logi_kafka_manager?characterEncoding=UTF-8&serverTimezone=GMT%2B8
username: admin username: admin
password: admin password: admin
driver-class-name: com.mysql.jdbc.Driver driver-class-name: com.mysql.jdbc.Driver