Mirror of https://github.com/didi/KnowStreaming.git (synced 2025-12-24 11:52:08 +08:00)

Compare commits: v2.5.0...v2.8.1_fef (173 commits)
Commit SHA1s:

af27e48117, b16a7b9bff, e81c0f3040, 462303fca0, 4405703e42, 23e398e121, b17bb89d04, 5590cebf8f,
1fa043f09d, 3bd0af1451, 1545962745, d032571681, 33fb0acc7e, 1ec68a91e2, a23c113a46, 371ae2c0a5,
8f8f6ffa27, 475fe0d91f, 3d74e60d03, 83ac83bb28, 8478fb857c, 7074bdaa9f, 58164294cc, 7c0e9df156,
bd62212ecb, 2292039b42, 73f8da8d5a, e51dbe0ca7, 482a375e31, 689c5ce455, 734a020ecc, 44d537f78c,
b4c60eb910, e120b32375, de54966d30, 39a6302c18, 05ceeea4b0, 9f8e3373a8, 42521cbae4, b23c35197e,
70f28d9ac4, 912d73d98a, 2a720fce6f, e4534c359f, b91bec15f2, 67ad5cacb7, b4a739476a, a7bf2085db,
c3802cf48b, 54711c4491, fcb52a69c0, 1b632f9754, 73d7a0ecdc, 08943593b3, c949a88f20, a49c11f655,
a66aed4a88, 0045c953a0, fdce41b451, 4d5e4d0f00, 82c9b6481e, 053d4dcb18, e1b2c442aa, 0ed8ba8ca4,
f195847c68, 5beb13b17e, 7d9ec05062, fc604a9eaf, 4f3c1ad9b6, 6d45ed586c, 1afb633b4f, 34d9f9174b,
3b0c208eff, 05022f8db4, 3336de457a, 10a27bc29c, 542e5d3c2d, 7372617b14, 89735a130b, 859cf74bd6,
e2744ab399, 16bd065098, 71c52e6dd7, a7f8c3ced3, f3f0432c65, 426ba2d150, 2790099efa, f6ba8bc95e,
d6181522c0, 04cf071ca6, e4371b5d02, 52c52b2a0d, 8f40f10575, fe0f6fcd0b, 31b1ad8bb4, 373680d854,
9e3bc80495, 52ccaeffd5, 18136c12fd, dec3f9e75e, ccc0ee4d18, 69e9708080, 5944ba099a, ada2718b5e,
1f87bd63e7, c0f3259cf6, e1d5749a40, a8d7eb27d9, 1eecdf3829, be8b345889, 074da389b3, 4df2dc09fe,
e8d42ba074, c036483680, 2818584db6, 37585f760d, f5477a03a1, 50388425b2, 725c59eab0, 7bf1de29a4,
d90c3fc7dd, 80785ce072, 44ea896de8, d30cb8a0f0, 6c7b333b34, 6d34a00e77, 1f353e10ce, 4e10f8d1c5,
a22cd853fc, 354e0d6a87, dfabe28645, fce230da48, 055ba9bda6, ec19c3b4dd, 37aa526404, 86c1faa40f,
8dcf15d0f9, 6835e1e680, d8f89b8f67, ec28eba781, 5ef8fff5bc, 4f317b76fa, 61672637dc, ecf6e8f664,
4115975320, 21904a8609, 10b0a3dabb, b2091e9aed, f2cb5bd77c, 19c61c52e6, b327359183, 9e9bb72e17,
a23907e009, ad131f5a2c, dbeae4ca68, 0fb0e94848, 95d2a82d35, 39cccd568e, 19b7f6ad8c, 41c000cf47,
1b8ea61e87, 4538593236, 8086ef355b, 60d038fe46, ff0f4463be, 820571d993, fffc0c3add, 022f9eb551,
6e7b82cfcb, b5fb24b360, b77345222c, 793e81406e, cef1ec95d2
1 .gitignore (vendored)

@@ -111,3 +111,4 @@ dist/
```
dist/*
kafka-manager-web/src/main/resources/templates/
.DS_Store
kafka-manager-console/package-lock.json
```
41 Dockerfile (Normal file)

@@ -0,0 +1,41 @@
```dockerfile
ARG MAVEN_VERSION=3.8.4-openjdk-8-slim
ARG JAVA_VERSION=8-jdk-alpine3.9
FROM maven:${MAVEN_VERSION} AS builder
ARG CONSOLE_ENABLE=true

WORKDIR /opt
COPY . .
COPY distribution/conf/settings.xml /root/.m2/settings.xml

# whether to build console
RUN set -eux; \
    if [ $CONSOLE_ENABLE = 'false' ]; then \
        sed -i "/kafka-manager-console/d" pom.xml; \
    fi \
    && mvn -Dmaven.test.skip=true clean install -U

FROM openjdk:${JAVA_VERSION}

RUN sed -i 's/dl-cdn.alpinelinux.org/mirrors.aliyun.com/g' /etc/apk/repositories && apk add --no-cache tini

ENV TZ=Asia/Shanghai
ENV AGENT_HOME=/opt/agent/

COPY --from=builder /opt/kafka-manager-web/target/kafka-manager.jar /opt
COPY --from=builder /opt/container/dockerfiles/docker-depends/config.yaml $AGENT_HOME
COPY --from=builder /opt/container/dockerfiles/docker-depends/jmx_prometheus_javaagent-0.15.0.jar $AGENT_HOME
COPY --from=builder /opt/distribution/conf/application-docker.yml /opt

WORKDIR /opt

ENV JAVA_AGENT="-javaagent:$AGENT_HOME/jmx_prometheus_javaagent-0.15.0.jar=9999:$AGENT_HOME/config.yaml"
ENV JAVA_HEAP_OPTS="-Xms1024M -Xmx1024M -Xmn100M "
ENV JAVA_OPTS="-verbose:gc \
    -XX:MaxMetaspaceSize=256M -XX:+DisableExplicitGC -XX:+UseStringDeduplication \
    -XX:+UseG1GC -XX:+HeapDumpOnOutOfMemoryError -XX:-UseContainerSupport"

EXPOSE 8080 9999

ENTRYPOINT ["tini", "--"]

CMD [ "sh", "-c", "java -jar $JAVA_AGENT $JAVA_HEAP_OPTS $JAVA_OPTS kafka-manager.jar --spring.config.location=application-docker.yml"]
```
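For orientation, a minimal build sketch for this multi-stage Dockerfile; the `CONSOLE_ENABLE` build argument comes from the Dockerfile above, while the image tag `logikm:2.6.0` is only an illustrative assumption.

```bash
# Build from the repository root; set CONSOLE_ENABLE=false to skip the front-end console module
# (the tag name is an assumption, not part of the repo).
docker build --build-arg CONSOLE_ENABLE=true -t logikm:2.6.0 .
```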
53 README.md

@@ -5,7 +5,7 @@

**一站式`Apache Kafka`集群指标监控与运维管控平台**

`LogiKM开源至今备受关注,考虑到开源项目应该更贴合Apache Kafka未来发展方向,经项目组慎重考虑,预计22年下半年将其品牌升级成Know Streaming,届时项目名称和Logo也将统一更新,感谢大家一如既往的支持,敬请期待!`

阅读本README文档,您可以了解到滴滴Logi-KafkaManager的用户群体、产品定位等信息,并通过体验地址,快速体验Kafka集群指标监控与运维管控的全流程。

@@ -55,41 +55,56 @@

## 2 相关文档

### 2.1 产品文档
- [滴滴Logi-KafkaManager 安装手册](docs/install_guide/install_guide_cn.md)
- [滴滴Logi-KafkaManager 接入集群](docs/user_guide/add_cluster/add_cluster.md)
- [滴滴Logi-KafkaManager 用户使用手册](docs/user_guide/user_guide_cn.md)
- [滴滴Logi-KafkaManager FAQ](docs/user_guide/faq.md)
- [滴滴LogiKM 安装手册](docs/install_guide/install_guide_cn.md)
- [滴滴LogiKM 接入集群](docs/user_guide/add_cluster/add_cluster.md)
- [滴滴LogiKM 用户使用手册](docs/user_guide/user_guide_cn.md)
- [滴滴LogiKM FAQ](docs/user_guide/faq.md)

### 2.2 社区文章
- [滴滴云官网产品介绍](https://www.didiyun.com/production/logi-KafkaManager.html)
- [7年沉淀之作--滴滴Logi日志服务套件](https://mp.weixin.qq.com/s/-KQp-Qo3WKEOc9wIR2iFnw)
- [滴滴Logi-KafkaManager 一站式Kafka监控与管控平台](https://mp.weixin.qq.com/s/9qSZIkqCnU6u9nLMvOOjIQ)
- [滴滴Logi-KafkaManager 开源之路](https://xie.infoq.cn/article/0223091a99e697412073c0d64)
- [滴滴Logi-KafkaManager 系列视频教程](https://mp.weixin.qq.com/s/9X7gH0tptHPtfjPPSdGO8g)
- [kafka实践(十五):滴滴开源Kafka管控平台 Logi-KafkaManager研究--A叶子叶来](https://blog.csdn.net/yezonggang/article/details/113106244)
- [kafka的灵魂伴侣Logi-KafkaManager系列文章专栏 --石臻](https://blog.csdn.net/u010634066/category_10977588.html)
- [滴滴LogiKM 一站式Kafka监控与管控平台](https://mp.weixin.qq.com/s/9qSZIkqCnU6u9nLMvOOjIQ)
- [滴滴LogiKM 开源之路](https://xie.infoq.cn/article/0223091a99e697412073c0d64)
- [滴滴LogiKM 系列视频教程](https://space.bilibili.com/442531657/channel/seriesdetail?sid=571649)
- [kafka最强最全知识图谱](https://www.szzdzhp.com/kafka/)
- [滴滴LogiKM新用户入门系列文章专栏 --石臻臻](https://www.szzdzhp.com/categories/LogIKM/)
- [kafka实践(十五):滴滴开源Kafka管控平台 LogiKM研究--A叶子叶来](https://blog.csdn.net/yezonggang/article/details/113106244)
- [基于云原生应用管理平台Rainbond安装 滴滴LogiKM](https://www.rainbond.com/docs/opensource-app/logikm/?channel=logikm)

## 3 滴滴Logi开源用户交流群



微信加群:添加mike_zhangliang的微信号备注Logi加群或关注公众号 云原生可观测性 回复 "Logi加群"

想跟各个大佬交流Kafka Es 等中间件/大数据相关技术请 加微信进群。

微信加群:添加<font color=red>mike_zhangliang</font>、<font color=red>danke-x</font>的微信号备注Logi加群或关注公众号 云原生可观测性 回复 "Logi加群"

## 4 知识星球


<img width="447" alt="image" src="https://user-images.githubusercontent.com/71620349/147314042-843a371a-48c0-4d9a-a65e-ca40236f3300.png">

✅知识星球首个【Kafka中文社区】,内测期免费加入~https://z.didi.cn/5gSF9
有问必答~!
互动有礼~!
1600+群友一起共建国内最专业的【Kafka中文社区】
PS:提问请尽量把问题一次性描述清楚,并告知环境信息情况哦~!如使用版本、操作步骤、报错/警告信息等,方便嘉宾们快速解答~
<br>
<center>
✅我们正在组建国内最大最权威的
</center>
<br>
<center>
<font color=red size=5><b>【Kafka中文社区】</b></font>
</center>

在这里你可以结交各大互联网Kafka大佬以及3000+Kafka爱好者,一起实现知识共享,实时掌控最新行业资讯,期待您的加入中~https://z.didi.cn/5gSF9

<font color=red size=5>有问必答~! </font>

<font color=red size=5>互动有礼~! </font>

PS:提问请尽量把问题一次性描述清楚,并告知环境信息情况哦~!如使用版本、操作步骤、报错/警告信息等,方便大V们快速解答~

## 5 项目成员

### 5.1 内部核心人员

`iceyuhui`、`liuyaguang`、`limengmonty`、`zhangliangmike`、`nullhuangyiming`、`zengqiao`、`eilenexuzhe`、`huangjiaweihjw`、`zhaoyinrui`、`marzkonglingxu`、`joysunchao`
`iceyuhui`、`liuyaguang`、`limengmonty`、`zhangliangmike`、`zhaoqingrong`、`xiepeng`、`nullhuangyiming`、`zengqiao`、`eilenexuzhe`、`huangjiaweihjw`、`zhaoyinrui`、`marzkonglingxu`、`joysunchao`、`石臻臻`

### 5.2 外部贡献者
@@ -7,6 +7,39 @@

---

## v2.6.0

版本上线时间:2022-01-24

### 能力提升
- 增加简单回退工具类

### 体验优化
- 补充周期任务说明文档
- 补充集群安装部署使用说明文档
- 升级Swagger、SpringFramework、SpringBoot、ECharts版本
- 优化Task模块的日志输出
- 优化cron表达式解析失败后退出无任何日志提示问题
- Ldap用户接入时,增加部门及邮箱信息等
- 对Jmx模块,增加连接失败后的回退机制及错误日志优化
- 增加线程池、客户端池可配置
- 删除无用的jmx_prometheus_javaagent-0.14.0.jar
- 优化迁移任务名称
- 优化创建Region时,Region容量信息不能立即被更新问题
- 引入lombok
- 更新视频教程
- 优化kcm_script.sh脚本中的LogiKM地址为可通过程序传入
- 第三方接口及网关接口,增加是否跳过登录的开关
- extends模块相关配置调整为非必须在application.yml中配置

### bug修复
- 修复批量往DB写入空指标数组时报SQL语法异常的问题
- 修复网关增加配置及修改配置时,version不变化问题
- 修复集群列表页,提示框遮挡问题
- 修复对高版本Broker元信息协议解析失败的问题
- 修复Dockerfile执行时提示缺少application.yml文件的问题
- 修复逻辑集群更新时,会报空指针的问题

## v2.4.1+

版本上线时间:2021-05-21
@@ -1,28 +0,0 @@
```dockerfile
FROM openjdk:16-jdk-alpine3.13

LABEL author="fengxsong"
RUN sed -i 's/dl-cdn.alpinelinux.org/mirrors.aliyun.com/g' /etc/apk/repositories && apk add --no-cache tini

ENV VERSION 2.4.2
WORKDIR /opt/

ENV AGENT_HOME /opt/agent/
COPY docker-depends/config.yaml $AGENT_HOME
COPY docker-depends/jmx_prometheus_javaagent-0.15.0.jar $AGENT_HOME

ENV JAVA_AGENT="-javaagent:$AGENT_HOME/jmx_prometheus_javaagent-0.15.0.jar=9999:$AGENT_HOME/config.yaml"
ENV JAVA_HEAP_OPTS="-Xms1024M -Xmx1024M -Xmn100M "
ENV JAVA_OPTS="-verbose:gc \
    -XX:MaxMetaspaceSize=256M -XX:+DisableExplicitGC -XX:+UseStringDeduplication \
    -XX:+UseG1GC -XX:+HeapDumpOnOutOfMemoryError -XX:-UseContainerSupport"

RUN wget https://github.com/didi/Logi-KafkaManager/releases/download/v${VERSION}/kafka-manager-${VERSION}.tar.gz && \
    tar xvf kafka-manager-${VERSION}.tar.gz && \
    mv kafka-manager-${VERSION}/kafka-manager.jar /opt/app.jar && \
    rm -rf kafka-manager-${VERSION}*

EXPOSE 8080 9999

ENTRYPOINT ["tini", "--"]

CMD [ "sh", "-c", "java -jar $JAVA_AGENT $JAVA_HEAP_OPTS $JAVA_OPTS app.jar --spring.config.location=application.yml"]
```

Binary file not shown.
13 container/dockerfiles/mysql/Dockerfile (Normal file)

@@ -0,0 +1,13 @@
```dockerfile
FROM mysql:5.7.37

COPY mysqld.cnf /etc/mysql/mysql.conf.d/
ENV TZ=Asia/Shanghai
ENV MYSQL_ROOT_PASSWORD=root

RUN apt-get update \
    && apt -y install wget \
    && wget https://ghproxy.com/https://raw.githubusercontent.com/didi/LogiKM/master/distribution/conf/create_mysql_table.sql -O /docker-entrypoint-initdb.d/create_mysql_table.sql

EXPOSE 3306

VOLUME ["/var/lib/mysql"]
```

24 container/dockerfiles/mysql/mysqld.cnf (Normal file)

@@ -0,0 +1,24 @@
```ini
[client]
default-character-set = utf8

[mysqld]
character_set_server = utf8
pid-file = /var/run/mysqld/mysqld.pid
socket = /var/run/mysqld/mysqld.sock
datadir = /var/lib/mysql
symbolic-links=0

max_allowed_packet = 10M
sort_buffer_size = 1M
read_rnd_buffer_size = 2M
max_connections=2000

lower_case_table_names=1
character-set-server=utf8

max_allowed_packet = 1G
sql_mode=STRICT_TRANS_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION
group_concat_max_len = 102400
default-time-zone = '+08:00'
[mysql]
default-character-set = utf8
```
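A hedged sketch of building and starting this bundled MySQL image; the image and container names are assumptions, while the port and root password come from the Dockerfile above.

```bash
# Build the MySQL image from its directory (tag name is an assumption)
docker build -t logikm-mysql container/dockerfiles/mysql

# Start it; MYSQL_ROOT_PASSWORD=root and EXPOSE 3306 are set in the Dockerfile,
# and the init SQL downloaded at build time creates the logi_kafka_manager schema on first start
docker run -d --name logikm-mysql -p 3306:3306 logikm-mysql
```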
@@ -55,7 +55,7 @@ data:
```yaml
didi:
  app-topic-metrics-enabled: false
  topic-request-time-metrics-enabled: false
  topic-throttled-metrics: false
  topic-throttled-metrics-enabled: false
  save-days: 7

# 任务相关的开关
```

@@ -67,7 +67,16 @@ data:
```yaml
# ldap settings
ldap:
  enabled: false
  authUserRegistration: false
  url: ldap://127.0.0.1:389/
  basedn: dc=tsign,dc=cn
  factory: com.sun.jndi.ldap.LdapCtxFactory
  filter: sAMAccountName
  security:
    authentication: simple
    principal: cn=admin,dc=tsign,dc=cn
    credentials: admin
  auth-user-registration: false
  auth-user-registration-role: normal

kcm:
  enabled: false
```
28 distribution/conf/application-docker.yml (Normal file)

@@ -0,0 +1,28 @@
```yaml
## kafka-manager的配置文件,该文件中的配置会覆盖默认配置
## 下面的配置信息基本就是jar中的 application.yml默认配置了;
## 可以只修改自己变更的配置,其他的删除就行了; 比如只配置一下mysql

server:
  port: 8080
  tomcat:
    accept-count: 1000
    max-connections: 10000
    max-threads: 800
    min-spare-threads: 100

spring:
  application:
    name: kafkamanager
    version: 2.6.0
  profiles:
    active: dev
  datasource:
    kafka-manager:
      jdbc-url: jdbc:mysql://${LOGI_MYSQL_HOST:mysql}:${LOGI_MYSQL_PORT:3306}/${LOGI_MYSQL_DATABASE:logi_kafka_manager}?characterEncoding=UTF-8&useSSL=false&serverTimezone=GMT%2B8
      username: ${LOGI_MYSQL_USER:root}
      password: ${LOGI_MYSQL_PASSWORD:root}
      driver-class-name: com.mysql.cj.jdbc.Driver
  main:
    allow-bean-definition-overriding: true
```
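The `LOGI_MYSQL_*` placeholders above resolve from environment variables with the defaults shown. A hedged sketch of overriding them when starting the container built from the Dockerfile earlier in this diff; the image tag, container name, and host address are assumptions.

```bash
# Point the containerized kafka-manager at an external MySQL instance.
# Variable names come from application-docker.yml; everything else is illustrative.
docker run -d --name logikm \
  -p 8080:8080 -p 9999:9999 \
  -e LOGI_MYSQL_HOST=10.0.0.12 \
  -e LOGI_MYSQL_PORT=3306 \
  -e LOGI_MYSQL_DATABASE=logi_kafka_manager \
  -e LOGI_MYSQL_USER=root \
  -e LOGI_MYSQL_PASSWORD=root \
  logikm:2.6.0
```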
@@ -26,7 +26,6 @@ spring:
```yaml
  main:
    allow-bean-definition-overriding: true


  servlet:
    multipart:
      max-file-size: 100MB
```
@@ -36,29 +35,58 @@ logging:
```yaml
  config: classpath:logback-spring.xml

custom:
  idc: cn # 部署的数据中心, 忽略该配置, 后续会进行删除
  jmx:
    max-conn: 10 # 2.3版本配置不在这个地方生效
  idc: cn
  store-metrics-task:
    community:
      broker-metrics-enabled: true # 社区部分broker metrics信息收集开关, 关闭之后metrics信息将不会进行收集及写DB
      topic-metrics-enabled: true # 社区部分topic的metrics信息收集开关, 关闭之后metrics信息将不会进行收集及写DB
    didi:
      app-topic-metrics-enabled: false # 滴滴埋入的指标, 社区AK不存在该指标,因此默认关闭
      topic-request-time-metrics-enabled: false # 滴滴埋入的指标, 社区AK不存在该指标,因此默认关闭
      topic-throttled-metrics: false # 滴滴埋入的指标, 社区AK不存在该指标,因此默认关闭
      save-days: 7 #指标在DB中保持的天数,-1表示永久保存,7表示保存近7天的数据
      topic-metrics-enabled: true
    didi: # 滴滴Kafka特有的指标
      app-topic-metrics-enabled: false
      topic-request-time-metrics-enabled: false
      topic-throttled-metrics-enabled: false

# 任务相关的开关
# 任务相关的配置
task:
  op:
    sync-topic-enabled: false # 未落盘的Topic定期同步到DB中
    order-auto-exec: # 工单自动化审批线程的开关
      topic-enabled: false # Topic工单自动化审批开关, false:关闭自动化审批, true:开启
      app-enabled: false # App工单自动化审批开关, false:关闭自动化审批, true:开启
    sync-topic-enabled: false # 未落盘的Topic定期同步到DB中
    order-auto-exec: # 工单自动化审批线程的开关
      topic-enabled: false # Topic工单自动化审批开关, false:关闭自动化审批, true:开启
      app-enabled: false # App工单自动化审批开关, false:关闭自动化审批, true:开启
  metrics:
    collect: # 收集指标
      broker-metrics-enabled: true # 收集Broker指标
    sink: # 上报指标
      cluster-metrics: # 上报cluster指标
        sink-db-enabled: true # 上报到db
      broker-metrics: # 上报broker指标
        sink-db-enabled: true # 上报到db
    delete: # 删除指标
      delete-limit-size: 1000 # 单次删除的批大小
      cluster-metrics-save-days: 14 # 集群指标保存天数
      broker-metrics-save-days: 14 # Broker指标保存天数
      topic-metrics-save-days: 7 # Topic指标保存天数
      topic-request-time-metrics-save-days: 7 # Topic请求耗时指标保存天数
      topic-throttled-metrics-save-days: 7 # Topic限流指标保存天数
      app-topic-metrics-save-days: 7 # App+Topic指标保存天数

thread-pool:
  collect-metrics:
    thread-num: 256 # 收集指标线程池大小
    queue-size: 5000 # 收集指标线程池的queue大小
  api-call:
    thread-num: 16 # api服务线程池大小
    queue-size: 5000 # api服务线程池的queue大小

client-pool:
  kafka-consumer:
    min-idle-client-num: 24 # 最小空闲客户端数
    max-idle-client-num: 24 # 最大空闲客户端数
    max-total-client-num: 24 # 最大客户端数
    borrow-timeout-unit-ms: 3000 # 租借超时时间,单位毫秒

# ldap相关的配置
account:
  jump-login:
    gateway-api: false # 网关接口
    third-part-api: false # 第三方接口
  ldap:
    enabled: false
    url: ldap://127.0.0.1:389/
```

@@ -72,28 +100,20 @@ account:
```yaml
    auth-user-registration: true
    auth-user-registration-role: normal

# 集群升级部署相关的功能,需要配合夜莺及S3进行使用
kcm:
  enabled: false
  s3:
kcm: # 集群安装部署,仅安装broker
  enabled: false # 是否开启
  s3: # s3 存储服务
    endpoint: s3.didiyunapi.com
    access-key: 1234567890
    secret-key: 0987654321
    bucket: logi-kafka
  n9e:
    base-url: http://127.0.0.1:8004
    user-token: 12345678
    timeout: 300
    account: root
    script-file: kcm_script.sh

# 监控告警相关的功能,需要配合夜莺进行使用
# enabled: 表示是否开启监控告警的功能, true: 开启, false: 不开启
# n9e.nid: 夜莺的节点ID
# n9e.user-token: 用户的密钥,在夜莺的个人设置中
# n9e.mon.base-url: 监控地址
# n9e.sink.base-url: 数据上报地址
# n9e.rdb.base-url: 用户资源中心地址
  n9e: # 夜莺
    base-url: http://127.0.0.1:8004 # 夜莺job服务地址
    user-token: 12345678 # 用户的token
    timeout: 300 # 当台操作的超时时间
    account: root # 操作时使用的账号
    script-file: kcm_script.sh # 脚本,已内置好,在源码的kcm模块内,此处配置无需修改
    logikm-url: http://127.0.0.1:8080 # logikm部署地址,部署时kcm_script.sh会调用logikm检查部署中的一些状态

monitor:
  enabled: false
```

@@ -107,10 +127,9 @@ monitor:
```yaml
  rdb:
    base-url: http://127.0.0.1:8000 # 夜莺v4版本,默认端口统一调整为了8000


notify: # 通知的功能
  kafka: # 默认通知发送到kafka的指定Topic中
    cluster-id: 95 # Topic的集群ID
    topic-name: didi-kafka-notify # Topic名称
  order: # 部署的KM的地址
    detail-url: http://127.0.0.1
notify:
  kafka:
    cluster-id: 95
    topic-name: didi-kafka-notify
  order:
    detail-url: http://127.0.0.1
```
@@ -13,6 +13,9 @@ CREATE TABLE `account` (
```sql
  `username` varchar(128) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL DEFAULT '' COMMENT '用户名',
  `password` varchar(128) NOT NULL DEFAULT '' COMMENT '密码',
  `role` tinyint(8) NOT NULL DEFAULT '0' COMMENT '角色类型, 0:普通用户 1:研发 2:运维',
  `department` varchar(256) DEFAULT '' COMMENT '部门名',
  `display_name` varchar(256) DEFAULT '' COMMENT '用户姓名',
  `mail` varchar(256) DEFAULT '' COMMENT '邮箱',
  `status` int(16) NOT NULL DEFAULT '0' COMMENT '0标识使用中,-1标识已废弃',
  `gmt_create` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间',
  `gmt_modify` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '修改时间',
```

@@ -588,4 +591,7 @@ CREATE TABLE `work_order` (
```sql
  `gmt_create` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间',
  `gmt_modify` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '修改时间',
  PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COMMENT='工单表';
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COMMENT='工单表';


ALTER TABLE `topic_connections` ADD COLUMN `client_id` VARCHAR(1024) NOT NULL DEFAULT '' COMMENT '客户端ID' AFTER `client_version`;
```
10 distribution/conf/settings.xml (Normal file)

@@ -0,0 +1,10 @@
```xml
<settings>
    <mirrors>
        <mirror>
            <id>aliyunmaven</id>
            <mirrorOf>*</mirrorOf>
            <name>阿里云公共仓库</name>
            <url>https://maven.aliyun.com/repository/public</url>
        </mirror>
    </mirrors>
</settings>
```
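The Dockerfile above copies this mirror configuration to /root/.m2/settings.xml inside the builder stage. A minimal sketch of reusing it for a local build outside Docker, mirroring the same Maven invocation:

```bash
# Use the Aliyun mirror settings for a local build; -s is the standard Maven settings flag
mvn -s distribution/conf/settings.xml -Dmaven.test.skip=true clean install -U
```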
@@ -39,4 +39,14 @@ ALTER TABLE `gateway_config`
```sql
ADD COLUMN `description` TEXT NULL COMMENT '描述信息' AFTER `version`;
```

### 升级至`2.6.0`版本

#### 1.mysql变更
`2.6.0`版本在`account`表增加用户姓名,部门名,邮箱三个字段,因此需要执行下面的sql进行字段的增加。

```sql
ALTER TABLE `account`
ADD COLUMN `display_name` VARCHAR(256) NOT NULL DEFAULT '' COMMENT '用户名' AFTER `role`,
ADD COLUMN `department` VARCHAR(256) NOT NULL DEFAULT '' COMMENT '部门名' AFTER `display_name`,
ADD COLUMN `mail` VARCHAR(256) NOT NULL DEFAULT '' COMMENT '邮箱' AFTER `department`;
```
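A hedged sketch of applying the 2.6.0 schema change with the stock mysql client; the connection details and the file name `upgrade_2.6.0.sql` (holding the ALTER statement above) are placeholders, while `logi_kafka_manager` is the default database name from application-docker.yml.

```bash
# Apply the account-table change to the LogiKM database (all connection values are placeholders)
mysql -h 127.0.0.1 -P 3306 -u root -p logi_kafka_manager < upgrade_2.6.0.sql
```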
47 docs/dev_guide/LogiKM单元测试和集成测试.md (Normal file)

@@ -0,0 +1,47 @@

---



**一站式`Apache Kafka`集群指标监控与运维管控平台**

---

# LogiKM单元测试和集成测试

## 1、单元测试
### 1.1 单元测试介绍
单元测试又称模块测试,是针对软件设计的最小单位——程序模块进行正确性检验的测试工作。
其目的在于检查每个程序单元能否正确实现详细设计说明中的模块功能、性能、接口和设计约束等要求,
发现各模块内部可能存在的各种错误。单元测试需要从程序的内部结构出发设计测试用例。
多个模块可以平行地独立进行单元测试。

### 1.2 LogiKM单元测试思路
LogiKM单元测试思路主要是测试Service层的方法,通过罗列方法的各种参数,
判断方法返回的结果是否符合预期。单元测试的基类加了@SpringBootTest注解,即每次运行单测用例都启动容器

### 1.3 LogiKM单元测试注意事项
1. 单元测试用例在kafka-manager-core以及kafka-manager-extends下的test包中
2. 配置在resources/application.yml,包括运行单元测试用例启用的数据库配置等等
3. 编译打包项目时,加上参数-DskipTests可不执行测试用例,例如使用命令行mvn -DskipTests进行打包

## 2、集成测试
### 2.1 集成测试介绍
集成测试又称组装测试,是一种黑盒测试。通常在单元测试的基础上,将所有的程序模块进行有序的、递增的测试。
集成测试是检验程序单元或部件的接口关系,逐步集成为符合概要设计要求的程序部件或整个系统。

### 2.2 LogiKM集成测试思路
LogiKM集成测试主要思路是对Controller层的接口发送Http请求。
通过罗列测试用例,模拟用户的操作,对接口发送Http请求,判断结果是否达到预期。
本地运行集成测试用例时,无需加@SpringBootTest注解(即无需每次运行测试用例都启动容器)

### 2.3 LogiKM集成测试注意事项
1. 集成测试用例在kafka-manager-web的test包下
2. 因为对某些接口发送Http请求需要先登陆,比较麻烦,可以绕过登陆,方法可见教程见docs -> user_guide -> call_api_bypass_login
3. 集成测试的配置在resources/integrationTest-settings.properties文件下,包括集群地址,zk地址的配置等等
4. 如果需要运行集成测试用例,需要本地先启动LogiKM项目
5. 编译打包项目时,加上参数-DskipTests可不执行测试用例,例如使用命令行mvn -DskipTests进行打包
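A hedged sketch of the Maven invocations the notes above refer to; the module names are the ones the document mentions, the single test class name is a placeholder, and `-pl`/`-am`/`-Dtest` are standard Maven and Surefire options.

```bash
# Package the whole project without running any tests
mvn -DskipTests clean package

# Run only one module's tests (kafka-manager-core), building its dependencies as needed
mvn -pl kafka-manager-core -am test

# Run a single test class (class name is a placeholder)
mvn -pl kafka-manager-web -am test -Dtest=SomeControllerTest
```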
BIN docs/dev_guide/assets/kcm/kcm_principle.png (Normal file)
Binary file not shown. After Width: | Height: | Size: 69 KiB
@@ -29,6 +29,7 @@
- `JMX`配置错误:见`2、解决方法`。
- 存在防火墙或者网络限制:网络通的另外一台机器`telnet`试一下看是否可以连接上。
- 需要进行用户名及密码的认证:见`3、解决方法 —— 认证的JMX`。
- 当logikm和kafka不在同一台机器上时,kafka的Jmx端口不允许其他机器访问:见`4、解决方法`。

错误日志例子:

@@ -98,4 +99,9 @@ fi
SQL的例子:
```sql
UPDATE cluster SET jmx_properties='{ "maxConn": 10, "username": "xxxxx", "password": "xxxx", "openSSL": false }' where id={xxx};
```

### 4、解决方法 —— 不允许其他机器访问


该图中的127.0.0.1表明该端口只允许本机访问.
在cdh中可以点击配置->搜索jmx->寻找broker_java_opts 修改com.sun.management.jmxremote.host和java.rmi.server.hostname为本机ip
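Two hedged connectivity checks for the causes listed above; the broker hostname and JMX port (9999) are placeholders that should match your cluster's JMX configuration.

```bash
# From the LogiKM host, verify the broker's JMX port is reachable at all
telnet broker-1.example.com 9999

# On the broker itself, confirm which interface the JMX port is bound to
# (a 127.0.0.1 binding means only local access is allowed, as the FAQ describes)
ss -lntp | grep 9999
```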
89 docs/dev_guide/drawio/KCM实现原理.drawio (Normal file)

@@ -0,0 +1,89 @@
(drawio diagram source for the KCM 实现原理 flow: LogiKM 通过版本管理将Kafka安装包/server配置上传到S3,调用夜莺任务中心下发 kcm_script.sh,夜莺Agent在待部署机器上执行下载安装包、安装、检查安装结果、检查副本同步、结束等步骤;rendered as docs/dev_guide/assets/kcm/kcm_principle.png above)
@@ -136,7 +136,8 @@ EXPIRED_TOPIC_CONFIG
配置Value:
```json
{
    "minExpiredDay": 30,        #过期时间大于此值才显示
    "minExpiredDay": 30,        #过期时间大于此值才显示,
    "filterRegex": ".*XXX\\s+", #忽略符合此正则规则的Topic
    "ignoreClusterIdList": [    # 忽略的集群
        50
    ]
```
@@ -1,27 +0,0 @@

---



**一站式`Apache Kafka`集群指标监控与运维管控平台**

---

# 升级至`2.2.0`版本

`2.2.0`版本在`cluster`表及`logical_cluster`各增加了一个字段,因此需要执行下面的sql进行字段的增加。

```sql
# 往cluster表中增加jmx_properties字段, 这个字段会用于存储jmx相关的认证以及配置信息
ALTER TABLE `cluster` ADD COLUMN `jmx_properties` TEXT NULL COMMENT 'JMX配置' AFTER `security_properties`;

# 往logical_cluster中增加identification字段, 同时数据和原先name数据相同, 最后增加一个唯一键.
# 此后, name字段还是表示集群名称, 而identification字段表示的是集群标识, 只能是字母数字及下划线组成,
# 数据上报到监控系统时, 集群这个标识采用的字段就是identification字段, 之前使用的是name字段.
ALTER TABLE `logical_cluster` ADD COLUMN `identification` VARCHAR(192) NOT NULL DEFAULT '' COMMENT '逻辑集群标识' AFTER `name`;

UPDATE `logical_cluster` SET `identification`=`name` WHERE id>=0;

ALTER TABLE `logical_cluster` ADD INDEX `uniq_identification` (`identification` ASC);
```

@@ -1,17 +0,0 @@

---



**一站式`Apache Kafka`集群指标监控与运维管控平台**

---

# 升级至`2.3.0`版本

`2.3.0`版本在`gateway_config`表增加了一个描述说明的字段,因此需要执行下面的sql进行字段的增加。

```sql
ALTER TABLE `gateway_config`
ADD COLUMN `description` TEXT NULL COMMENT '描述信息' AFTER `version`;
```
39 docs/dev_guide/周期任务说明文档.md (Normal file)

@@ -0,0 +1,39 @@
---



**一站式`Apache Kafka`集群指标监控与运维管控平台**

---

| 定时任务名称或方法名 | 所在类 | 详细说明 | cron | cron说明 | 线程数量 |
| --- | --- | --- | --- | --- | --- |
| calKafkaBill | CalKafkaTopicBill | 计算Kafka使用账单 | 0 0 1 * * ? | 每天凌晨1点执行一次 | 1 |
| calRegionCapacity | CalRegionCapacity | 计算Region容量 | 0 0 0/12 * * ? | 每隔12小时执行一次,在0分钟0秒时触发 | 1 |
| calTopicStatistics | CalTopicStatistics | 定时计算Topic统计数据 | 0 0 0/4 * * ? | 每隔4小时执行一次,在0分钟0秒时触发 | 5 |
| flushBrokerTable | FlushBrokerTable | 定时刷新BrokerTable数据 | 0 0 0/1 * * ? | 每隔1小时执行一次,在0分钟0秒时触发 | 1 |
| flushExpiredTopic | FlushExpiredTopic | 定期更新过期Topic | 0 0 0/5 * * ? | 每隔5小时执行一次,在0分钟0秒时触发 | 1 |
| syncClusterTaskState | SyncClusterTaskState | 同步更新集群任务状态 | 0 0/1 * * * ? | 每隔1分钟执行一次,在每分钟的0秒时触发 | 1 |
| newCollectAndPublishCGData | CollectAndPublishCGData | 收集并发布消费者指标数据 | 30 0/1 * * * ? | 每隔1分钟执行一次,在每分钟的30秒时触发 | 10 |
| collectAndPublishCommunityTopicMetrics | CollectAndPublishCommunityTopicMetrics | Topic社区指标收集 | 31 0/1 * * * ? | 每隔1分钟执行一次,在每分钟的31秒时触发 | 5 |
| collectAndPublishTopicThrottledMetrics | CollectAndPublishTopicThrottledMetrics | 收集和发布Topic限流信息 | 11 0/1 * * * ? | 每隔1分钟执行一次,在每分钟的11秒时触发 | 5 |
| deleteMetrics | DeleteMetrics | 定期删除Metrics信息 | 0 0/2 * * * ? | 每隔2分钟执行一次,在每分钟的0秒时触发 | 1 |
| storeDiDiAppTopicMetrics | StoreDiDiAppTopicMetrics | JMX中获取appId维度的流量信息存DB | 41 0/1 * * * ? | 每隔1分钟执行一次,在每分钟的41秒时触发 | 5 |
| storeDiDiTopicRequestTimeMetrics | StoreDiDiTopicRequestTimeMetrics | JMX中获取的TopicRequestTimeMetrics信息存DB | 51 0/1 * * * ? | 每隔1分钟执行一次,在每分钟的51秒时触发 | 5 |
| autoHandleTopicOrder | AutoHandleTopicOrder | 定时自动处理Topic相关工单 | 0 0/1 * * * ? | 每隔1分钟执行一次,在每分钟的0秒时触发 | 1 |
| automatedHandleOrder | AutomatedHandleOrder | 工单自动化审批 | 0 0/1 * * * ? | 每隔1分钟执行一次,在每分钟的0秒时触发 | 1 |
| flushReassignment | FlushReassignment | 定时处理分区迁移任务 | 0 0/1 * * * ? | 每隔1分钟执行一次,在每分钟的0秒时触发 | 1 |
| syncTopic2DB | SyncTopic2DB | 定期将未落盘的Topic刷新到DB中 | 0 0/2 * * * ? | 每隔2分钟执行一次,在每分钟的0秒时触发 | 1 |
| sinkCommunityTopicMetrics2Monitor | SinkCommunityTopicMetrics2Monitor | 定时上报Topic监控指标 | 1 0/1 * * * ? | 每隔1分钟执行一次,在每分钟的1秒时触发 | 5 |
| flush方法 | LogicalClusterMetadataManager | 定时刷新逻辑集群元数据到缓存中 | 0/30 * * * * ? | 每隔30秒执行一次 | 1 |
| flush方法 | AccountServiceImpl | 定时刷新account信息到缓存中 | 0/5 * * * * ? | 每隔5秒执行一次 | 1 |
| ipFlush方法 | HeartBeat | 定时获取管控平台所在机器IP等信息到DB | 0/10 * * * * ? | 每隔10秒执行一次 | 1 |
| flushTopicMetrics方法 | FlushTopicMetrics | 定时刷新topic指标到缓存中 | 5 0/1 * * * ? | 每隔1分钟执行一次,在每分钟的5秒时触发 | 1 |
| schedule方法 | FlushBKConsumerGroupMetadata | 定时刷新broker上消费组信息到缓存中 | 15 0/1 * * * ? | 每隔1分钟执行一次,在每分钟的15秒时触发 | 1 |
| flush方法 | FlushClusterMetadata | 定时刷新物理集群元信息到缓存中 | 0/30 * * * * ? | 每隔30秒执行一次 | 1 |
| flush方法 | FlushTopicProperties | 定时刷新物理集群配置到缓存中 | 25 0/1 * * * ? | 每隔1分钟执行一次,在每分钟的25秒时触发 | 1 |
| schedule方法 | FlushZKConsumerGroupMetadata | 定时刷新zk上的消费组信息到缓存中 | 35 0/1 * * * ? | 每隔1分钟执行一次,在每分钟的35秒时触发 | 1 |
89 docs/dev_guide/如何使用集群安装部署功能.md (Normal file)

@@ -0,0 +1,89 @@

---



**一站式`Apache Kafka`集群指标监控与运维管控平台**

---

# 如何使用集群安装部署功能?

[TOC]

## 1、实现原理



- LogiKM上传安装包到S3服务;
- LogiKM调用夜莺-Job服务接口,创建执行[kcm_script.sh](https://github.com/didi/LogiKM/blob/master/kafka-manager-extends/kafka-manager-kcm/src/main/resources/kcm_script.sh)脚本的任务,kcm_script.sh脚本是安装部署Kafka集群的脚本;
- 夜莺将任务脚本下发到具体的机器上,通过夜莺Agent执行该脚本;
- kcm_script.sh脚本会进行Kafka-Broker的安装部署;

---

## 2、使用方式

### 2.1、第一步:修改配置

**配置application.yml文件**
```yaml
#
kcm:
  enabled: false # 是否开启,将其修改为true
  s3: # s3 存储服务
    endpoint: s3.didiyunapi.com
    access-key: 1234567890
    secret-key: 0987654321
    bucket: logi-kafka
  n9e: # 夜莺
    base-url: http://127.0.0.1:8004 # 夜莺job服务地址
    user-token: 12345678 # 用户的token
    timeout: 300 # 单台操作的超时时间
    account: root # 操作时使用的账号
    script-file: kcm_script.sh # 脚本,已内置好,在源码的kcm模块内,此处配置无需修改
    logikm-url: http://127.0.0.1:8080 # logikm部署地址,部署时kcm_script.sh会调用logikm检查部署中的一些状态,这里只需要填写 http://IP:PORT 就可以了


account:
  jump-login:
    gateway-api: false # 网关接口
    third-part-api: false # 第三方接口,将其修改为true,即允许未登录情况下调用开放的第三方接口
```

### 2.2、第二步:检查服务

**检查s3服务**
- 测试 "运维管控-》集群运维-》版本管理" 页面的上传,查看等功能是否都正常。如果存在不正常,则需要查看s3的配置是否正确;
- 如果都没有问题,则上传Kafka的以.tgz结尾的安装包以及server.properties文件;

**检查夜莺Job服务**
- 创建一个job任务,机器选择需要安装Kafka集群的机器,然后执行的命令是echo "Hello LogiKM",看能否被成功执行。如果不行,则需要检查夜莺的安装;
- 如果没有问题则表示夜莺和所需部署的机器之间的交互是没有问题的;

### 2.3、第三步:接入集群

在LogiKM的 "运维管控-》集群列表" 中接入需要安装部署的集群,**PS:此时是允许接入一个没有任何Broker的空的Kafka集群**,其中的bootstrapServers配置为搭建完成后的Kafka集群地址就可以了,而ZK地址必须和集群的server.properties中的ZK地址保持一致;

### 2.4、第四步:部署集群

- 打开LogiKM的 "运维管控-》集群运维-》集群任务" 页面,点击 "新建集群任务" 按钮;
- 选择集群、任务类型、包版本、server配置及填写主机列表,然后点击确认,即可在夜莺的Job服务中心中创建一个任务出来。**PS:如果创建失败,可以看一下日志确认为什么创建失败**;
- 随后可以点击详情及状态对任务进行操作;

### 2.5、可能问题

#### 2.5.1、问题一:任务执行超时、失败等

进入夜莺Job服务中心,查看对应的任务的相关日志;

- 提示安装包下载失败,则需要查看对应的s3服务是否可以直接wget下载安装包,如果不可以则需要对kcm_script.sh脚本进行修改(参见本节之后的检查示例);
- 提示调用LogiKM失败,则可以使用postman手动测试一下kcm_script.sh脚本调用LogiKM的那个接口是否有问题,如果存在问题则进行相应的修改;PS:具体接口见kcm_script.sh脚本

## 3、备注说明

- 集群安装部署,仅安装部署Kafka-Broker,不安装Kafka的ZK服务;
- 安装部署中,有任何定制化的需求,例如修改安装的目录等,可以通过修改kcm_script.sh脚本实现;
- kcm_script.sh脚本位置:[kcm_script.sh](https://github.com/didi/LogiKM/blob/master/kafka-manager-extends/kafka-manager-kcm/src/main/resources/kcm_script.sh);
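For the "安装包下载失败" case in 2.5.1 above, a hedged way to reproduce the wget check by hand from a target broker machine; the endpoint and bucket follow the sample configuration in this document, while the exact object URL layout and package name are assumptions that depend on your S3 service.

```bash
# Confirm the uploaded package can be fetched directly, the same way kcm_script.sh would
# (URL layout and file name are placeholders; adjust to your S3 service and uploaded package).
wget https://s3.didiyunapi.com/logi-kafka/kafka_2.12-2.4.1.tgz -O /tmp/kafka.tgz
```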
97 docs/didi/Kafka主备切换流程简介.md (Normal file)

@@ -0,0 +1,97 @@

---



**一站式`Apache Kafka`集群指标监控与运维管控平台**

---

# Kafka主备切换流程简介

## 1、客户端读写流程

在介绍Kafka主备切换流程之前,我们先来了解一下客户端通过我们自研的网关的大致读写流程。



如上图所示,客户端读写流程大致为:
1. 客户端:向网关请求Topic元信息;
2. 网关:发现客户端使用的KafkaUser是A集群的KafkaUser,因此将Topic元信息请求转发到A集群;
3. A集群:收到网关的Topic元信息,处理并返回给网关;
4. 网关:将集群A返回的结果,返回给客户端;
5. 客户端:从Topic元信息中,获取到Topic实际位于集群A,然后客户端会连接集群A进行生产消费;

**备注:客户端为Kafka原生客户端,无任何定制。**

---

## 2、主备切换流程

介绍完基于网关的客户端读写流程之后,我们再来看一下主备高可用版的Kafka,需要如何进行主备切换。

### 2.1、大体流程



图有点多,总结起来就是:
1. 先阻止客户端数据的读写;
2. 等待主备数据同步完成;
3. 调整主备集群数据同步方向;
4. 调整配置,引导客户端到备集群进行读写;

### 2.2、详细操作

看完大体流程,我们再来看一下实际操作的命令。

```bash
1. 阻止用户生产和消费
bin/kafka-configs.sh --zookeeper ${主集群A的ZK地址} --entity-type users --entity-name ${客户端使用的kafkaUser} --add-config didi.ha.active.cluster=None --alter


2. 等待FetcherLag 和 Offset 同步
无需操作,仅需检查主备Topic的Offset是否一致了。


3. 取消备集群B向主集群A进行同步数据的配置
bin/kafka-configs.sh --zookeeper ${备集群B的ZK地址} --entity-type ha-topics --entity-name ${Topic名称} --delete-config didi.ha.remote.cluster --alter


4. 增加主集群A向备集群B进行同步数据的配置
bin/kafka-configs.sh --zookeeper ${主集群A的ZK地址} --entity-type ha-topics --entity-name ${Topic名称} --add-config didi.ha.remote.cluster=${备集群B的集群ID} --alter


5. 修改主集群A,备集群B,网关中,kafkaUser对应的集群,从而引导请求走向备集群
bin/kafka-configs.sh --zookeeper ${主集群A的ZK地址} --entity-type users --entity-name ${客户端使用的kafkaUser} --add-config didi.ha.active.cluster=${备集群B的集群ID} --alter

bin/kafka-configs.sh --zookeeper ${备集群B的ZK地址} --entity-type users --entity-name ${客户端使用的kafkaUser} --add-config didi.ha.active.cluster=${备集群B的集群ID} --alter

bin/kafka-configs.sh --zookeeper ${网关的ZK地址} --entity-type users --entity-name ${客户端使用的kafkaUser} --add-config didi.ha.active.cluster=${备集群B的集群ID} --alter
```

---

## 3、FAQ

**问题一:使用中,有没有什么需要注意的地方?**

1. 主备切换是按照KafkaUser维度进行切换的,因此建议**不同服务之间,使用不同的KafkaUser**。这不仅有助于主备切换,也有助于做权限管控等。
2. 在建立主备关系的过程中,如果主Topic的数据量比较大,建议逐步建立主备关系,避免一次性建立太多主备关系的Topic导致主集群需要被同步大量数据从而出现压力。

**问题二:消费客户端如果重启之后,会不会导致变成从最旧或者最新的数据开始消费?**

不会。主备集群,会相互同步__consumer_offsets这个Topic的数据,因此客户端在主集群的消费进度信息,也会被同步到备集群,客户端在备集群进行消费时,也会从上次提交在主集群Topic的位置开始消费。

**问题三:如果是类似Flink任务,是自己维护消费进度的程序,在主备切换之后,会不会存在数据丢失或者重复消费的情况?**

如果Flink自己管理好了消费进度,那么就不会。因为主备集群之间的数据同步就和一个集群内的副本同步一样,备集群会将主集群Topic中的Offset信息等都同步过来,因此不会。

**问题四:可否做到不重启客户端?**

即将开发完成的高可用版Kafka二期将具备该能力,敬请期待。
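For step 2 above ("等待FetcherLag 和 Offset 同步"), one hedged way to compare end offsets on the two clusters with stock Kafka tooling; the broker addresses and topic name are placeholders, and the didi HA build may offer its own checks beyond this sketch.

```bash
# Latest offset per partition on the active cluster A
bin/kafka-run-class.sh kafka.tools.GetOffsetShell --broker-list clusterA-broker:9092 --topic my-topic --time -1

# Same query against standby cluster B; step 3 can proceed once both outputs match
bin/kafka-run-class.sh kafka.tools.GetOffsetShell --broker-list clusterB-broker:9092 --topic my-topic --time -1
```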
BIN docs/didi/assets/Kafka主备切换流程.png (Normal file)
Binary file not shown. After Width: | Height: | Size: 254 KiB

BIN docs/didi/assets/Kafka基于网关的生产消费流程.png (Normal file)
Binary file not shown. After Width: | Height: | Size: 53 KiB
367 docs/didi/drawio/Kafka主备切换流程.drawio (Normal file)

@@ -0,0 +1,367 @@
(drawio diagram source for the Kafka 主备切换流程,panels include:主备高可用集群稳定时的状态、1 主集群拒绝客户端的写入、2 等待主备同步完成避免丢数据、3 Topic粒度数据同步方向调整(由主集群A向备集群B同步数据);rendered as docs/didi/assets/Kafka主备切换流程.png above; the source is truncated in this mirror view)
|
||||
<mxGeometry x="200" y="860" width="160" height="80" as="geometry"/>
|
||||
</mxCell>
|
||||
<mxCell id="107" value="Zookeeper" style="rounded=0;whiteSpace=wrap;html=1;absoluteArcSize=1;arcSize=14;strokeWidth=1;fillColor=#dae8fc;strokeColor=#6c8ebf;" vertex="1" parent="1">
|
||||
<mxGeometry x="210" y="870" width="140" height="20" as="geometry"/>
|
||||
</mxCell>
|
||||
<mxCell id="108" value="Kafka-Brokers" style="rounded=0;whiteSpace=wrap;html=1;absoluteArcSize=1;arcSize=14;strokeWidth=1;fillColor=#ffe6cc;strokeColor=#d79b00;" vertex="1" parent="1">
|
||||
<mxGeometry x="210" y="910" width="140" height="20" as="geometry"/>
|
||||
</mxCell>
|
||||
<mxCell id="109" style="html=1;entryX=0;entryY=0.5;entryDx=0;entryDy=0;strokeColor=#FF3333;startArrow=none;startFill=0;strokeWidth=3;endArrow=none;endFill=0;dashed=1;" edge="1" parent="1" source="111" target="98">
|
||||
<mxGeometry relative="1" as="geometry"/>
|
||||
</mxCell>
|
||||
<mxCell id="110" value="对主集群进行读写会出现失败" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];fontColor=#FF3333;fontSize=13;" vertex="1" connectable="0" parent="109">
|
||||
<mxGeometry x="-0.0724" y="1" relative="1" as="geometry">
|
||||
<mxPoint x="-6" as="offset"/>
|
||||
</mxGeometry>
|
||||
</mxCell>
|
||||
<mxCell id="111" value="Kafka-Client" style="rounded=0;whiteSpace=wrap;html=1;absoluteArcSize=1;arcSize=14;strokeWidth=1;fillColor=#ffe6cc;strokeColor=#d79b00;" vertex="1" parent="1">
|
||||
<mxGeometry x="40" y="760" width="120" height="40" as="geometry"/>
|
||||
</mxCell>
|
||||
<mxCell id="127" value="4、修改ZK,使得客户端使用的KafkaUser对应的集群为备集群B" style="rounded=0;whiteSpace=wrap;html=1;absoluteArcSize=1;arcSize=14;strokeWidth=1;fillColor=#FFFFFF;strokeColor=#d79b00;labelPosition=center;verticalLabelPosition=top;align=center;verticalAlign=bottom;fontSize=16;" vertex="1" parent="1">
|
||||
<mxGeometry x="30" y="1110" width="490" height="380" as="geometry"/>
|
||||
</mxCell>
|
||||
<mxCell id="128" value="Kafka——主集群A" style="rounded=0;whiteSpace=wrap;html=1;absoluteArcSize=1;arcSize=14;strokeWidth=1;fillColor=#cdeb8b;strokeColor=#36393d;labelPosition=center;verticalLabelPosition=top;align=center;verticalAlign=bottom;" vertex="1" parent="1">
|
||||
<mxGeometry x="200" y="1140" width="160" height="80" as="geometry"/>
|
||||
</mxCell>
|
||||
<mxCell id="130" value="Kafka-Brokers" style="rounded=0;whiteSpace=wrap;html=1;absoluteArcSize=1;arcSize=14;strokeWidth=1;fillColor=#ffe6cc;strokeColor=#d79b00;" vertex="1" parent="1">
|
||||
<mxGeometry x="210" y="1190" width="140" height="20" as="geometry"/>
|
||||
</mxCell>
|
||||
<mxCell id="131" value="Kafka网关" style="rounded=0;whiteSpace=wrap;html=1;absoluteArcSize=1;arcSize=14;strokeWidth=1;fillColor=#cdeb8b;strokeColor=#36393d;labelPosition=center;verticalLabelPosition=bottom;align=center;verticalAlign=top;" vertex="1" parent="1">
|
||||
<mxGeometry x="200" y="1260" width="160" height="80" as="geometry"/>
|
||||
</mxCell>
|
||||
<mxCell id="132" value="Zookeeper(修改ZK)" style="rounded=0;whiteSpace=wrap;html=1;absoluteArcSize=1;arcSize=14;strokeWidth=1;fillColor=#FF3333;strokeColor=#6c8ebf;" vertex="1" parent="1">
|
||||
<mxGeometry x="210" y="1270" width="140" height="20" as="geometry"/>
|
||||
</mxCell>
|
||||
<mxCell id="133" value="Kafka-Gateways" style="rounded=0;whiteSpace=wrap;html=1;absoluteArcSize=1;arcSize=14;strokeWidth=1;fillColor=#ffe6cc;strokeColor=#d79b00;" vertex="1" parent="1">
|
||||
<mxGeometry x="210" y="1310" width="140" height="20" as="geometry"/>
|
||||
</mxCell>
|
||||
<mxCell id="134" style="edgeStyle=orthogonalEdgeStyle;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;entryX=1;entryY=0.5;entryDx=0;entryDy=0;endArrow=none;endFill=0;strokeColor=#000000;strokeWidth=1;startArrow=classic;startFill=1;" edge="1" parent="1" source="136" target="128">
|
||||
<mxGeometry relative="1" as="geometry">
|
||||
<Array as="points">
|
||||
<mxPoint x="440" y="1420"/>
|
||||
<mxPoint x="440" y="1180"/>
|
||||
</Array>
|
||||
</mxGeometry>
|
||||
</mxCell>
|
||||
<mxCell id="135" value="<span style="color: rgb(0 , 0 , 0) ; font-size: 11px">主集群A 不断向 备集群B</span><br style="color: rgb(0 , 0 , 0) ; font-size: 11px"><span style="color: rgb(0 , 0 , 0) ; font-size: 11px">发送Fetch请求,</span><br style="color: rgb(0 , 0 , 0) ; font-size: 11px"><span style="color: rgb(0 , 0 , 0) ; font-size: 11px">从而同步备集群B的<br>指定Topic的数据</span>" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];fontColor=#FF3333;fontSize=13;" vertex="1" connectable="0" parent="134">
|
||||
<mxGeometry x="-0.05" y="-4" relative="1" as="geometry">
|
||||
<mxPoint x="-4" y="-10" as="offset"/>
|
||||
</mxGeometry>
|
||||
</mxCell>
|
||||
<mxCell id="136" value="Kafka——备集群B" style="rounded=0;whiteSpace=wrap;html=1;absoluteArcSize=1;arcSize=14;strokeWidth=1;fillColor=#cdeb8b;strokeColor=#36393d;labelPosition=center;verticalLabelPosition=bottom;align=center;verticalAlign=top;" vertex="1" parent="1">
|
||||
<mxGeometry x="200" y="1380" width="160" height="80" as="geometry"/>
|
||||
</mxCell>
|
||||
<mxCell id="138" value="Kafka-Brokers" style="rounded=0;whiteSpace=wrap;html=1;absoluteArcSize=1;arcSize=14;strokeWidth=1;fillColor=#ffe6cc;strokeColor=#d79b00;" vertex="1" parent="1">
|
||||
<mxGeometry x="210" y="1430" width="140" height="20" as="geometry"/>
|
||||
</mxCell>
|
||||
<mxCell id="139" style="html=1;entryX=0;entryY=0.5;entryDx=0;entryDy=0;strokeColor=#FF3333;startArrow=none;startFill=0;strokeWidth=3;endArrow=none;endFill=0;dashed=1;" edge="1" parent="1" source="141" target="128">
|
||||
<mxGeometry relative="1" as="geometry"/>
|
||||
</mxCell>
|
||||
<mxCell id="140" value="对主集群进行读写会出现失败" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];fontColor=#FF3333;fontSize=13;" vertex="1" connectable="0" parent="139">
|
||||
<mxGeometry x="-0.0724" y="1" relative="1" as="geometry">
|
||||
<mxPoint x="-6" as="offset"/>
|
||||
</mxGeometry>
|
||||
</mxCell>
|
||||
<mxCell id="141" value="Kafka-Client" style="rounded=0;whiteSpace=wrap;html=1;absoluteArcSize=1;arcSize=14;strokeWidth=1;fillColor=#ffe6cc;strokeColor=#d79b00;" vertex="1" parent="1">
|
||||
<mxGeometry x="40" y="1280" width="120" height="40" as="geometry"/>
|
||||
</mxCell>
|
||||
<mxCell id="142" value="5、重启客户端,网关将请求转向集群B" style="rounded=0;whiteSpace=wrap;html=1;absoluteArcSize=1;arcSize=14;strokeWidth=1;fillColor=#FFFFFF;strokeColor=#d79b00;labelPosition=center;verticalLabelPosition=top;align=center;verticalAlign=bottom;fontSize=16;" vertex="1" parent="1">
|
||||
<mxGeometry x="630" y="1110" width="490" height="380" as="geometry"/>
|
||||
</mxCell>
|
||||
<mxCell id="143" value="Kafka——主集群A" style="rounded=0;whiteSpace=wrap;html=1;absoluteArcSize=1;arcSize=14;strokeWidth=1;fillColor=#cdeb8b;strokeColor=#36393d;labelPosition=center;verticalLabelPosition=top;align=center;verticalAlign=bottom;" vertex="1" parent="1">
|
||||
<mxGeometry x="800" y="1140" width="160" height="80" as="geometry"/>
|
||||
</mxCell>
|
||||
<mxCell id="144" value="Zookeeper" style="rounded=0;whiteSpace=wrap;html=1;absoluteArcSize=1;arcSize=14;strokeWidth=1;fillColor=#dae8fc;strokeColor=#6c8ebf;" vertex="1" parent="1">
|
||||
<mxGeometry x="810" y="1150" width="140" height="20" as="geometry"/>
|
||||
</mxCell>
|
||||
<mxCell id="145" value="Kafka-Brokers" style="rounded=0;whiteSpace=wrap;html=1;absoluteArcSize=1;arcSize=14;strokeWidth=1;fillColor=#ffe6cc;strokeColor=#d79b00;" vertex="1" parent="1">
|
||||
<mxGeometry x="810" y="1190" width="140" height="20" as="geometry"/>
|
||||
</mxCell>
|
||||
<mxCell id="146" value="Kafka网关" style="rounded=0;whiteSpace=wrap;html=1;absoluteArcSize=1;arcSize=14;strokeWidth=1;fillColor=#cdeb8b;strokeColor=#36393d;labelPosition=center;verticalLabelPosition=bottom;align=center;verticalAlign=top;" vertex="1" parent="1">
|
||||
<mxGeometry x="800" y="1260" width="160" height="80" as="geometry"/>
|
||||
</mxCell>
|
||||
<mxCell id="148" value="Kafka-Gateways" style="rounded=0;whiteSpace=wrap;html=1;absoluteArcSize=1;arcSize=14;strokeWidth=1;fillColor=#ffe6cc;strokeColor=#d79b00;" vertex="1" parent="1">
|
||||
<mxGeometry x="810" y="1310" width="140" height="20" as="geometry"/>
|
||||
</mxCell>
|
||||
<mxCell id="149" style="edgeStyle=orthogonalEdgeStyle;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;entryX=1;entryY=0.5;entryDx=0;entryDy=0;endArrow=none;endFill=0;strokeColor=#000000;strokeWidth=1;startArrow=classic;startFill=1;" edge="1" parent="1" source="151" target="143">
|
||||
<mxGeometry relative="1" as="geometry">
|
||||
<Array as="points">
|
||||
<mxPoint x="1040" y="1420"/>
|
||||
<mxPoint x="1040" y="1180"/>
|
||||
</Array>
|
||||
</mxGeometry>
|
||||
</mxCell>
|
||||
<mxCell id="150" value="<span style="color: rgb(0 , 0 , 0) ; font-size: 11px">主集群A 不断向 备集群B</span><br style="color: rgb(0 , 0 , 0) ; font-size: 11px"><span style="color: rgb(0 , 0 , 0) ; font-size: 11px">发送Fetch请求,</span><br style="color: rgb(0 , 0 , 0) ; font-size: 11px"><span style="color: rgb(0 , 0 , 0) ; font-size: 11px">从而同步备集群B的<br>指定Topic的数据</span>" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];fontColor=#FF3333;fontSize=13;" vertex="1" connectable="0" parent="149">
|
||||
<mxGeometry x="-0.05" y="-4" relative="1" as="geometry">
|
||||
<mxPoint x="-4" y="-10" as="offset"/>
|
||||
</mxGeometry>
|
||||
</mxCell>
|
||||
<mxCell id="151" value="Kafka——备集群B" style="rounded=0;whiteSpace=wrap;html=1;absoluteArcSize=1;arcSize=14;strokeWidth=1;fillColor=#cdeb8b;strokeColor=#36393d;labelPosition=center;verticalLabelPosition=bottom;align=center;verticalAlign=top;" vertex="1" parent="1">
|
||||
<mxGeometry x="800" y="1380" width="160" height="80" as="geometry"/>
|
||||
</mxCell>
|
||||
<mxCell id="152" value="Zookeeper" style="rounded=0;whiteSpace=wrap;html=1;absoluteArcSize=1;arcSize=14;strokeWidth=1;fillColor=#dae8fc;strokeColor=#6c8ebf;" vertex="1" parent="1">
|
||||
<mxGeometry x="810" y="1390" width="140" height="20" as="geometry"/>
|
||||
</mxCell>
|
||||
<mxCell id="153" value="Kafka-Brokers" style="rounded=0;whiteSpace=wrap;html=1;absoluteArcSize=1;arcSize=14;strokeWidth=1;fillColor=#ffe6cc;strokeColor=#d79b00;" vertex="1" parent="1">
|
||||
<mxGeometry x="810" y="1430" width="140" height="20" as="geometry"/>
|
||||
</mxCell>
|
||||
<mxCell id="156" value="Kafka-Client" style="rounded=0;whiteSpace=wrap;html=1;absoluteArcSize=1;arcSize=14;strokeWidth=1;fillColor=#ffe6cc;strokeColor=#d79b00;" vertex="1" parent="1">
|
||||
<mxGeometry x="640" y="1280" width="120" height="40" as="geometry"/>
|
||||
</mxCell>
|
||||
<mxCell id="157" style="html=1;entryX=0;entryY=0.5;entryDx=0;entryDy=0;strokeColor=default;startArrow=classic;startFill=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="156" target="151">
|
||||
<mxGeometry relative="1" as="geometry">
|
||||
<mxPoint x="529.9966666666667" y="1400" as="sourcePoint"/>
|
||||
<mxPoint x="613.3299999999999" y="1300" as="targetPoint"/>
|
||||
</mxGeometry>
|
||||
</mxCell>
|
||||
<mxCell id="158" value="对B集群进行读写" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="157">
|
||||
<mxGeometry x="-0.0724" y="1" relative="1" as="geometry">
|
||||
<mxPoint x="-6" as="offset"/>
|
||||
</mxGeometry>
|
||||
</mxCell>
|
||||
<mxCell id="159" value="Zookeeper(修改ZK)" style="rounded=0;whiteSpace=wrap;html=1;absoluteArcSize=1;arcSize=14;strokeWidth=1;fillColor=#FF3333;strokeColor=#6c8ebf;" vertex="1" parent="1">
|
||||
<mxGeometry x="210" y="1150" width="140" height="20" as="geometry"/>
|
||||
</mxCell>
|
||||
<mxCell id="160" value="Zookeeper(修改ZK)" style="rounded=0;whiteSpace=wrap;html=1;absoluteArcSize=1;arcSize=14;strokeWidth=1;fillColor=#FF3333;strokeColor=#6c8ebf;" vertex="1" parent="1">
|
||||
<mxGeometry x="210" y="1390" width="140" height="20" as="geometry"/>
|
||||
</mxCell>
|
||||
<mxCell id="161" value="Zookeeper" style="rounded=0;whiteSpace=wrap;html=1;absoluteArcSize=1;arcSize=14;strokeWidth=1;fillColor=#dae8fc;strokeColor=#6c8ebf;" vertex="1" parent="1">
|
||||
<mxGeometry x="810" y="1270" width="140" height="20" as="geometry"/>
|
||||
</mxCell>
|
||||
<mxCell id="162" value="" style="shape=flexArrow;endArrow=classic;html=1;fontSize=13;fontColor=#FF3333;strokeColor=#000000;strokeWidth=1;fillColor=#9999FF;" edge="1" parent="1">
|
||||
<mxGeometry width="50" height="50" relative="1" as="geometry">
|
||||
<mxPoint x="550" y="259.5" as="sourcePoint"/>
|
||||
<mxPoint x="600" y="259.5" as="targetPoint"/>
|
||||
</mxGeometry>
|
||||
</mxCell>
|
||||
<mxCell id="163" value="" style="shape=flexArrow;endArrow=classic;html=1;fontSize=13;fontColor=#FF3333;strokeColor=#000000;strokeWidth=1;fillColor=#9999FF;" edge="1" parent="1">
|
||||
<mxGeometry width="50" height="50" relative="1" as="geometry">
|
||||
<mxPoint x="879.5" y="490" as="sourcePoint"/>
|
||||
<mxPoint x="879.5" y="540" as="targetPoint"/>
|
||||
</mxGeometry>
|
||||
</mxCell>
|
||||
<mxCell id="164" value="" style="shape=flexArrow;endArrow=classic;html=1;fontSize=13;fontColor=#FF3333;strokeColor=#000000;strokeWidth=1;fillColor=#9999FF;" edge="1" parent="1">
|
||||
<mxGeometry width="50" height="50" relative="1" as="geometry">
|
||||
<mxPoint x="274.5" y="1010" as="sourcePoint"/>
|
||||
<mxPoint x="274.5" y="1060" as="targetPoint"/>
|
||||
</mxGeometry>
|
||||
</mxCell>
|
||||
<mxCell id="165" value="" style="shape=flexArrow;endArrow=classic;html=1;fontSize=13;fontColor=#FF3333;strokeColor=#000000;strokeWidth=1;fillColor=#9999FF;" edge="1" parent="1">
|
||||
<mxGeometry width="50" height="50" relative="1" as="geometry">
|
||||
<mxPoint x="550" y="1309" as="sourcePoint"/>
|
||||
<mxPoint x="600" y="1309" as="targetPoint"/>
|
||||
</mxGeometry>
|
||||
</mxCell>
|
||||
<mxCell id="167" value="" style="shape=flexArrow;endArrow=classic;html=1;fontSize=13;fontColor=#FF3333;strokeColor=#000000;strokeWidth=1;fillColor=#9999FF;" edge="1" parent="1">
|
||||
<mxGeometry width="50" height="50" relative="1" as="geometry">
|
||||
<mxPoint x="606" y="779.5" as="sourcePoint"/>
|
||||
<mxPoint x="550" y="779.5" as="targetPoint"/>
|
||||
</mxGeometry>
|
||||
</mxCell>
|
||||
</root>
|
||||
</mxGraphModel>
|
||||
</diagram>
|
||||
</mxfile>
|
||||
95
docs/didi/drawio/Kafka基于网关的生产消费流程.drawio
Normal file
@@ -0,0 +1,95 @@
|
||||
<mxfile host="65bd71144e">
|
||||
<diagram id="bhaMuW99Q1BzDTtcfRXp" name="Page-1">
|
||||
<mxGraphModel dx="1344" dy="785" grid="1" gridSize="10" guides="1" tooltips="1" connect="1" arrows="1" fold="1" page="1" pageScale="1" pageWidth="1169" pageHeight="827" math="0" shadow="0">
|
||||
<root>
|
||||
<mxCell id="0"/>
|
||||
<mxCell id="1" parent="0"/>
|
||||
<mxCell id="27" value="Kafka集群--A" style="rounded=0;whiteSpace=wrap;html=1;absoluteArcSize=1;arcSize=14;strokeWidth=1;fillColor=#cdeb8b;strokeColor=#36393d;labelPosition=center;verticalLabelPosition=top;align=center;verticalAlign=bottom;" vertex="1" parent="1">
|
||||
<mxGeometry x="320" y="40" width="160" height="80" as="geometry"/>
|
||||
</mxCell>
|
||||
<mxCell id="32" value="Zookeeper" style="rounded=0;whiteSpace=wrap;html=1;absoluteArcSize=1;arcSize=14;strokeWidth=1;fillColor=#dae8fc;strokeColor=#6c8ebf;" vertex="1" parent="1">
|
||||
<mxGeometry x="330" y="50" width="140" height="20" as="geometry"/>
|
||||
</mxCell>
|
||||
<mxCell id="33" value="Kafka-Brokers" style="rounded=0;whiteSpace=wrap;html=1;absoluteArcSize=1;arcSize=14;strokeWidth=1;fillColor=#ffe6cc;strokeColor=#d79b00;" vertex="1" parent="1">
|
||||
<mxGeometry x="330" y="90" width="140" height="20" as="geometry"/>
|
||||
</mxCell>
|
||||
<mxCell id="47" style="edgeStyle=orthogonalEdgeStyle;html=1;entryX=1;entryY=0.25;entryDx=0;entryDy=0;exitX=1;exitY=0.75;exitDx=0;exitDy=0;" edge="1" parent="1" source="36" target="27">
|
||||
<mxGeometry relative="1" as="geometry">
|
||||
<Array as="points">
|
||||
<mxPoint x="560" y="260"/>
|
||||
<mxPoint x="560" y="60"/>
|
||||
</Array>
|
||||
</mxGeometry>
|
||||
</mxCell>
|
||||
<mxCell id="51" value="2、网关发现是A集群的KafkaUser,<br>网关将请求转发到A集群" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="47">
|
||||
<mxGeometry x="-0.0444" y="-1" relative="1" as="geometry">
|
||||
<mxPoint x="49" y="72" as="offset"/>
|
||||
</mxGeometry>
|
||||
</mxCell>
|
||||
<mxCell id="55" style="edgeStyle=orthogonalEdgeStyle;html=1;exitX=0;exitY=0.5;exitDx=0;exitDy=0;entryX=1;entryY=0.5;entryDx=0;entryDy=0;" edge="1" parent="1" source="36" target="42">
|
||||
<mxGeometry relative="1" as="geometry"/>
|
||||
</mxCell>
|
||||
<mxCell id="56" value="4、网关返回Topic元信息" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="55">
|
||||
<mxGeometry x="0.2125" relative="1" as="geometry">
|
||||
<mxPoint x="17" y="-10" as="offset"/>
|
||||
</mxGeometry>
|
||||
</mxCell>
|
||||
<mxCell id="36" value="Kafka网关" style="rounded=0;whiteSpace=wrap;html=1;absoluteArcSize=1;arcSize=14;strokeWidth=1;fillColor=#cdeb8b;strokeColor=#36393d;labelPosition=center;verticalLabelPosition=bottom;align=center;verticalAlign=top;" vertex="1" parent="1">
|
||||
<mxGeometry x="320" y="200" width="160" height="80" as="geometry"/>
|
||||
</mxCell>
|
||||
<mxCell id="37" value="Zookeeper" style="rounded=0;whiteSpace=wrap;html=1;absoluteArcSize=1;arcSize=14;strokeWidth=1;fillColor=#dae8fc;strokeColor=#6c8ebf;" vertex="1" parent="1">
|
||||
<mxGeometry x="330" y="210" width="140" height="20" as="geometry"/>
|
||||
</mxCell>
|
||||
<mxCell id="38" value="Kafka-Gateways" style="rounded=0;whiteSpace=wrap;html=1;absoluteArcSize=1;arcSize=14;strokeWidth=1;fillColor=#ffe6cc;strokeColor=#d79b00;" vertex="1" parent="1">
|
||||
<mxGeometry x="330" y="250" width="140" height="20" as="geometry"/>
|
||||
</mxCell>
|
||||
<mxCell id="39" value="Kafka集群--B" style="rounded=0;whiteSpace=wrap;html=1;absoluteArcSize=1;arcSize=14;strokeWidth=1;fillColor=#cdeb8b;strokeColor=#36393d;labelPosition=center;verticalLabelPosition=bottom;align=center;verticalAlign=top;" vertex="1" parent="1">
|
||||
<mxGeometry x="320" y="360" width="160" height="80" as="geometry"/>
|
||||
</mxCell>
|
||||
<mxCell id="40" value="Zookeeper" style="rounded=0;whiteSpace=wrap;html=1;absoluteArcSize=1;arcSize=14;strokeWidth=1;fillColor=#dae8fc;strokeColor=#6c8ebf;" vertex="1" parent="1">
|
||||
<mxGeometry x="330" y="370" width="140" height="20" as="geometry"/>
|
||||
</mxCell>
|
||||
<mxCell id="41" value="Kafka-Brokers" style="rounded=0;whiteSpace=wrap;html=1;absoluteArcSize=1;arcSize=14;strokeWidth=1;fillColor=#ffe6cc;strokeColor=#d79b00;" vertex="1" parent="1">
|
||||
<mxGeometry x="330" y="410" width="140" height="20" as="geometry"/>
|
||||
</mxCell>
|
||||
<mxCell id="57" style="html=1;entryX=0;entryY=0.5;entryDx=0;entryDy=0;strokeColor=default;startArrow=classic;startFill=1;" edge="1" parent="1" source="42" target="27">
|
||||
<mxGeometry relative="1" as="geometry"/>
|
||||
</mxCell>
|
||||
<mxCell id="58" value="5、通过Topic元信息,<br>客户端直接访问A集群进行生产消费" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="57">
|
||||
<mxGeometry x="-0.0724" y="1" relative="1" as="geometry">
|
||||
<mxPoint x="-6" as="offset"/>
|
||||
</mxGeometry>
|
||||
</mxCell>
|
||||
<mxCell id="42" value="Kafka-Client" style="rounded=0;whiteSpace=wrap;html=1;absoluteArcSize=1;arcSize=14;strokeWidth=1;fillColor=#ffe6cc;strokeColor=#d79b00;" vertex="1" parent="1">
|
||||
<mxGeometry x="40" y="220" width="120" height="40" as="geometry"/>
|
||||
</mxCell>
|
||||
<mxCell id="48" style="html=1;entryX=0;entryY=0.75;entryDx=0;entryDy=0;exitX=0.5;exitY=1;exitDx=0;exitDy=0;edgeStyle=orthogonalEdgeStyle;" edge="1" parent="1" source="42" target="36">
|
||||
<mxGeometry relative="1" as="geometry">
|
||||
<mxPoint x="490" y="250" as="sourcePoint"/>
|
||||
<mxPoint x="490" y="90" as="targetPoint"/>
|
||||
</mxGeometry>
|
||||
</mxCell>
|
||||
<mxCell id="50" value="1、请求Topic元信息" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="48">
|
||||
<mxGeometry x="-0.3373" y="-1" relative="1" as="geometry">
|
||||
<mxPoint x="17" y="7" as="offset"/>
|
||||
</mxGeometry>
|
||||
</mxCell>
|
||||
<mxCell id="49" style="edgeStyle=orthogonalEdgeStyle;html=1;entryX=1;entryY=0.25;entryDx=0;entryDy=0;exitX=1;exitY=0.75;exitDx=0;exitDy=0;" edge="1" parent="1" source="27" target="36">
|
||||
<mxGeometry relative="1" as="geometry">
|
||||
<mxPoint x="640" y="60" as="sourcePoint"/>
|
||||
<mxPoint x="490" y="70" as="targetPoint"/>
|
||||
<Array as="points">
|
||||
<mxPoint x="520" y="100"/>
|
||||
<mxPoint x="520" y="220"/>
|
||||
</Array>
|
||||
</mxGeometry>
|
||||
</mxCell>
|
||||
<mxCell id="52" value="3、A集群返回<br>Topic元信息给网关" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="49">
|
||||
<mxGeometry x="-0.03" y="-1" relative="1" as="geometry">
|
||||
<mxPoint x="-19" y="3" as="offset"/>
|
||||
</mxGeometry>
|
||||
</mxCell>
|
||||
</root>
|
||||
</mxGraphModel>
|
||||
</diagram>
|
||||
</mxfile>
|
||||
@@ -51,7 +51,7 @@ custom:
|
||||
didi:
|
||||
app-topic-metrics-enabled: false # 滴滴埋入的指标, 社区AK不存在该指标,因此默认关闭
|
||||
topic-request-time-metrics-enabled: false # 滴滴埋入的指标, 社区AK不存在该指标,因此默认关闭
|
||||
topic-throttled-metrics: false # 滴滴埋入的指标, 社区AK不存在该指标,因此默认关闭
|
||||
topic-throttled-metrics-enabled: false # 滴滴埋入的指标, 社区AK不存在该指标,因此默认关闭
|
||||
save-days: 7 #指标在DB中保持的天数,-1表示永久保存,7表示保存近7天的数据
|
||||
|
||||
# 任务相关的开关
|
||||
|
||||
132
docs/install_guide/install_guide_docker_cn.md
Normal file
@@ -0,0 +1,132 @@
---

![kafka

**A one-stop `Apache Kafka` cluster metrics monitoring and operations management platform**

---

## Deploying Logikm with Docker

To help users quickly stand up Logikm in their own environment, the platform can be deployed with Docker.

### Deploy MySQL

```shell
docker run --name mysql -p 3306:3306 -d registry.cn-hangzhou.aliyuncs.com/zqqq/logikm-mysql:5.7.37
```

Optional variables are described in the official [MySQL image documentation](https://hub.docker.com/_/mysql).

Default parameters:

* MYSQL_ROOT_PASSWORD: root

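As a quick sanity check, the following sketch (assuming the image keeps the default `root` password listed above and initializes the `logi_kafka_manager` schema used by Logikm) confirms the database is reachable before starting Logikm:

```shell
# List the databases inside the MySQL container; logi_kafka_manager
# (the default schema used by Logikm) should appear if initialization succeeded.
docker exec -it mysql mysql -uroot -proot -e "SHOW DATABASES;"
```
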
### Deploy Logikm Allinone

> Front end and back end deployed together

```shell
docker run --name logikm -p 8080:8080 --link mysql -d registry.cn-hangzhou.aliyuncs.com/zqqq/logikm:2.6.0
```

Parameter notes:

* -p maps container port 8080 to port 8080 on the host
* --link links the container to the mysql container

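A minimal way to verify that the all-in-one container started (assuming it runs on the local host with the port mapping shown above; startup can take a little while):

```shell
# Confirm the container is running and follow its logs.
docker ps --filter name=logikm
docker logs -f logikm

# Once the application is up, the console should answer on the mapped port.
curl -I http://localhost:8080
```
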
### Deploy front end and back end separately

#### Deploy the back end (Logikm-backend)

```shell
docker run --name logikm-backend --link mysql -d registry.cn-hangzhou.aliyuncs.com/zqqq/logikm-backend:2.6.0
```

Optional parameters:

* -e LOGI_MYSQL_HOST MySQL host, defaults to mysql
* -e LOGI_MYSQL_PORT MySQL port, defaults to 3306
* -e LOGI_MYSQL_DATABASE database name, defaults to logi_kafka_manager
* -e LOGI_MYSQL_USER MySQL user, defaults to root
* -e LOGI_MYSQL_PASSWORD MySQL password, defaults to root

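For example, to point the back end at an existing MySQL instance instead of the linked container, the documented variables can be combined like this (the host, user and password below are placeholders; substitute your own):

```shell
# Placeholder values: replace the MySQL host, user and password with your own.
docker run --name logikm-backend -d \
  -e LOGI_MYSQL_HOST=192.168.1.10 \
  -e LOGI_MYSQL_PORT=3306 \
  -e LOGI_MYSQL_DATABASE=logi_kafka_manager \
  -e LOGI_MYSQL_USER=logikm \
  -e LOGI_MYSQL_PASSWORD=your_password \
  registry.cn-hangzhou.aliyuncs.com/zqqq/logikm-backend:2.6.0
```
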
#### Deploy the front end (Logikm-front)

```shell
docker run --name logikm-front -p 8088:80 --link logikm-backend -d registry.cn-hangzhou.aliyuncs.com/zqqq/logikm-front:2.6.0
```

### Configurable back-end parameters

The following environment variables can be set with the `-e` flag of `docker run`:

| Environment variable | Description | Default |
| ------------------- | ------------- | ------------------ |
| LOGI_MYSQL_HOST | MySQL host | mysql |
| LOGI_MYSQL_PORT | MySQL port | 3306 |
| LOGI_MYSQL_DATABASE | Database name | logi_kafka_manager |
| LOGI_MYSQL_USER | MySQL user | root |
| LOGI_MYSQL_PASSWORD | MySQL password | root |

## Building from source with Docker

Following this section, users can build Logikm images from source with Docker.

### Build MySQL

```shell
docker build -t mysql:{TAG} -f container/dockerfiles/mysql/Dockerfile container/dockerfiles/mysql
```

### Build Allinone

Packages the front end and back end into a single image:

```shell
docker build -t logikm:{TAG} .
```

Optional `--build-arg` parameters:

* MAVEN_VERSION tag of the Maven base image
* JAVA_VERSION tag of the Java base image

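For instance, the builder images can be pinned like this (the tags are only illustrative; use whatever Maven and Java image tags are available in your registry):

```shell
# Illustrative tags; any compatible Maven/JDK image tags can be used.
docker build \
  --build-arg MAVEN_VERSION=3.6.3-jdk-8 \
  --build-arg JAVA_VERSION=8-jre \
  -t logikm:2.6.0 .
```
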
### Build front end and back end separately

Packages the front end and back end as separate images.

#### Build the back end

```shell
docker build --build-arg CONSOLE_ENABLE=false -t logikm-backend:{TAG} .
```

Parameters:

* MAVEN_VERSION tag of the Maven base image
* JAVA_VERSION tag of the Java base image
* CONSOLE_ENABLE=false skips building the console module

#### Build the front end

```shell
docker build -t logikm-front:{TAG} -f kafka-manager-console/Dockerfile kafka-manager-console
```

Optional parameters:

* --build-arg OUTPUT_PATH overrides the default build output path, which is dist under the current directory

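A sketch of a front-end build that overrides the output path (the image tag and the `build` directory are illustrative, and the chosen path must match where the console build actually writes its artifacts):

```shell
# Illustrative: build the console image with a non-default output directory.
docker build \
  --build-arg OUTPUT_PATH=build \
  -t logikm-front:2.6.0 \
  -f kafka-manager-console/Dockerfile kafka-manager-console
```
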
@@ -30,6 +30,7 @@
- 18、如何在不登录的情况下,调用一些需要登录的接口?
- 19、为什么无法看到连接信息、耗时信息等指标?
- 20、AppID鉴权、生产消费配额不起作用
- 21、如何查看周期任务说明文档

---

@@ -213,3 +214,6 @@ AppID鉴权、生产消费配额依赖于滴滴kafka-gateway,通过gateway进

具体见:[滴滴Logi-KafkaManager开源版和商业版特性对比](../开源版与商业版特性对比.md)

### 21、如何查看周期任务说明文档

具体见:[周期任务说明文档](../dev_guide/周期任务说明文档.md)

@@ -21,15 +21,12 @@
|
||||
<java_target_version>1.8</java_target_version>
|
||||
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
|
||||
<file_encoding>UTF-8</file_encoding>
|
||||
|
||||
<spring-version>5.1.3.RELEASE</spring-version>
|
||||
</properties>
|
||||
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>org.springframework</groupId>
|
||||
<artifactId>spring-web</artifactId>
|
||||
<version>${spring-version}</version>
|
||||
</dependency>
|
||||
|
||||
<!-- http -->
|
||||
@@ -109,5 +106,21 @@
|
||||
<groupId>junit</groupId>
|
||||
<artifactId>junit</artifactId>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.projectlombok</groupId>
|
||||
<artifactId>lombok</artifactId>
|
||||
<scope>compile</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>com.baomidou</groupId>
|
||||
<artifactId>mybatis-plus-boot-starter</artifactId>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.hibernate.validator</groupId>
|
||||
<artifactId>hibernate-validator</artifactId>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
</project>
|
||||
@@ -0,0 +1,21 @@
|
||||
package com.xiaojukeji.kafka.manager.common.bizenum;
|
||||
|
||||
import lombok.Getter;
|
||||
|
||||
@Getter
|
||||
public enum JobLogBizTypEnum {
|
||||
HA_SWITCH_JOB_LOG(100, "HA-主备切换日志"),
|
||||
|
||||
UNKNOWN(-1, "unknown"),
|
||||
|
||||
;
|
||||
|
||||
JobLogBizTypEnum(int code, String msg) {
|
||||
this.code = code;
|
||||
this.msg = msg;
|
||||
}
|
||||
|
||||
private final int code;
|
||||
|
||||
private final String msg;
|
||||
}
|
||||
@@ -1,11 +1,11 @@
|
||||
package com.xiaojukeji.kafka.manager.kcm.common.bizenum;
|
||||
package com.xiaojukeji.kafka.manager.common.bizenum;
|
||||
|
||||
/**
|
||||
* 任务动作
|
||||
* @author zengqiao
|
||||
* @date 20/4/26
|
||||
*/
|
||||
public enum ClusterTaskActionEnum {
|
||||
public enum TaskActionEnum {
|
||||
UNKNOWN("unknown"),
|
||||
|
||||
START("start"),
|
||||
@@ -17,13 +17,15 @@ public enum ClusterTaskActionEnum {
|
||||
REDO("redo"),
|
||||
KILL("kill"),
|
||||
|
||||
FORCE("force"),
|
||||
|
||||
ROLLBACK("rollback"),
|
||||
|
||||
;
|
||||
|
||||
private String action;
|
||||
private final String action;
|
||||
|
||||
ClusterTaskActionEnum(String action) {
|
||||
TaskActionEnum(String action) {
|
||||
this.action = action;
|
||||
}
|
||||
|
||||
@@ -1,10 +1,13 @@
|
||||
package com.xiaojukeji.kafka.manager.common.bizenum;
|
||||
|
||||
import lombok.Getter;
|
||||
|
||||
/**
|
||||
* 任务状态
|
||||
* @author zengqiao
|
||||
* @date 2017/6/29.
|
||||
*/
|
||||
@Getter
|
||||
public enum TaskStatusEnum {
|
||||
UNKNOWN( -1, "未知"),
|
||||
|
||||
@@ -15,6 +18,7 @@ public enum TaskStatusEnum {
|
||||
|
||||
RUNNING( 30, "运行中"),
|
||||
KILLING( 31, "杀死中"),
|
||||
RUNNING_IN_TIMEOUT( 32, "超时运行中"),
|
||||
|
||||
BLOCKED( 40, "暂停"),
|
||||
|
||||
@@ -30,31 +34,15 @@ public enum TaskStatusEnum {
|
||||
|
||||
;
|
||||
|
||||
private Integer code;
|
||||
private final Integer code;
|
||||
|
||||
private String message;
|
||||
private final String message;
|
||||
|
||||
TaskStatusEnum(Integer code, String message) {
|
||||
this.code = code;
|
||||
this.message = message;
|
||||
}
|
||||
|
||||
public Integer getCode() {
|
||||
return code;
|
||||
}
|
||||
|
||||
public String getMessage() {
|
||||
return message;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "TaskStatusEnum{" +
|
||||
"code=" + code +
|
||||
", message='" + message + '\'' +
|
||||
'}';
|
||||
}
|
||||
|
||||
public static Boolean isFinished(Integer code) {
|
||||
return code >= FINISHED.getCode();
|
||||
}
|
||||
|
||||
@@ -17,9 +17,9 @@ public enum TopicAuthorityEnum {
|
||||
OWNER(4, "可管理"),
|
||||
;
|
||||
|
||||
private Integer code;
|
||||
private final Integer code;
|
||||
|
||||
private String message;
|
||||
private final String message;
|
||||
|
||||
TopicAuthorityEnum(Integer code, String message) {
|
||||
this.code = code;
|
||||
@@ -34,6 +34,16 @@ public enum TopicAuthorityEnum {
|
||||
return message;
|
||||
}
|
||||
|
||||
public static String getMsgByCode(Integer code) {
|
||||
for (TopicAuthorityEnum authorityEnum: TopicAuthorityEnum.values()) {
|
||||
if (authorityEnum.getCode().equals(code)) {
|
||||
return authorityEnum.message;
|
||||
}
|
||||
}
|
||||
|
||||
return DENY.message;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "TopicAuthorityEnum{" +
|
||||
|
||||
@@ -10,12 +10,11 @@ public enum GatewayConfigKeyEnum {
|
||||
SD_APP_RATE("SD_APP_RATE", "SD_APP_RATE"),
|
||||
SD_IP_RATE("SD_IP_RATE", "SD_IP_RATE"),
|
||||
SD_SP_RATE("SD_SP_RATE", "SD_SP_RATE"),
|
||||
|
||||
;
|
||||
|
||||
private String configType;
|
||||
private final String configType;
|
||||
|
||||
private String configName;
|
||||
private final String configName;
|
||||
|
||||
GatewayConfigKeyEnum(String configType, String configName) {
|
||||
this.configType = configType;
|
||||
|
||||
@@ -0,0 +1,27 @@
|
||||
package com.xiaojukeji.kafka.manager.common.bizenum.ha;
|
||||
|
||||
import lombok.Getter;
|
||||
|
||||
/**
|
||||
* @author zengqiao
|
||||
* @date 20/7/28
|
||||
*/
|
||||
@Getter
|
||||
public enum HaRelationTypeEnum {
|
||||
UNKNOWN(-1, "非高可用"),
|
||||
|
||||
STANDBY(0, "备"),
|
||||
|
||||
ACTIVE(1, "主"),
|
||||
|
||||
MUTUAL_BACKUP(2 , "互备");
|
||||
|
||||
private final int code;
|
||||
|
||||
private final String msg;
|
||||
|
||||
HaRelationTypeEnum(int code, String msg) {
|
||||
this.code = code;
|
||||
this.msg = msg;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,29 @@
|
||||
package com.xiaojukeji.kafka.manager.common.bizenum.ha;
|
||||
|
||||
import lombok.Getter;
|
||||
|
||||
/**
|
||||
* @author zengqiao
|
||||
* @date 20/7/28
|
||||
*/
|
||||
@Getter
|
||||
public enum HaResTypeEnum {
|
||||
CLUSTER(0, "Cluster"),
|
||||
|
||||
TOPIC(1, "Topic"),
|
||||
|
||||
KAFKA_USER(2, "KafkaUser"),
|
||||
|
||||
KAFKA_USER_AND_CLIENT(3, "KafkaUserAndClient"),
|
||||
|
||||
;
|
||||
|
||||
private final int code;
|
||||
|
||||
private final String msg;
|
||||
|
||||
HaResTypeEnum(int code, String msg) {
|
||||
this.code = code;
|
||||
this.msg = msg;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,75 @@
|
||||
package com.xiaojukeji.kafka.manager.common.bizenum.ha;
|
||||
|
||||
/**
|
||||
* @author zengqiao
|
||||
* @date 20/7/28
|
||||
*/
|
||||
public enum HaStatusEnum {
|
||||
UNKNOWN(-1, "未知状态"),
|
||||
|
||||
STABLE(HaStatusEnum.STABLE_CODE, "稳定状态"),
|
||||
|
||||
// SWITCHING(HaStatusEnum.SWITCHING_CODE, "切换中"),
|
||||
SWITCHING_PREPARE(
|
||||
HaStatusEnum.SWITCHING_PREPARE_CODE,
|
||||
"主备切换--源集群[%s]--预处理(阻止当前主Topic写入)"),
|
||||
|
||||
SWITCHING_WAITING_IN_SYNC(
|
||||
HaStatusEnum.SWITCHING_WAITING_IN_SYNC_CODE,
|
||||
"主备切换--目标集群[%s]--等待主与备Topic数据同步完成"),
|
||||
|
||||
SWITCHING_CLOSE_OLD_STANDBY_TOPIC_FETCH(
|
||||
HaStatusEnum.SWITCHING_CLOSE_OLD_STANDBY_TOPIC_FETCH_CODE,
|
||||
"主备切换--目标集群[%s]--关闭旧的备Topic的副本同步"),
|
||||
SWITCHING_OPEN_NEW_STANDBY_TOPIC_FETCH(
|
||||
HaStatusEnum.SWITCHING_OPEN_NEW_STANDBY_TOPIC_FETCH_CODE,
|
||||
"主备切换--源集群[%s]--开启新的备Topic的副本同步"),
|
||||
|
||||
SWITCHING_CLOSEOUT(
|
||||
HaStatusEnum.SWITCHING_CLOSEOUT_CODE,
|
||||
"主备切换--目标集群[%s]--收尾(允许新的主Topic写入)"),
|
||||
|
||||
;
|
||||
|
||||
public static final int UNKNOWN_CODE = -1;
|
||||
public static final int STABLE_CODE = 0;
|
||||
|
||||
public static final int SWITCHING_CODE = 100;
|
||||
public static final int SWITCHING_PREPARE_CODE = 101;
|
||||
|
||||
public static final int SWITCHING_WAITING_IN_SYNC_CODE = 102;
|
||||
public static final int SWITCHING_CLOSE_OLD_STANDBY_TOPIC_FETCH_CODE = 103;
|
||||
public static final int SWITCHING_OPEN_NEW_STANDBY_TOPIC_FETCH_CODE = 104;
|
||||
|
||||
public static final int SWITCHING_CLOSEOUT_CODE = 105;
|
||||
|
||||
|
||||
private final int code;
|
||||
|
||||
private final String msg;
|
||||
|
||||
public int getCode() {
|
||||
return code;
|
||||
}
|
||||
|
||||
public String getMsg(String clusterName) {
|
||||
if (this.code == UNKNOWN_CODE || this.code == STABLE_CODE) {
|
||||
return this.msg;
|
||||
}
|
||||
return String.format(msg, clusterName);
|
||||
}
|
||||
|
||||
HaStatusEnum(int code, String msg) {
|
||||
this.code = code;
|
||||
this.msg = msg;
|
||||
}
|
||||
|
||||
public static Integer calProgress(Integer status) {
|
||||
if (status == null || status == HaStatusEnum.STABLE_CODE || status == UNKNOWN_CODE) {
|
||||
return 100;
|
||||
}
|
||||
|
||||
// 最小进度为 1%
|
||||
return Math.max(1, (status - 101) * 100 / 5);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,44 @@
|
||||
package com.xiaojukeji.kafka.manager.common.bizenum.ha.job;
|
||||
|
||||
public enum HaJobActionEnum {
|
||||
/**
|
||||
*
|
||||
*/
|
||||
START(1,"start"),
|
||||
|
||||
STOP(2, "stop"),
|
||||
|
||||
CANCEL(3,"cancel"),
|
||||
|
||||
CONTINUE(4,"continue"),
|
||||
|
||||
UNKNOWN(-1, "unknown");
|
||||
|
||||
HaJobActionEnum(int status, String value) {
|
||||
this.status = status;
|
||||
this.value = value;
|
||||
}
|
||||
|
||||
private final int status;
|
||||
|
||||
private final String value;
|
||||
|
||||
public int getStatus() {
|
||||
return status;
|
||||
}
|
||||
|
||||
public String getValue() {
|
||||
return value;
|
||||
}
|
||||
|
||||
public static HaJobActionEnum valueOfStatus(int status) {
|
||||
for (HaJobActionEnum statusEnum : HaJobActionEnum.values()) {
|
||||
if (status == statusEnum.getStatus()) {
|
||||
return statusEnum;
|
||||
}
|
||||
}
|
||||
|
||||
return HaJobActionEnum.UNKNOWN;
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,75 @@
|
||||
package com.xiaojukeji.kafka.manager.common.bizenum.ha.job;
|
||||
|
||||
import com.xiaojukeji.kafka.manager.common.bizenum.TaskStatusEnum;
|
||||
|
||||
public enum HaJobStatusEnum {
|
||||
/**执行中*/
|
||||
RUNNING(TaskStatusEnum.RUNNING),
|
||||
RUNNING_IN_TIMEOUT(TaskStatusEnum.RUNNING_IN_TIMEOUT),
|
||||
|
||||
SUCCESS(TaskStatusEnum.SUCCEED),
|
||||
|
||||
FAILED(TaskStatusEnum.FAILED),
|
||||
|
||||
UNKNOWN(TaskStatusEnum.UNKNOWN);
|
||||
|
||||
HaJobStatusEnum(TaskStatusEnum taskStatusEnum) {
|
||||
this.status = taskStatusEnum.getCode();
|
||||
this.value = taskStatusEnum.getMessage();
|
||||
}
|
||||
|
||||
private final int status;
|
||||
|
||||
private final String value;
|
||||
|
||||
public int getStatus() {
|
||||
return status;
|
||||
}
|
||||
|
||||
public String getValue() {
|
||||
return value;
|
||||
}
|
||||
|
||||
public static HaJobStatusEnum valueOfStatus(int status) {
|
||||
for (HaJobStatusEnum statusEnum : HaJobStatusEnum.values()) {
|
||||
if (status == statusEnum.getStatus()) {
|
||||
return statusEnum;
|
||||
}
|
||||
}
|
||||
|
||||
return HaJobStatusEnum.UNKNOWN;
|
||||
}
|
||||
|
||||
public static HaJobStatusEnum getStatusBySubStatus(int totalJobNum,
|
||||
int successJobNu,
|
||||
int failedJobNu,
|
||||
int runningJobNu,
|
||||
int runningInTimeoutJobNu,
|
||||
int unknownJobNu) {
|
||||
if (unknownJobNu > 0) {
|
||||
return UNKNOWN;
|
||||
}
|
||||
|
||||
if((failedJobNu + runningJobNu + runningInTimeoutJobNu + unknownJobNu) == 0) {
|
||||
return SUCCESS;
|
||||
}
|
||||
|
||||
if((runningJobNu + runningInTimeoutJobNu + unknownJobNu) == 0 && failedJobNu > 0) {
|
||||
return FAILED;
|
||||
}
|
||||
|
||||
if (runningInTimeoutJobNu > 0) {
|
||||
return RUNNING_IN_TIMEOUT;
|
||||
}
|
||||
|
||||
return RUNNING;
|
||||
}
|
||||
|
||||
public static boolean isRunning(Integer jobStatus) {
|
||||
return jobStatus != null && (RUNNING.status == jobStatus || RUNNING_IN_TIMEOUT.status == jobStatus);
|
||||
}
|
||||
|
||||
public static boolean isFinished(Integer jobStatus) {
|
||||
return jobStatus != null && (SUCCESS.status == jobStatus || FAILED.status == jobStatus);
|
||||
}
|
||||
}
|
||||
@@ -20,12 +20,6 @@ public class ApiPrefix {
|
||||
// open
|
||||
public static final String API_V1_THIRD_PART_PREFIX = API_V1_PREFIX + "third-part/";
|
||||
|
||||
// 开放给OP的接口, 后续对 应的接口的集群都需要是物理集群
|
||||
public static final String API_V1_THIRD_PART_OP_PREFIX = API_V1_THIRD_PART_PREFIX + "op/";
|
||||
|
||||
// 开放给Normal的接口, 后续对应的接口的集群,都需要是逻辑集群
|
||||
public static final String API_V1_THIRD_PART_NORMAL_PREFIX = API_V1_THIRD_PART_PREFIX + "normal/";
|
||||
|
||||
// gateway
|
||||
public static final String GATEWAY_API_V1_PREFIX = "/gateway" + API_V1_PREFIX;
|
||||
|
||||
|
||||
@@ -31,6 +31,10 @@ public class ConfigConstant {
|
||||
|
||||
public static final String KAFKA_CLUSTER_DO_CONFIG_KEY = "KAFKA_CLUSTER_DO_CONFIG";
|
||||
|
||||
public static final String HA_SWITCH_JOB_TIMEOUT_UNIT_SEC_CONFIG_PREFIX = "HA_SWITCH_JOB_TIMEOUT_UNIT_SEC_CONFIG_CLUSTER";
|
||||
|
||||
public static final String HA_CONNECTION_ACTIVE_TIME_UNIT_MIN = "HA_CONNECTION_ACTIVE_TIME_UNIT_MIN";
|
||||
|
||||
private ConfigConstant() {
|
||||
}
|
||||
}
|
||||
|
||||
@@ -9,7 +9,7 @@ public class Constant {
|
||||
|
||||
public static final Integer MAX_AVG_BYTES_DURATION = 10;
|
||||
|
||||
public static final Integer BATCH_INSERT_SIZE = 50;
|
||||
public static final Integer BATCH_INSERT_SIZE = 30;
|
||||
|
||||
public static final Integer DEFAULT_SESSION_TIMEOUT_UNIT_MS = 30000;
|
||||
|
||||
|
||||
@@ -17,6 +17,36 @@ public class KafkaConstant {
|
||||
|
||||
public static final String RETENTION_MS_KEY = "retention.ms";
|
||||
|
||||
public static final String EXTERNAL_KEY = "EXTERNAL";
|
||||
|
||||
public static final String INTERNAL_KEY = "INTERNAL";
|
||||
|
||||
public static final String BOOTSTRAP_SERVERS = "bootstrap.servers";
|
||||
|
||||
|
||||
/**
|
||||
* HA
|
||||
*/
|
||||
|
||||
public static final String DIDI_KAFKA_ENABLE = "didi.kafka.enable";
|
||||
|
||||
public static final String DIDI_HA_REMOTE_CLUSTER = "didi.ha.remote.cluster";
|
||||
|
||||
// TODO 平台来管理配置,不需要底层来管理,因此可以删除该配置
|
||||
public static final String DIDI_HA_SYNC_TOPIC_CONFIGS_ENABLED = "didi.ha.sync.topic.configs.enabled";
|
||||
|
||||
public static final String DIDI_HA_ACTIVE_CLUSTER = "didi.ha.active.cluster";
|
||||
|
||||
public static final String DIDI_HA_REMOTE_TOPIC = "didi.ha.remote.topic";
|
||||
|
||||
public static final String SECURITY_PROTOCOL = "security.protocol";
|
||||
|
||||
public static final String SASL_MECHANISM = "sasl.mechanism";
|
||||
|
||||
public static final String SASL_JAAS_CONFIG = "sasl.jaas.config";
|
||||
|
||||
public static final String NONE = "None";
|
||||
|
||||
private KafkaConstant() {
|
||||
}
|
||||
}
|
||||
@@ -1,16 +0,0 @@
|
||||
package com.xiaojukeji.kafka.manager.common.constant;
|
||||
|
||||
/**
|
||||
* @author zengqiao
|
||||
* @date 20/8/10
|
||||
*/
|
||||
public class LogConstant {
|
||||
public static final String COLLECTOR_METRICS_LOGGER = "COLLECTOR_METRICS_LOGGER";
|
||||
|
||||
public static final String API_METRICS_LOGGER = "API_METRICS_LOGGER";
|
||||
|
||||
public static final String SCHEDULED_TASK_LOGGER = "SCHEDULED_TASK_LOGGER";
|
||||
|
||||
private LogConstant() {
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,96 @@
|
||||
package com.xiaojukeji.kafka.manager.common.constant;
|
||||
|
||||
/**
|
||||
* 信息模版Constant
|
||||
* @author zengqiao
|
||||
* @date 22/03/03
|
||||
*/
|
||||
public class MsgConstant {
|
||||
private MsgConstant() {
|
||||
}
|
||||
|
||||
/**************************************************** Cluster ****************************************************/
|
||||
|
||||
public static String getClusterBizStr(Long clusterPhyId, String clusterName){
|
||||
return String.format("集群ID:[%d] 集群名称:[%s]", clusterPhyId, clusterName);
|
||||
}
|
||||
|
||||
public static String getClusterPhyNotExist(Long clusterPhyId) {
|
||||
return String.format("集群ID:[%d] 不存在或者未加载", clusterPhyId);
|
||||
}
|
||||
|
||||
|
||||
|
||||
/**************************************************** Broker ****************************************************/
|
||||
|
||||
public static String getBrokerNotExist(Long clusterPhyId, Integer brokerId) {
|
||||
return String.format("集群ID:[%d] brokerId:[%d] 不存在或未存活", clusterPhyId, brokerId);
|
||||
}
|
||||
|
||||
public static String getBrokerBizStr(Long clusterPhyId, Integer brokerId) {
|
||||
return String.format("集群ID:[%d] brokerId:[%d]", clusterPhyId, brokerId);
|
||||
}
|
||||
|
||||
|
||||
/**************************************************** Topic ****************************************************/
|
||||
|
||||
public static String getTopicNotExist(Long clusterPhyId, String topicName) {
|
||||
return String.format("集群ID:[%d] Topic名称:[%s] 不存在", clusterPhyId, topicName);
|
||||
}
|
||||
|
||||
public static String getTopicBizStr(Long clusterPhyId, String topicName) {
|
||||
return String.format("集群ID:[%d] Topic名称:[%s]", clusterPhyId, topicName);
|
||||
}
|
||||
|
||||
public static String getTopicExtend(Long existPartitionNum, Long totalPartitionNum,String expandParam){
|
||||
return String.format("新增分区, 从:[%d] 增加到:[%d], 详细参数信息:[%s]", existPartitionNum,totalPartitionNum,expandParam);
|
||||
}
|
||||
|
||||
public static String getClusterTopicKey(Long clusterPhyId, String topicName) {
|
||||
return String.format("%d@%s", clusterPhyId, topicName);
|
||||
}
|
||||
|
||||
/**************************************************** Partition ****************************************************/
|
||||
|
||||
public static String getPartitionNotExist(Long clusterPhyId, String topicName) {
|
||||
return String.format("集群ID:[%d] Topic名称:[%s] 存在非法的分区ID", clusterPhyId, topicName);
|
||||
}
|
||||
|
||||
public static String getPartitionNotExist(Long clusterPhyId, String topicName, Integer partitionId) {
|
||||
return String.format("集群ID:[%d] Topic名称:[%s] 分区Id:[%d] 不存在", clusterPhyId, topicName, partitionId);
|
||||
}
|
||||
|
||||
/**************************************************** KafkaUser ****************************************************/
|
||||
|
||||
public static String getKafkaUserBizStr(Long clusterPhyId, String kafkaUser) {
|
||||
return String.format("集群ID:[%d] kafkaUser:[%s]", clusterPhyId, kafkaUser);
|
||||
}
|
||||
|
||||
public static String getKafkaUserNotExist(Long clusterPhyId, String kafkaUser) {
|
||||
return String.format("集群ID:[%d] kafkaUser:[%s] 不存在", clusterPhyId, kafkaUser);
|
||||
}
|
||||
|
||||
public static String getKafkaUserDuplicate(Long clusterPhyId, String kafkaUser) {
|
||||
return String.format("集群ID:[%d] kafkaUser:[%s] 已存在", clusterPhyId, kafkaUser);
|
||||
}
|
||||
|
||||
/**************************************************** ha-Cluster ****************************************************/
|
||||
|
||||
public static String getActiveClusterDuplicate(Long clusterPhyId, String clusterName) {
|
||||
return String.format("集群ID:[%d] 主集群:[%s] 已存在", clusterPhyId, clusterName);
|
||||
}
|
||||
|
||||
/**************************************************** reassign ****************************************************/
|
||||
|
||||
public static String getReassignJobBizStr(Long jobId, Long clusterPhyId) {
|
||||
return String.format("任务Id:[%d] 集群ID:[%s]", jobId, clusterPhyId);
|
||||
}
|
||||
|
||||
public static String getJobIdCanNotNull() {
|
||||
return "jobId不允许为空";
|
||||
}
|
||||
|
||||
public static String getJobNotExist(Long jobId) {
|
||||
return String.format("jobId:[%d] 不存在", jobId);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,28 @@
|
||||
package com.xiaojukeji.kafka.manager.common.entity;
|
||||
|
||||
import com.xiaojukeji.kafka.manager.common.constant.Constant;
|
||||
import io.swagger.annotations.ApiModelProperty;
|
||||
import lombok.Data;
|
||||
import lombok.ToString;
|
||||
|
||||
import java.io.Serializable;
|
||||
|
||||
@Data
|
||||
@ToString
|
||||
public class BaseResult implements Serializable {
|
||||
private static final long serialVersionUID = -5771016784021901099L;
|
||||
|
||||
@ApiModelProperty(value = "信息", example = "成功")
|
||||
protected String message;
|
||||
|
||||
@ApiModelProperty(value = "状态", example = "0")
|
||||
protected int code;
|
||||
|
||||
public boolean successful() {
|
||||
return !this.failed();
|
||||
}
|
||||
|
||||
public boolean failed() {
|
||||
return !Constant.SUCCESS.equals(code);
|
||||
}
|
||||
}
|
||||
@@ -1,21 +1,23 @@
|
||||
package com.xiaojukeji.kafka.manager.common.entity;
|
||||
|
||||
import com.alibaba.fastjson.JSON;
|
||||
import com.xiaojukeji.kafka.manager.common.constant.Constant;
|
||||
|
||||
import java.io.Serializable;
|
||||
import io.swagger.annotations.ApiModel;
|
||||
import io.swagger.annotations.ApiModelProperty;
|
||||
import lombok.Data;
|
||||
|
||||
/**
|
||||
* @author huangyiminghappy@163.com
|
||||
* @date 2019-07-08
|
||||
*/
|
||||
public class Result<T> implements Serializable {
|
||||
private static final long serialVersionUID = -2772975319944108658L;
|
||||
@Data
|
||||
@ApiModel(description = "调用结果")
|
||||
public class Result<T> extends BaseResult {
|
||||
@ApiModelProperty(value = "数据")
|
||||
protected T data;
|
||||
|
||||
private T data;
|
||||
private String message;
|
||||
private String tips;
|
||||
private int code;
|
||||
public Result() {
|
||||
this.code = ResultStatus.SUCCESS.getCode();
|
||||
this.message = ResultStatus.SUCCESS.getMessage();
|
||||
}
|
||||
|
||||
public Result(T data) {
|
||||
this.data = data;
|
||||
@@ -23,10 +25,6 @@ public class Result<T> implements Serializable {
|
||||
this.message = ResultStatus.SUCCESS.getMessage();
|
||||
}
|
||||
|
||||
public Result() {
|
||||
this(null);
|
||||
}
|
||||
|
||||
public Result(Integer code, String message) {
|
||||
this.message = message;
|
||||
this.code = code;
|
||||
@@ -38,98 +36,135 @@ public class Result<T> implements Serializable {
|
||||
this.code = code;
|
||||
}
|
||||
|
||||
public T getData()
|
||||
{
|
||||
return (T)this.data;
|
||||
public static <T> Result<T> build(boolean succ) {
|
||||
if (succ) {
|
||||
return buildSuc();
|
||||
}
|
||||
return buildFail();
|
||||
}
|
||||
|
||||
public void setData(T data)
|
||||
{
|
||||
this.data = data;
|
||||
public static <T> Result<T> buildFail() {
|
||||
Result<T> result = new Result<>();
|
||||
result.setCode(ResultStatus.FAIL.getCode());
|
||||
result.setMessage(ResultStatus.FAIL.getMessage());
|
||||
return result;
|
||||
}
|
||||
|
||||
public String getMessage()
|
||||
{
|
||||
return this.message;
|
||||
public static <T> Result<T> build(boolean succ, T data) {
|
||||
Result<T> result = new Result<>();
|
||||
if (succ) {
|
||||
result.setCode(ResultStatus.SUCCESS.getCode());
|
||||
result.setMessage(ResultStatus.SUCCESS.getMessage());
|
||||
result.setData(data);
|
||||
} else {
|
||||
result.setCode(ResultStatus.FAIL.getCode());
|
||||
result.setMessage(ResultStatus.FAIL.getMessage());
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
public void setMessage(String message)
|
||||
{
|
||||
this.message = message;
|
||||
}
|
||||
|
||||
public String getTips() {
|
||||
return tips;
|
||||
}
|
||||
|
||||
public void setTips(String tips) {
|
||||
this.tips = tips;
|
||||
}
|
||||
|
||||
public int getCode()
|
||||
{
|
||||
return this.code;
|
||||
}
|
||||
|
||||
public void setCode(int code)
|
||||
{
|
||||
this.code = code;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString()
|
||||
{
|
||||
return JSON.toJSONString(this);
|
||||
}
|
||||
|
||||
public static Result buildSuc() {
|
||||
Result result = new Result();
|
||||
public static <T> Result<T> buildSuc() {
|
||||
Result<T> result = new Result<>();
|
||||
result.setCode(ResultStatus.SUCCESS.getCode());
|
||||
result.setMessage(ResultStatus.SUCCESS.getMessage());
|
||||
return result;
|
||||
}
|
||||
|
||||
public static <T> Result<T> buildSuc(T data) {
|
||||
Result<T> result = new Result<T>();
|
||||
Result<T> result = new Result<>();
|
||||
result.setCode(ResultStatus.SUCCESS.getCode());
|
||||
result.setMessage(ResultStatus.SUCCESS.getMessage());
|
||||
result.setData(data);
|
||||
return result;
|
||||
}
|
||||
|
||||
public static <T> Result<T> buildGatewayFailure(String message) {
|
||||
Result<T> result = new Result<T>();
|
||||
result.setCode(ResultStatus.GATEWAY_INVALID_REQUEST.getCode());
|
||||
result.setMessage(message);
|
||||
result.setData(null);
|
||||
return result;
|
||||
}
|
||||
|
||||
public static <T> Result<T> buildFailure(String message) {
|
||||
Result<T> result = new Result<T>();
|
||||
Result<T> result = new Result<>();
|
||||
result.setCode(ResultStatus.FAIL.getCode());
|
||||
result.setMessage(message);
|
||||
result.setData(null);
|
||||
return result;
|
||||
}
|
||||
|
||||
public static Result buildFrom(ResultStatus resultStatus) {
|
||||
Result result = new Result();
|
||||
result.setCode(resultStatus.getCode());
|
||||
result.setMessage(resultStatus.getMessage());
|
||||
public static <T> Result<T> buildFailure(String message, T data) {
|
||||
Result<T> result = new Result<>();
|
||||
result.setCode(ResultStatus.FAIL.getCode());
|
||||
result.setMessage(message);
|
||||
result.setData(data);
|
||||
return result;
|
||||
}
|
||||
|
||||
public static Result buildFrom(ResultStatus resultStatus, Object data) {
|
||||
Result result = new Result();
|
||||
public static <T> Result<T> buildFailure(ResultStatus rs) {
|
||||
Result<T> result = new Result<>();
|
||||
result.setCode(rs.getCode());
|
||||
result.setMessage(rs.getMessage());
|
||||
result.setData(null);
|
||||
return result;
|
||||
}
|
||||
|
||||
public static <T> Result<T> buildGatewayFailure(String message) {
|
||||
Result<T> result = new Result<>();
|
||||
result.setCode(ResultStatus.GATEWAY_INVALID_REQUEST.getCode());
|
||||
result.setMessage(message);
|
||||
result.setData(null);
|
||||
return result;
|
||||
}
|
||||
|
||||
public static <T> Result<T> buildFrom(ResultStatus rs) {
|
||||
Result<T> result = new Result<>();
|
||||
result.setCode(rs.getCode());
|
||||
result.setMessage(rs.getMessage());
|
||||
return result;
|
||||
}
|
||||
|
||||
public static <T> Result<T> buildFrom(ResultStatus resultStatus, T data) {
|
||||
Result<T> result = new Result<>();
|
||||
result.setCode(resultStatus.getCode());
|
||||
result.setMessage(resultStatus.getMessage());
|
||||
result.setData(data);
|
||||
return result;
|
||||
}
|
||||
|
||||
public boolean failed() {
|
||||
return !Constant.SUCCESS.equals(code);
|
||||
public static <T> Result<T> buildFromRSAndMsg(ResultStatus resultStatus, String message) {
|
||||
Result<T> result = new Result<>();
|
||||
result.setCode(resultStatus.getCode());
|
||||
result.setMessage(message);
|
||||
result.setData(null);
|
||||
return result;
|
||||
}
|
||||
|
||||
public static <T> Result<T> buildFromRSAndData(ResultStatus rs, T data) {
|
||||
Result<T> result = new Result<>();
|
||||
result.setCode(rs.getCode());
|
||||
result.setMessage(rs.getMessage());
|
||||
result.setData(data);
|
||||
return result;
|
||||
}
|
||||
|
||||
public static <T, U> Result<T> buildFromIgnoreData(Result<U> anotherResult) {
|
||||
Result<T> result = new Result<>();
|
||||
result.setCode(anotherResult.getCode());
|
||||
result.setMessage(anotherResult.getMessage());
|
||||
return result;
|
||||
}
|
||||
|
||||
public static <T> Result<T> buildParamIllegal(String msg) {
|
||||
Result<T> result = new Result<>();
|
||||
result.setCode(ResultStatus.PARAM_ILLEGAL.getCode());
|
||||
result.setMessage(ResultStatus.PARAM_ILLEGAL.getMessage() + ":" + msg + ",请检查后再提交!");
|
||||
return result;
|
||||
}
|
||||
|
||||
public boolean hasData(){
|
||||
return !failed() && this.data != null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "Result{" +
|
||||
"message='" + message + '\'' +
|
||||
", code=" + code +
|
||||
", data=" + data +
|
||||
'}';
|
||||
}
|
||||
}
|
||||
|
||||
@@ -23,6 +23,8 @@ public enum ResultStatus {
|
||||
API_CALL_EXCEED_LIMIT(1403, "api call exceed limit"),
|
||||
USER_WITHOUT_AUTHORITY(1404, "user without authority"),
|
||||
CHANGE_ZOOKEEPER_FORBIDDEN(1405, "change zookeeper forbidden"),
|
||||
HA_CLUSTER_DELETE_FORBIDDEN(1409, "先删除主topic,才能删除该集群"),
|
||||
HA_TOPIC_DELETE_FORBIDDEN(1410, "先解除高可用关系,才能删除该topic"),
|
||||
|
||||
|
||||
APP_OFFLINE_FORBIDDEN(1406, "先下线topic,才能下线应用~"),
|
||||
@@ -76,6 +78,8 @@ public enum ResultStatus {
|
||||
QUOTA_NOT_EXIST(7113, "quota not exist, please check clusterId, topicName and appId"),
|
||||
CONSUMER_GROUP_NOT_EXIST(7114, "consumerGroup not exist"),
|
||||
TOPIC_BIZ_DATA_NOT_EXIST(7115, "topic biz data not exist, please sync topic to db"),
|
||||
SD_ZK_NOT_EXIST(7116, "SD_ZK未配置"),
|
||||
|
||||
|
||||
// Resource already exists
|
||||
RESOURCE_ALREADY_EXISTED(7200, "资源已经存在"),
|
||||
@@ -88,6 +92,7 @@ public enum ResultStatus {
|
||||
RESOURCE_ALREADY_USED(7400, "资源早已被使用"),
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* Errors caused by external systems during operations, [8000, 9000)
|
||||
* ------------------------------------------------------------------------------------------
|
||||
@@ -98,6 +103,7 @@ public enum ResultStatus {
|
||||
ZOOKEEPER_READ_FAILED(8021, "zookeeper read failed"),
|
||||
ZOOKEEPER_WRITE_FAILED(8022, "zookeeper write failed"),
|
||||
ZOOKEEPER_DELETE_FAILED(8023, "zookeeper delete failed"),
|
||||
ZOOKEEPER_OPERATE_FAILED(8024, "zookeeper operate failed"),
|
||||
|
||||
// Failed to call the agent inside the cluster task
|
||||
CALL_CLUSTER_TASK_AGENT_FAILED(8030, " call cluster task agent failed"),
|
||||
|
||||
@@ -1,11 +1,14 @@
|
||||
package com.xiaojukeji.kafka.manager.common.entity.ao;
|
||||
|
||||
import lombok.Data;
|
||||
|
||||
import java.util.Date;
|
||||
|
||||
/**
|
||||
* @author zengqiao
|
||||
* @date 20/4/23
|
||||
*/
|
||||
@Data
|
||||
public class ClusterDetailDTO {
|
||||
private Long clusterId;
|
||||
|
||||
@@ -41,141 +44,9 @@ public class ClusterDetailDTO {
|
||||
|
||||
private Integer regionNum;
|
||||
|
||||
public Long getClusterId() {
|
||||
return clusterId;
|
||||
}
|
||||
private Integer haRelation;
|
||||
|
||||
public void setClusterId(Long clusterId) {
|
||||
this.clusterId = clusterId;
|
||||
}
|
||||
|
||||
public String getClusterName() {
|
||||
return clusterName;
|
||||
}
|
||||
|
||||
public void setClusterName(String clusterName) {
|
||||
this.clusterName = clusterName;
|
||||
}
|
||||
|
||||
public String getZookeeper() {
|
||||
return zookeeper;
|
||||
}
|
||||
|
||||
public void setZookeeper(String zookeeper) {
|
||||
this.zookeeper = zookeeper;
|
||||
}
|
||||
|
||||
public String getBootstrapServers() {
|
||||
return bootstrapServers;
|
||||
}
|
||||
|
||||
public void setBootstrapServers(String bootstrapServers) {
|
||||
this.bootstrapServers = bootstrapServers;
|
||||
}
|
||||
|
||||
public String getKafkaVersion() {
|
||||
return kafkaVersion;
|
||||
}
|
||||
|
||||
public void setKafkaVersion(String kafkaVersion) {
|
||||
this.kafkaVersion = kafkaVersion;
|
||||
}
|
||||
|
||||
public String getIdc() {
|
||||
return idc;
|
||||
}
|
||||
|
||||
public void setIdc(String idc) {
|
||||
this.idc = idc;
|
||||
}
|
||||
|
||||
public Integer getMode() {
|
||||
return mode;
|
||||
}
|
||||
|
||||
public void setMode(Integer mode) {
|
||||
this.mode = mode;
|
||||
}
|
||||
|
||||
public String getSecurityProperties() {
|
||||
return securityProperties;
|
||||
}
|
||||
|
||||
public void setSecurityProperties(String securityProperties) {
|
||||
this.securityProperties = securityProperties;
|
||||
}
|
||||
|
||||
public String getJmxProperties() {
|
||||
return jmxProperties;
|
||||
}
|
||||
|
||||
public void setJmxProperties(String jmxProperties) {
|
||||
this.jmxProperties = jmxProperties;
|
||||
}
|
||||
|
||||
public Integer getStatus() {
|
||||
return status;
|
||||
}
|
||||
|
||||
public void setStatus(Integer status) {
|
||||
this.status = status;
|
||||
}
|
||||
|
||||
public Date getGmtCreate() {
|
||||
return gmtCreate;
|
||||
}
|
||||
|
||||
public void setGmtCreate(Date gmtCreate) {
|
||||
this.gmtCreate = gmtCreate;
|
||||
}
|
||||
|
||||
public Date getGmtModify() {
|
||||
return gmtModify;
|
||||
}
|
||||
|
||||
public void setGmtModify(Date gmtModify) {
|
||||
this.gmtModify = gmtModify;
|
||||
}
|
||||
|
||||
public Integer getBrokerNum() {
|
||||
return brokerNum;
|
||||
}
|
||||
|
||||
public void setBrokerNum(Integer brokerNum) {
|
||||
this.brokerNum = brokerNum;
|
||||
}
|
||||
|
||||
public Integer getTopicNum() {
|
||||
return topicNum;
|
||||
}
|
||||
|
||||
public void setTopicNum(Integer topicNum) {
|
||||
this.topicNum = topicNum;
|
||||
}
|
||||
|
||||
public Integer getConsumerGroupNum() {
|
||||
return consumerGroupNum;
|
||||
}
|
||||
|
||||
public void setConsumerGroupNum(Integer consumerGroupNum) {
|
||||
this.consumerGroupNum = consumerGroupNum;
|
||||
}
|
||||
|
||||
public Integer getControllerId() {
|
||||
return controllerId;
|
||||
}
|
||||
|
||||
public void setControllerId(Integer controllerId) {
|
||||
this.controllerId = controllerId;
|
||||
}
|
||||
|
||||
public Integer getRegionNum() {
|
||||
return regionNum;
|
||||
}
|
||||
|
||||
public void setRegionNum(Integer regionNum) {
|
||||
this.regionNum = regionNum;
|
||||
}
|
||||
private String mutualBackupClusterName;
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
@@ -197,6 +68,8 @@ public class ClusterDetailDTO {
|
||||
", consumerGroupNum=" + consumerGroupNum +
|
||||
", controllerId=" + controllerId +
|
||||
", regionNum=" + regionNum +
|
||||
", haRelation=" + haRelation +
|
||||
", mutualBackupClusterName='" + mutualBackupClusterName + '\'' +
|
||||
'}';
|
||||
}
|
||||
}
|
||||
@@ -1,5 +1,7 @@
|
||||
package com.xiaojukeji.kafka.manager.common.entity.ao;
|
||||
|
||||
import lombok.Data;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Properties;
|
||||
|
||||
@@ -7,6 +9,7 @@ import java.util.Properties;
|
||||
* @author zengqiao
|
||||
* @date 20/6/10
|
||||
*/
|
||||
@Data
|
||||
public class RdTopicBasic {
|
||||
private Long clusterId;
|
||||
|
||||
@@ -26,77 +29,7 @@ public class RdTopicBasic {
|
||||
|
||||
private List<String> regionNameList;
|
||||
|
||||
public Long getClusterId() {
|
||||
return clusterId;
|
||||
}
|
||||
|
||||
public void setClusterId(Long clusterId) {
|
||||
this.clusterId = clusterId;
|
||||
}
|
||||
|
||||
public String getClusterName() {
|
||||
return clusterName;
|
||||
}
|
||||
|
||||
public void setClusterName(String clusterName) {
|
||||
this.clusterName = clusterName;
|
||||
}
|
||||
|
||||
public String getTopicName() {
|
||||
return topicName;
|
||||
}
|
||||
|
||||
public void setTopicName(String topicName) {
|
||||
this.topicName = topicName;
|
||||
}
|
||||
|
||||
public Long getRetentionTime() {
|
||||
return retentionTime;
|
||||
}
|
||||
|
||||
public void setRetentionTime(Long retentionTime) {
|
||||
this.retentionTime = retentionTime;
|
||||
}
|
||||
|
||||
public String getAppId() {
|
||||
return appId;
|
||||
}
|
||||
|
||||
public void setAppId(String appId) {
|
||||
this.appId = appId;
|
||||
}
|
||||
|
||||
public String getAppName() {
|
||||
return appName;
|
||||
}
|
||||
|
||||
public void setAppName(String appName) {
|
||||
this.appName = appName;
|
||||
}
|
||||
|
||||
public Properties getProperties() {
|
||||
return properties;
|
||||
}
|
||||
|
||||
public void setProperties(Properties properties) {
|
||||
this.properties = properties;
|
||||
}
|
||||
|
||||
public String getDescription() {
|
||||
return description;
|
||||
}
|
||||
|
||||
public void setDescription(String description) {
|
||||
this.description = description;
|
||||
}
|
||||
|
||||
public List<String> getRegionNameList() {
|
||||
return regionNameList;
|
||||
}
|
||||
|
||||
public void setRegionNameList(List<String> regionNameList) {
|
||||
this.regionNameList = regionNameList;
|
||||
}
|
||||
private Integer haRelation;
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
@@ -109,7 +42,8 @@ public class RdTopicBasic {
|
||||
", appName='" + appName + '\'' +
|
||||
", properties=" + properties +
|
||||
", description='" + description + '\'' +
|
||||
", regionNameList='" + regionNameList + '\'' +
|
||||
", regionNameList=" + regionNameList +
|
||||
", haRelation=" + haRelation +
|
||||
'}';
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,40 @@
package com.xiaojukeji.kafka.manager.common.entity.ao.common;

import lombok.Getter;

import java.util.concurrent.Delayed;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

@Getter
public class FutureTaskDelayQueueData<T> implements Delayed {
    private final String taskName;

    private final Future<T> futureTask;

    private final long timeoutTimeUnitMs;

    private final long createTimeUnitMs;

    public FutureTaskDelayQueueData(String taskName, Future<T> futureTask, long timeoutTimeUnitMs) {
        this.taskName = taskName;
        this.futureTask = futureTask;
        this.timeoutTimeUnitMs = timeoutTimeUnitMs;
        this.createTimeUnitMs = System.currentTimeMillis();
    }

    @Override
    public long getDelay(TimeUnit unit) {
        return unit.convert(timeoutTimeUnitMs - System.currentTimeMillis(), TimeUnit.MILLISECONDS);
    }

    @Override
    public int compareTo(Delayed delayed) {
        FutureTaskDelayQueueData<T> other = (FutureTaskDelayQueueData<T>) delayed;
        if (this.timeoutTimeUnitMs == other.timeoutTimeUnitMs) {
            return (this.timeoutTimeUnitMs + "_" + this.createTimeUnitMs).compareTo((other.timeoutTimeUnitMs + "_" + other.createTimeUnitMs));
        }

        return (this.timeoutTimeUnitMs - other.timeoutTimeUnitMs) <= 0 ? -1: 1;
    }
}
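FutureTaskDelayQueueData plugs a Future into java.util.concurrent.DelayQueue: getDelay() reads timeoutTimeUnitMs as an absolute epoch-millisecond deadline, so an element surfaces from the queue once that instant has passed. A minimal reaper sketch under that reading; the pool, task name and payload are illustrative only.

import java.util.concurrent.DelayQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

public class FutureTaskReaperSketch {
    public static void main(String[] args) throws InterruptedException {
        ExecutorService pool = Executors.newFixedThreadPool(2);
        DelayQueue<FutureTaskDelayQueueData<String>> queue = new DelayQueue<>();

        // The third argument is read by getDelay() as an absolute deadline in epoch millis.
        Future<String> future = pool.submit(() -> "metrics-payload");
        queue.put(new FutureTaskDelayQueueData<>("collect-broker-metrics", future, System.currentTimeMillis() + 5_000L));

        // take() blocks until some element's deadline has passed; cancel whatever is still running.
        FutureTaskDelayQueueData<String> expired = queue.take();
        if (!expired.getFutureTask().isDone()) {
            expired.getFutureTask().cancel(true);
        }
        pool.shutdown();
    }
}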
@@ -0,0 +1,18 @@
package com.xiaojukeji.kafka.manager.common.entity.ao.common;

import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;

import java.io.Serializable;

@Data
@NoArgsConstructor
@AllArgsConstructor
public class IpPortData implements Serializable {
    private static final long serialVersionUID = -428897032994630685L;

    private String ip;

    private String port;
}
@@ -10,6 +10,8 @@ import java.util.List;
|
||||
public class TopicExpiredConfig {
|
||||
private Integer minExpiredDay = 30;
|
||||
|
||||
private String filterRegex = "";
|
||||
|
||||
private List<Long> ignoreClusterIdList = new ArrayList<>();
|
||||
|
||||
public Integer getMinExpiredDay() {
|
||||
@@ -28,10 +30,19 @@ public class TopicExpiredConfig {
|
||||
this.ignoreClusterIdList = ignoreClusterIdList;
|
||||
}
|
||||
|
||||
public String getFilterRegex() {
|
||||
return filterRegex;
|
||||
}
|
||||
|
||||
public void setFilterRegex(String filterRegex) {
|
||||
this.filterRegex = filterRegex;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "TopicExpiredConfig{" +
|
||||
"minExpiredDay=" + minExpiredDay +
|
||||
", filterRegex='" + filterRegex + '\'' +
|
||||
", ignoreClusterIdList=" + ignoreClusterIdList +
|
||||
'}';
|
||||
}
|
||||
|
||||
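One plausible way the new filterRegex field could be consumed when screening expired topics; how the project actually applies it is not part of this hunk, and getIgnoreClusterIdList() is assumed from the existing field.

// Sketch: skip topics whose names match filterRegex or whose cluster is ignored.
public static boolean shouldCheckExpired(TopicExpiredConfig config, Long clusterId, String topicName) {
    if (config.getIgnoreClusterIdList().contains(clusterId)) {
        return false;
    }
    String regex = config.getFilterRegex();
    if (regex != null && !regex.isEmpty() && topicName.matches(regex)) {
        return false;
    }
    return true;
}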
@@ -0,0 +1,54 @@
|
||||
package com.xiaojukeji.kafka.manager.common.entity.ao.ha;
|
||||
|
||||
import com.xiaojukeji.kafka.manager.common.bizenum.ha.HaStatusEnum;
|
||||
import lombok.Data;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
@Data
|
||||
public class HaSwitchTopic {
|
||||
/**
|
||||
* Whether the switch has finished
|
||||
*/
|
||||
private boolean finished;
|
||||
|
||||
/**
|
||||
* Status of each Topic
|
||||
*/
|
||||
private Map<String, Integer> activeTopicSwitchStatusMap;
|
||||
|
||||
public HaSwitchTopic(boolean finished) {
|
||||
this.finished = finished;
|
||||
this.activeTopicSwitchStatusMap = new HashMap<>();
|
||||
}
|
||||
|
||||
public void addHaSwitchTopic(HaSwitchTopic haSwitchTopic) {
|
||||
this.finished &= haSwitchTopic.finished;
|
||||
}
|
||||
|
||||
public boolean isFinished() {
|
||||
return this.finished;
|
||||
}
|
||||
|
||||
public void addActiveTopicStatus(String activeTopicName, Integer status) {
|
||||
activeTopicSwitchStatusMap.put(activeTopicName, status);
|
||||
}
|
||||
|
||||
public boolean isActiveTopicSwitchFinished(String activeTopicName) {
|
||||
Integer status = activeTopicSwitchStatusMap.get(activeTopicName);
|
||||
if (status == null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return status.equals(HaStatusEnum.STABLE.getCode());
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "HaSwitchTopic{" +
|
||||
"finished=" + finished +
|
||||
", activeTopicSwitchStatusMap=" + activeTopicSwitchStatusMap +
|
||||
'}';
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,28 @@
|
||||
package com.xiaojukeji.kafka.manager.common.entity.ao.ha.job;
|
||||
|
||||
import io.swagger.annotations.ApiModel;
|
||||
import io.swagger.annotations.ApiModelProperty;
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
@Data
|
||||
@NoArgsConstructor
|
||||
@AllArgsConstructor
|
||||
@ApiModel(description = "Job详情")
|
||||
public class HaJobDetail {
|
||||
@ApiModelProperty(value = "Topic名称")
|
||||
private String topicName;
|
||||
|
||||
@ApiModelProperty(value="主集群ID")
|
||||
private Long activeClusterPhyId;
|
||||
|
||||
@ApiModelProperty(value="备集群ID")
|
||||
private Long standbyClusterPhyId;
|
||||
|
||||
@ApiModelProperty(value="Lag和")
|
||||
private Long sumLag;
|
||||
|
||||
@ApiModelProperty(value="状态")
|
||||
private Integer status;
|
||||
}
|
||||
@@ -0,0 +1,16 @@
package com.xiaojukeji.kafka.manager.common.entity.ao.ha.job;

import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;

@Data
@NoArgsConstructor
@AllArgsConstructor
@ApiModel(description = "Job日志")
public class HaJobLog {
    @ApiModelProperty(value = "日志信息")
    private String log;
}
@@ -0,0 +1,70 @@
|
||||
package com.xiaojukeji.kafka.manager.common.entity.ao.ha.job;
|
||||
|
||||
import com.xiaojukeji.kafka.manager.common.bizenum.ha.job.HaJobStatusEnum;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
@Data
|
||||
@NoArgsConstructor
|
||||
public class HaJobState {
|
||||
|
||||
/**
|
||||
* @see com.xiaojukeji.kafka.manager.common.bizenum.ha.job.HaJobStatusEnum
|
||||
*/
|
||||
private int status;
|
||||
|
||||
private int total;
|
||||
|
||||
private int success;
|
||||
|
||||
private int failed;
|
||||
|
||||
private int doing;
|
||||
private int doingInTimeout;
|
||||
|
||||
private int unknown;
|
||||
|
||||
private Integer progress;
|
||||
|
||||
/**
|
||||
* Aggregate directly by status
|
||||
*/
|
||||
public HaJobState(List<Integer> jobStatusList, Integer progress) {
|
||||
this.total = jobStatusList.size();
|
||||
this.success = 0;
|
||||
this.failed = 0;
|
||||
this.doing = 0;
|
||||
this.doingInTimeout = 0;
|
||||
this.unknown = 0;
|
||||
for (Integer jobStatus: jobStatusList) {
|
||||
if (HaJobStatusEnum.SUCCESS.getStatus() == jobStatus) {
|
||||
success += 1;
|
||||
} else if (HaJobStatusEnum.FAILED.getStatus() == jobStatus) {
|
||||
failed += 1;
|
||||
} else if (HaJobStatusEnum.RUNNING.getStatus() == jobStatus) {
|
||||
doing += 1;
|
||||
} else if (HaJobStatusEnum.RUNNING_IN_TIMEOUT.getStatus() == jobStatus) {
|
||||
doingInTimeout += 1;
|
||||
} else {
|
||||
unknown += 1;
|
||||
}
|
||||
}
|
||||
|
||||
this.status = HaJobStatusEnum.getStatusBySubStatus(this.total, this.success, this.failed, this.doing, this.doingInTimeout, this.unknown).getStatus();
|
||||
|
||||
this.progress = progress;
|
||||
}
|
||||
|
||||
public HaJobState(Integer doingSize, Integer progress) {
|
||||
this.total = doingSize;
|
||||
this.success = 0;
|
||||
this.failed = 0;
|
||||
this.doing = doingSize;
|
||||
this.doingInTimeout = 0;
|
||||
this.unknown = 0;
|
||||
|
||||
this.progress = progress;
|
||||
}
|
||||
}
|
||||
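A short sketch of what the aggregating constructor above computes: given the per-sub-job statuses it fills the counters and derives the overall status through HaJobStatusEnum.getStatusBySubStatus. The progress value and the way the status list is collected are illustrative; java.util.Arrays and java.util.List imports are assumed.

// Sketch: build the aggregate state of one switch job from its sub-job statuses.
public static HaJobState sampleState() {
    List<Integer> subJobStatusList = Arrays.asList(
            HaJobStatusEnum.SUCCESS.getStatus(),
            HaJobStatusEnum.RUNNING.getStatus(),
            HaJobStatusEnum.FAILED.getStatus()
    );

    // total=3, success=1, doing=1, failed=1; the overall status is derived via getStatusBySubStatus(...)
    return new HaJobState(subJobStatusList, 66);
}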
@@ -0,0 +1,12 @@
package com.xiaojukeji.kafka.manager.common.entity.ao.ha.job;

import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;

@Data
@NoArgsConstructor
@AllArgsConstructor
public class HaSubJobExtendData {
    private Long sumLag;
}
@@ -1,11 +1,14 @@
|
||||
package com.xiaojukeji.kafka.manager.common.entity.ao.topic;
|
||||
|
||||
import lombok.Data;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* @author arthur
|
||||
* @date 2018/09/03
|
||||
*/
|
||||
@Data
|
||||
public class TopicBasicDTO {
|
||||
private Long clusterId;
|
||||
|
||||
@@ -39,133 +42,7 @@ public class TopicBasicDTO {
|
||||
|
||||
private Long retentionBytes;
|
||||
|
||||
public Long getClusterId() {
|
||||
return clusterId;
|
||||
}
|
||||
|
||||
public void setClusterId(Long clusterId) {
|
||||
this.clusterId = clusterId;
|
||||
}
|
||||
|
||||
public String getAppId() {
|
||||
return appId;
|
||||
}
|
||||
|
||||
public void setAppId(String appId) {
|
||||
this.appId = appId;
|
||||
}
|
||||
|
||||
public String getAppName() {
|
||||
return appName;
|
||||
}
|
||||
|
||||
public void setAppName(String appName) {
|
||||
this.appName = appName;
|
||||
}
|
||||
|
||||
public String getPrincipals() {
|
||||
return principals;
|
||||
}
|
||||
|
||||
public void setPrincipals(String principals) {
|
||||
this.principals = principals;
|
||||
}
|
||||
|
||||
public String getTopicName() {
|
||||
return topicName;
|
||||
}
|
||||
|
||||
public void setTopicName(String topicName) {
|
||||
this.topicName = topicName;
|
||||
}
|
||||
|
||||
public String getDescription() {
|
||||
return description;
|
||||
}
|
||||
|
||||
public void setDescription(String description) {
|
||||
this.description = description;
|
||||
}
|
||||
|
||||
public List<String> getRegionNameList() {
|
||||
return regionNameList;
|
||||
}
|
||||
|
||||
public void setRegionNameList(List<String> regionNameList) {
|
||||
this.regionNameList = regionNameList;
|
||||
}
|
||||
|
||||
public Integer getScore() {
|
||||
return score;
|
||||
}
|
||||
|
||||
public void setScore(Integer score) {
|
||||
this.score = score;
|
||||
}
|
||||
|
||||
public String getTopicCodeC() {
|
||||
return topicCodeC;
|
||||
}
|
||||
|
||||
public void setTopicCodeC(String topicCodeC) {
|
||||
this.topicCodeC = topicCodeC;
|
||||
}
|
||||
|
||||
public Integer getPartitionNum() {
|
||||
return partitionNum;
|
||||
}
|
||||
|
||||
public void setPartitionNum(Integer partitionNum) {
|
||||
this.partitionNum = partitionNum;
|
||||
}
|
||||
|
||||
public Integer getReplicaNum() {
|
||||
return replicaNum;
|
||||
}
|
||||
|
||||
public void setReplicaNum(Integer replicaNum) {
|
||||
this.replicaNum = replicaNum;
|
||||
}
|
||||
|
||||
public Integer getBrokerNum() {
|
||||
return brokerNum;
|
||||
}
|
||||
|
||||
public void setBrokerNum(Integer brokerNum) {
|
||||
this.brokerNum = brokerNum;
|
||||
}
|
||||
|
||||
public Long getModifyTime() {
|
||||
return modifyTime;
|
||||
}
|
||||
|
||||
public void setModifyTime(Long modifyTime) {
|
||||
this.modifyTime = modifyTime;
|
||||
}
|
||||
|
||||
public Long getCreateTime() {
|
||||
return createTime;
|
||||
}
|
||||
|
||||
public void setCreateTime(Long createTime) {
|
||||
this.createTime = createTime;
|
||||
}
|
||||
|
||||
public Long getRetentionTime() {
|
||||
return retentionTime;
|
||||
}
|
||||
|
||||
public void setRetentionTime(Long retentionTime) {
|
||||
this.retentionTime = retentionTime;
|
||||
}
|
||||
|
||||
public Long getRetentionBytes() {
|
||||
return retentionBytes;
|
||||
}
|
||||
|
||||
public void setRetentionBytes(Long retentionBytes) {
|
||||
this.retentionBytes = retentionBytes;
|
||||
}
|
||||
private Integer haRelation;
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
@@ -186,6 +63,7 @@ public class TopicBasicDTO {
|
||||
", createTime=" + createTime +
|
||||
", retentionTime=" + retentionTime +
|
||||
", retentionBytes=" + retentionBytes +
|
||||
", haRelation=" + haRelation +
|
||||
'}';
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,9 +1,12 @@
|
||||
package com.xiaojukeji.kafka.manager.common.entity.ao.topic;
|
||||
|
||||
import lombok.Data;
|
||||
|
||||
/**
|
||||
* @author zengqiao
|
||||
* @date 20/4/20
|
||||
*/
|
||||
@Data
|
||||
public class TopicConnection {
|
||||
private Long clusterId;
|
||||
|
||||
@@ -19,72 +22,9 @@ public class TopicConnection {
|
||||
|
||||
private String clientVersion;
|
||||
|
||||
public Long getClusterId() {
|
||||
return clusterId;
|
||||
}
|
||||
private String clientId;
|
||||
|
||||
public void setClusterId(Long clusterId) {
|
||||
this.clusterId = clusterId;
|
||||
}
|
||||
private Long realConnectTime;
|
||||
|
||||
public String getTopicName() {
|
||||
return topicName;
|
||||
}
|
||||
|
||||
public void setTopicName(String topicName) {
|
||||
this.topicName = topicName;
|
||||
}
|
||||
|
||||
public String getAppId() {
|
||||
return appId;
|
||||
}
|
||||
|
||||
public void setAppId(String appId) {
|
||||
this.appId = appId;
|
||||
}
|
||||
|
||||
public String getIp() {
|
||||
return ip;
|
||||
}
|
||||
|
||||
public void setIp(String ip) {
|
||||
this.ip = ip;
|
||||
}
|
||||
|
||||
public String getHostname() {
|
||||
return hostname;
|
||||
}
|
||||
|
||||
public void setHostname(String hostname) {
|
||||
this.hostname = hostname;
|
||||
}
|
||||
|
||||
public String getClientType() {
|
||||
return clientType;
|
||||
}
|
||||
|
||||
public void setClientType(String clientType) {
|
||||
this.clientType = clientType;
|
||||
}
|
||||
|
||||
public String getClientVersion() {
|
||||
return clientVersion;
|
||||
}
|
||||
|
||||
public void setClientVersion(String clientVersion) {
|
||||
this.clientVersion = clientVersion;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "TopicConnectionDTO{" +
|
||||
"clusterId=" + clusterId +
|
||||
", topicName='" + topicName + '\'' +
|
||||
", appId='" + appId + '\'' +
|
||||
", ip='" + ip + '\'' +
|
||||
", hostname='" + hostname + '\'' +
|
||||
", clientType='" + clientType + '\'' +
|
||||
", clientVersion='" + clientVersion + '\'' +
|
||||
'}';
|
||||
}
|
||||
private Long createTime;
|
||||
}
|
||||
@@ -1,10 +1,13 @@
|
||||
package com.xiaojukeji.kafka.manager.common.entity.ao.topic;
|
||||
|
||||
import lombok.Data;
|
||||
|
||||
/**
|
||||
* Topic overview information
|
||||
* @author zengqiao
|
||||
* @date 20/5/14
|
||||
*/
|
||||
@Data
|
||||
public class TopicOverview {
|
||||
private Long clusterId;
|
||||
|
||||
@@ -32,109 +35,7 @@ public class TopicOverview {
|
||||
|
||||
private Long logicalClusterId;
|
||||
|
||||
public Long getClusterId() {
|
||||
return clusterId;
|
||||
}
|
||||
|
||||
public void setClusterId(Long clusterId) {
|
||||
this.clusterId = clusterId;
|
||||
}
|
||||
|
||||
public String getTopicName() {
|
||||
return topicName;
|
||||
}
|
||||
|
||||
public void setTopicName(String topicName) {
|
||||
this.topicName = topicName;
|
||||
}
|
||||
|
||||
public Integer getReplicaNum() {
|
||||
return replicaNum;
|
||||
}
|
||||
|
||||
public void setReplicaNum(Integer replicaNum) {
|
||||
this.replicaNum = replicaNum;
|
||||
}
|
||||
|
||||
public Integer getPartitionNum() {
|
||||
return partitionNum;
|
||||
}
|
||||
|
||||
public void setPartitionNum(Integer partitionNum) {
|
||||
this.partitionNum = partitionNum;
|
||||
}
|
||||
|
||||
public Long getRetentionTime() {
|
||||
return retentionTime;
|
||||
}
|
||||
|
||||
public void setRetentionTime(Long retentionTime) {
|
||||
this.retentionTime = retentionTime;
|
||||
}
|
||||
|
||||
public Object getByteIn() {
|
||||
return byteIn;
|
||||
}
|
||||
|
||||
public void setByteIn(Object byteIn) {
|
||||
this.byteIn = byteIn;
|
||||
}
|
||||
|
||||
public Object getByteOut() {
|
||||
return byteOut;
|
||||
}
|
||||
|
||||
public void setByteOut(Object byteOut) {
|
||||
this.byteOut = byteOut;
|
||||
}
|
||||
|
||||
public Object getProduceRequest() {
|
||||
return produceRequest;
|
||||
}
|
||||
|
||||
public void setProduceRequest(Object produceRequest) {
|
||||
this.produceRequest = produceRequest;
|
||||
}
|
||||
|
||||
public String getAppName() {
|
||||
return appName;
|
||||
}
|
||||
|
||||
public void setAppName(String appName) {
|
||||
this.appName = appName;
|
||||
}
|
||||
|
||||
public String getAppId() {
|
||||
return appId;
|
||||
}
|
||||
|
||||
public void setAppId(String appId) {
|
||||
this.appId = appId;
|
||||
}
|
||||
|
||||
public String getDescription() {
|
||||
return description;
|
||||
}
|
||||
|
||||
public void setDescription(String description) {
|
||||
this.description = description;
|
||||
}
|
||||
|
||||
public Long getUpdateTime() {
|
||||
return updateTime;
|
||||
}
|
||||
|
||||
public void setUpdateTime(Long updateTime) {
|
||||
this.updateTime = updateTime;
|
||||
}
|
||||
|
||||
public Long getLogicalClusterId() {
|
||||
return logicalClusterId;
|
||||
}
|
||||
|
||||
public void setLogicalClusterId(Long logicalClusterId) {
|
||||
this.logicalClusterId = logicalClusterId;
|
||||
}
|
||||
private Integer haRelation;
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
@@ -152,6 +53,7 @@ public class TopicOverview {
|
||||
", description='" + description + '\'' +
|
||||
", updateTime=" + updateTime +
|
||||
", logicalClusterId=" + logicalClusterId +
|
||||
", haRelation=" + haRelation +
|
||||
'}';
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,18 @@
package com.xiaojukeji.kafka.manager.common.entity.dto.ha;

import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;

import javax.validation.constraints.NotBlank;

@Data
@ApiModel(description="Topic信息")
public class ASSwitchJobActionDTO {
    /**
     * @see com.xiaojukeji.kafka.manager.common.bizenum.TaskActionEnum
     */
    @NotBlank(message = "action不允许为空")
    @ApiModelProperty(value = "动作, force")
    private String action;
}
@@ -0,0 +1,40 @@
|
||||
package com.xiaojukeji.kafka.manager.common.entity.dto.ha;
|
||||
|
||||
import io.swagger.annotations.ApiModel;
|
||||
import io.swagger.annotations.ApiModelProperty;
|
||||
import lombok.Data;
|
||||
|
||||
import javax.validation.Valid;
|
||||
import javax.validation.constraints.NotNull;
|
||||
import java.util.List;
|
||||
|
||||
@Data
|
||||
@ApiModel(description="主备切换任务")
|
||||
public class ASSwitchJobDTO {
|
||||
@NotNull(message = "all不允许为NULL")
|
||||
@ApiModelProperty(value = "所有Topic")
|
||||
private Boolean all;
|
||||
|
||||
@NotNull(message = "mustContainAllKafkaUserTopics不允许为NULL")
|
||||
@ApiModelProperty(value = "是否需要包含KafkaUser关联的所有Topic")
|
||||
private Boolean mustContainAllKafkaUserTopics;
|
||||
|
||||
@NotNull(message = "activeClusterPhyId不允许为NULL")
|
||||
@ApiModelProperty(value="主集群ID")
|
||||
private Long activeClusterPhyId;
|
||||
|
||||
@NotNull(message = "standbyClusterPhyId不允许为NULL")
|
||||
@ApiModelProperty(value="备集群ID")
|
||||
private Long standbyClusterPhyId;
|
||||
|
||||
@NotNull(message = "topicNameList不允许为NULL")
|
||||
@ApiModelProperty(value="切换的Topic名称列表")
|
||||
private List<String> topicNameList;
|
||||
|
||||
/**
|
||||
* kafkaUser + Client list
|
||||
*/
|
||||
@Valid
|
||||
@ApiModelProperty(value="切换的KafkaUser&ClientId列表,Client可以为空串")
|
||||
private List<KafkaUserAndClientDTO> kafkaUserAndClientIdList;
|
||||
}
|
||||
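With @Data generating the setters, a request that switches two named topics from cluster 100 to cluster 200 could be assembled as below; all IDs, topic names and the empty kafkaUser list are illustrative, and the usual java.util imports are assumed.

ASSwitchJobDTO dto = new ASSwitchJobDTO();
dto.setAll(false);                              // only the listed topics, not every HA topic
dto.setMustContainAllKafkaUserTopics(true);     // pull in every topic of the affected kafkaUsers
dto.setActiveClusterPhyId(100L);
dto.setStandbyClusterPhyId(200L);
dto.setTopicNameList(Arrays.asList("order-event", "pay-event"));
dto.setKafkaUserAndClientIdList(Collections.emptyList());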
@@ -0,0 +1,18 @@
package com.xiaojukeji.kafka.manager.common.entity.dto.ha;

import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;

import javax.validation.constraints.NotBlank;

@Data
@ApiModel(description="KafkaUser和ClientId信息")
public class KafkaUserAndClientDTO {
    @NotBlank(message = "kafkaUser不允许为空串")
    @ApiModelProperty(value = "kafkaUser")
    private String kafkaUser;

    @ApiModelProperty(value = "clientId")
    private String clientId;
}
@@ -0,0 +1,55 @@
|
||||
package com.xiaojukeji.kafka.manager.common.entity.dto.op.topic;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
|
||||
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
|
||||
import io.swagger.annotations.ApiModel;
|
||||
import io.swagger.annotations.ApiModelProperty;
|
||||
import lombok.Data;
|
||||
|
||||
import javax.validation.constraints.NotNull;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* @author huangyiminghappy@163.com, zengqiao
|
||||
* @date 2022-06-29
|
||||
*/
|
||||
@Data
|
||||
@JsonIgnoreProperties(ignoreUnknown = true)
|
||||
@ApiModel(description = "Topic高可用关联|解绑")
|
||||
public class HaTopicRelationDTO {
|
||||
@NotNull(message = "主集群id不能为空")
|
||||
@ApiModelProperty(value = "主集群id")
|
||||
private Long activeClusterId;
|
||||
|
||||
@NotNull(message = "备集群id不能为空")
|
||||
@ApiModelProperty(value = "备集群id")
|
||||
private Long standbyClusterId;
|
||||
|
||||
@NotNull(message = "是否应用于所有topic")
|
||||
@ApiModelProperty(value = "是否应用于所有topic")
|
||||
private Boolean all;
|
||||
|
||||
@ApiModelProperty(value = "需要关联|解绑的topic名称列表")
|
||||
private List<String> topicNames;
|
||||
|
||||
@ApiModelProperty(value = "解绑是否保留备集群资源(topic,kafkaUser,group)")
|
||||
private Boolean retainStandbyResource;
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "HaTopicRelationDTO{" +
|
||||
"activeClusterId=" + activeClusterId +
|
||||
", standbyClusterId=" + standbyClusterId +
|
||||
", all=" + all +
|
||||
", topicNames=" + topicNames +
|
||||
", retainStandbyResource=" + retainStandbyResource +
|
||||
'}';
|
||||
}
|
||||
|
||||
public boolean paramLegal() {
|
||||
if(!all && ValidateUtils.isEmptyList(topicNames)) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
}
|
||||
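paramLegal() only rejects the case all=false with an empty topic list; a controller would typically pair it with the Result helpers earlier in this patch. A hedged sketch where the controller method and the doRelate service call are hypothetical.

public Result<Void> relateHaTopics(HaTopicRelationDTO dto) {
    if (!dto.paramLegal()) {
        // all == false but topicNames is empty
        return Result.buildParamIllegal("topicNames");
    }
    return doRelate(dto);   // hypothetical hand-off to the HA relation service
}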
@@ -21,6 +21,15 @@ public class AccountDTO {
|
||||
@ApiModelProperty(value = "角色")
|
||||
private Integer role;
|
||||
|
||||
@ApiModelProperty(value = "用户姓名")
|
||||
private String displayName;
|
||||
|
||||
@ApiModelProperty(value = "部门")
|
||||
private String department;
|
||||
|
||||
@ApiModelProperty(value = "邮箱")
|
||||
private String mail;
|
||||
|
||||
public String getUsername() {
|
||||
return username;
|
||||
}
|
||||
@@ -45,12 +54,39 @@ public class AccountDTO {
|
||||
this.role = role;
|
||||
}
|
||||
|
||||
public String getDisplayName() {
|
||||
return displayName;
|
||||
}
|
||||
|
||||
public void setDisplayName(String displayName) {
|
||||
this.displayName = displayName;
|
||||
}
|
||||
|
||||
public String getDepartment() {
|
||||
return department;
|
||||
}
|
||||
|
||||
public void setDepartment(String department) {
|
||||
this.department = department;
|
||||
}
|
||||
|
||||
public String getMail() {
|
||||
return mail;
|
||||
}
|
||||
|
||||
public void setMail(String mail) {
|
||||
this.mail = mail;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "AccountDTO{" +
|
||||
"username='" + username + '\'' +
|
||||
", password='" + password + '\'' +
|
||||
", role=" + role +
|
||||
", displayName='" + displayName + '\'' +
|
||||
", department='" + department + '\'' +
|
||||
", mail='" + mail + '\'' +
|
||||
'}';
|
||||
}
|
||||
|
||||
|
||||
@@ -0,0 +1,31 @@
|
||||
package com.xiaojukeji.kafka.manager.common.entity.dto.rd;
|
||||
|
||||
import io.swagger.annotations.ApiModel;
|
||||
import io.swagger.annotations.ApiModelProperty;
|
||||
import lombok.Data;
|
||||
|
||||
import javax.validation.constraints.NotNull;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* @author zengqiao
|
||||
* @date 20/5/4
|
||||
*/
|
||||
@Data
|
||||
@ApiModel(description="App关联Topic信息")
|
||||
public class AppRelateTopicsDTO {
|
||||
@NotNull(message = "clusterPhyId不允许为NULL")
|
||||
@ApiModelProperty(value="物理集群ID")
|
||||
private Long clusterPhyId;
|
||||
|
||||
@NotNull(message = "filterTopicNameList不允许为NULL")
|
||||
@ApiModelProperty(value="过滤的Topic列表")
|
||||
private List<String> filterTopicNameList;
|
||||
|
||||
@ApiModelProperty(value="使用KafkaUser+Client维度的数据,默认是kafkaUser维度")
|
||||
private Boolean useKafkaUserAndClientId;
|
||||
|
||||
@NotNull(message = "ha不允许为NULL")
|
||||
@ApiModelProperty(value="查询是否高可用topic")
|
||||
private Boolean ha;
|
||||
}
|
||||
@@ -4,11 +4,13 @@ import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
|
||||
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
|
||||
import io.swagger.annotations.ApiModel;
|
||||
import io.swagger.annotations.ApiModelProperty;
|
||||
import lombok.Data;
|
||||
|
||||
/**
|
||||
* @author zengqiao
|
||||
* @date 20/4/23
|
||||
*/
|
||||
@Data
|
||||
@ApiModel(description = "集群接入&修改")
|
||||
@JsonIgnoreProperties(ignoreUnknown = true)
|
||||
public class ClusterDTO {
|
||||
@@ -33,60 +35,21 @@ public class ClusterDTO {
|
||||
@ApiModelProperty(value="Jmx配置")
|
||||
private String jmxProperties;
|
||||
|
||||
public Long getClusterId() {
|
||||
return clusterId;
|
||||
}
|
||||
@ApiModelProperty(value="主集群Id")
|
||||
private Long activeClusterId;
|
||||
|
||||
public void setClusterId(Long clusterId) {
|
||||
this.clusterId = clusterId;
|
||||
}
|
||||
@ApiModelProperty(value="是否高可用")
|
||||
private boolean isHa;
|
||||
|
||||
public String getClusterName() {
|
||||
return clusterName;
|
||||
}
|
||||
|
||||
public void setClusterName(String clusterName) {
|
||||
this.clusterName = clusterName;
|
||||
}
|
||||
|
||||
public String getZookeeper() {
|
||||
return zookeeper;
|
||||
}
|
||||
|
||||
public void setZookeeper(String zookeeper) {
|
||||
this.zookeeper = zookeeper;
|
||||
}
|
||||
|
||||
public String getBootstrapServers() {
|
||||
return bootstrapServers;
|
||||
}
|
||||
|
||||
public void setBootstrapServers(String bootstrapServers) {
|
||||
this.bootstrapServers = bootstrapServers;
|
||||
}
|
||||
|
||||
public String getIdc() {
|
||||
return idc;
|
||||
}
|
||||
|
||||
public void setIdc(String idc) {
|
||||
this.idc = idc;
|
||||
}
|
||||
|
||||
public String getSecurityProperties() {
|
||||
return securityProperties;
|
||||
}
|
||||
|
||||
public void setSecurityProperties(String securityProperties) {
|
||||
this.securityProperties = securityProperties;
|
||||
}
|
||||
|
||||
public String getJmxProperties() {
|
||||
return jmxProperties;
|
||||
}
|
||||
|
||||
public void setJmxProperties(String jmxProperties) {
|
||||
this.jmxProperties = jmxProperties;
|
||||
public boolean legal() {
|
||||
if (ValidateUtils.isNull(clusterName)
|
||||
|| ValidateUtils.isNull(zookeeper)
|
||||
|| ValidateUtils.isNull(idc)
|
||||
|| ValidateUtils.isNull(bootstrapServers)
|
||||
|| (isHa && ValidateUtils.isNull(activeClusterId))) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
@@ -99,16 +62,8 @@ public class ClusterDTO {
|
||||
", idc='" + idc + '\'' +
|
||||
", securityProperties='" + securityProperties + '\'' +
|
||||
", jmxProperties='" + jmxProperties + '\'' +
|
||||
", activeClusterId=" + activeClusterId +
|
||||
", isHa=" + isHa +
|
||||
'}';
|
||||
}
|
||||
|
||||
public boolean legal() {
|
||||
if (ValidateUtils.isNull(clusterName)
|
||||
|| ValidateUtils.isNull(zookeeper)
|
||||
|| ValidateUtils.isNull(idc)
|
||||
|| ValidateUtils.isNull(bootstrapServers)) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
}
|
||||
@@ -118,10 +118,7 @@ public class LogicalClusterDTO {
|
||||
}
|
||||
|
||||
public boolean legal() {
|
||||
if (ValidateUtils.isNull(clusterId)
|
||||
|| ValidateUtils.isNull(clusterId)
|
||||
|| ValidateUtils.isEmptyList(regionIdList)
|
||||
|| ValidateUtils.isNull(mode)) {
|
||||
if (ValidateUtils.isNull(clusterId) || ValidateUtils.isEmptyList(regionIdList) || ValidateUtils.isNull(mode)) {
|
||||
return false;
|
||||
}
|
||||
if (!ClusterModeEnum.SHARED_MODE.getCode().equals(mode) && ValidateUtils.isNull(appId)) {
|
||||
|
||||
@@ -94,10 +94,7 @@ public class RegionDTO {
|
||||
}
|
||||
|
||||
public boolean legal() {
|
||||
if (ValidateUtils.isNull(clusterId)
|
||||
|| ValidateUtils.isNull(clusterId)
|
||||
|| ValidateUtils.isEmptyList(brokerIdList)
|
||||
|| ValidateUtils.isNull(status)) {
|
||||
if (ValidateUtils.isNull(clusterId) || ValidateUtils.isEmptyList(brokerIdList) || ValidateUtils.isNull(status)) {
|
||||
return false;
|
||||
}
|
||||
description = ValidateUtils.isNull(description)? "": description;
|
||||
|
||||
@@ -0,0 +1,24 @@
package com.xiaojukeji.kafka.manager.common.entity.pagination;

import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;

@Data
@ApiModel(description = "分页信息")
public class Pagination {
    @ApiModelProperty(value = "总记录数", example = "100")
    private long total;

    @ApiModelProperty(value = "当前页码", example = "0")
    private long pageNo;

    @ApiModelProperty(value = "单页大小", example = "10")
    private long pageSize;

    public Pagination(long total, long pageNo, long pageSize) {
        this.total = total;
        this.pageNo = pageNo;
        this.pageSize = pageSize;
    }
}
@@ -0,0 +1,17 @@
package com.xiaojukeji.kafka.manager.common.entity.pagination;

import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;

import java.util.List;

@Data
@ApiModel(description = "分页数据")
public class PaginationData<T> {
    @ApiModelProperty(value = "业务数据")
    private List<T> bizData;

    @ApiModelProperty(value = "分页信息")
    private Pagination pagination;
}
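Pagination and PaginationData together describe one page of a listing response. A minimal sketch of filling them; the query and count helpers are hypothetical, TopicOverviewVO is the VO added later in this patch.

long pageNo = 1L;
long pageSize = 20L;

List<TopicOverviewVO> pageItems = queryTopicOverviewPage(pageNo, pageSize);   // hypothetical query
long totalMatched = countTopicOverviews();                                    // hypothetical count

PaginationData<TopicOverviewVO> page = new PaginationData<>();
page.setBizData(pageItems);
page.setPagination(new Pagination(totalMatched, pageNo, pageSize));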
@@ -21,6 +21,12 @@ public class AccountDO {
|
||||
|
||||
private Integer role;
|
||||
|
||||
private String displayName;
|
||||
|
||||
private String department;
|
||||
|
||||
private String mail;
|
||||
|
||||
public String getUsername() {
|
||||
return username;
|
||||
}
|
||||
@@ -45,16 +51,43 @@ public class AccountDO {
|
||||
this.role = role;
|
||||
}
|
||||
|
||||
public String getDisplayName() {
|
||||
return displayName;
|
||||
}
|
||||
|
||||
public void setDisplayName(String displayName) {
|
||||
this.displayName = displayName;
|
||||
}
|
||||
|
||||
public String getDepartment() {
|
||||
return department;
|
||||
}
|
||||
|
||||
public void setDepartment(String department) {
|
||||
this.department = department;
|
||||
}
|
||||
|
||||
public String getMail() {
|
||||
return mail;
|
||||
}
|
||||
|
||||
public void setMail(String mail) {
|
||||
this.mail = mail;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "AccountDO{" +
|
||||
"username='" + username + '\'' +
|
||||
", password='" + password + '\'' +
|
||||
", role=" + role +
|
||||
", id=" + id +
|
||||
"id=" + id +
|
||||
", status=" + status +
|
||||
", gmtCreate=" + gmtCreate +
|
||||
", gmtModify=" + gmtModify +
|
||||
", username='" + username + '\'' +
|
||||
", password='" + password + '\'' +
|
||||
", role=" + role +
|
||||
", displayName='" + displayName + '\'' +
|
||||
", department='" + department + '\'' +
|
||||
", mail='" + mail + '\'' +
|
||||
'}';
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,30 @@
package com.xiaojukeji.kafka.manager.common.entity.pojo;

import lombok.Data;

import java.io.Serializable;
import java.util.Date;

/**
 * @author zengqiao
 * @date 21/07/19
 */
@Data
public class BaseDO implements Serializable {
    private static final long serialVersionUID = 8782560709154468485L;

    /**
     * Primary key ID
     */
    protected Long id;

    /**
     * Creation time
     */
    protected Date createTime;

    /**
     * Modification time
     */
    protected Date modifyTime;
}
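The DO classes added later in this patch (HaASRelationDO, HaASSwitchJobDO, HaASSwitchSubJobDO, JobLogDO) extend BaseDO to inherit id, createTime and modifyTime instead of redeclaring them. A new table mapping would follow the same shape; the class and table below are made up for illustration only.

// Illustrative only -- this table does not exist in the patch.
@Data
@NoArgsConstructor
@TableName("ha_demo_record")
public class HaDemoRecordDO extends BaseDO {
    private Long clusterPhyId;

    private String remark;
}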
@@ -1,11 +1,18 @@
|
||||
package com.xiaojukeji.kafka.manager.common.entity.pojo;
|
||||
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
import lombok.ToString;
|
||||
|
||||
import java.util.Date;
|
||||
|
||||
/**
|
||||
* @author zengqiao
|
||||
* @date 20/6/29
|
||||
*/
|
||||
@Data
|
||||
@ToString
|
||||
@NoArgsConstructor
|
||||
public class LogicalClusterDO {
|
||||
private Long id;
|
||||
|
||||
@@ -27,99 +34,17 @@ public class LogicalClusterDO {
|
||||
|
||||
private Date gmtModify;
|
||||
|
||||
public Long getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public void setId(Long id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public void setName(String name) {
|
||||
public LogicalClusterDO(String name,
|
||||
String identification,
|
||||
Integer mode,
|
||||
String appId,
|
||||
Long clusterId,
|
||||
String regionList) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
public String getIdentification() {
|
||||
return identification;
|
||||
}
|
||||
|
||||
public void setIdentification(String identification) {
|
||||
this.identification = identification;
|
||||
}
|
||||
|
||||
public Integer getMode() {
|
||||
return mode;
|
||||
}
|
||||
|
||||
public void setMode(Integer mode) {
|
||||
this.mode = mode;
|
||||
}
|
||||
|
||||
public String getAppId() {
|
||||
return appId;
|
||||
}
|
||||
|
||||
public void setAppId(String appId) {
|
||||
this.appId = appId;
|
||||
}
|
||||
|
||||
public Long getClusterId() {
|
||||
return clusterId;
|
||||
}
|
||||
|
||||
public void setClusterId(Long clusterId) {
|
||||
this.clusterId = clusterId;
|
||||
}
|
||||
|
||||
public String getRegionList() {
|
||||
return regionList;
|
||||
}
|
||||
|
||||
public void setRegionList(String regionList) {
|
||||
this.regionList = regionList;
|
||||
}
|
||||
|
||||
public String getDescription() {
|
||||
return description;
|
||||
}
|
||||
|
||||
public void setDescription(String description) {
|
||||
this.description = description;
|
||||
}
|
||||
|
||||
public Date getGmtCreate() {
|
||||
return gmtCreate;
|
||||
}
|
||||
|
||||
public void setGmtCreate(Date gmtCreate) {
|
||||
this.gmtCreate = gmtCreate;
|
||||
}
|
||||
|
||||
public Date getGmtModify() {
|
||||
return gmtModify;
|
||||
}
|
||||
|
||||
public void setGmtModify(Date gmtModify) {
|
||||
this.gmtModify = gmtModify;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "LogicalClusterDO{" +
|
||||
"id=" + id +
|
||||
", name='" + name + '\'' +
|
||||
", identification='" + identification + '\'' +
|
||||
", mode=" + mode +
|
||||
", appId='" + appId + '\'' +
|
||||
", clusterId=" + clusterId +
|
||||
", regionList='" + regionList + '\'' +
|
||||
", description='" + description + '\'' +
|
||||
", gmtCreate=" + gmtCreate +
|
||||
", gmtModify=" + gmtModify +
|
||||
'}';
|
||||
}
|
||||
}
|
||||
@@ -1,7 +1,14 @@
|
||||
package com.xiaojukeji.kafka.manager.common.entity.pojo;
|
||||
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
import lombok.ToString;
|
||||
|
||||
import java.util.Date;
|
||||
|
||||
@Data
|
||||
@ToString
|
||||
@NoArgsConstructor
|
||||
public class RegionDO implements Comparable<RegionDO> {
|
||||
private Long id;
|
||||
|
||||
@@ -25,111 +32,13 @@ public class RegionDO implements Comparable<RegionDO> {
|
||||
|
||||
private String description;
|
||||
|
||||
public Long getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public void setId(Long id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
public Integer getStatus() {
|
||||
return status;
|
||||
}
|
||||
|
||||
public void setStatus(Integer status) {
|
||||
public RegionDO(Integer status, String name, Long clusterId, String brokerList) {
|
||||
this.status = status;
|
||||
}
|
||||
|
||||
public Date getGmtCreate() {
|
||||
return gmtCreate;
|
||||
}
|
||||
|
||||
public void setGmtCreate(Date gmtCreate) {
|
||||
this.gmtCreate = gmtCreate;
|
||||
}
|
||||
|
||||
public Date getGmtModify() {
|
||||
return gmtModify;
|
||||
}
|
||||
|
||||
public void setGmtModify(Date gmtModify) {
|
||||
this.gmtModify = gmtModify;
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public void setName(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
public Long getClusterId() {
|
||||
return clusterId;
|
||||
}
|
||||
|
||||
public void setClusterId(Long clusterId) {
|
||||
this.clusterId = clusterId;
|
||||
}
|
||||
|
||||
public String getBrokerList() {
|
||||
return brokerList;
|
||||
}
|
||||
|
||||
public void setBrokerList(String brokerList) {
|
||||
this.brokerList = brokerList;
|
||||
}
|
||||
|
||||
public Long getCapacity() {
|
||||
return capacity;
|
||||
}
|
||||
|
||||
public void setCapacity(Long capacity) {
|
||||
this.capacity = capacity;
|
||||
}
|
||||
|
||||
public Long getRealUsed() {
|
||||
return realUsed;
|
||||
}
|
||||
|
||||
public void setRealUsed(Long realUsed) {
|
||||
this.realUsed = realUsed;
|
||||
}
|
||||
|
||||
public Long getEstimateUsed() {
|
||||
return estimateUsed;
|
||||
}
|
||||
|
||||
public void setEstimateUsed(Long estimateUsed) {
|
||||
this.estimateUsed = estimateUsed;
|
||||
}
|
||||
|
||||
public String getDescription() {
|
||||
return description;
|
||||
}
|
||||
|
||||
public void setDescription(String description) {
|
||||
this.description = description;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "RegionDO{" +
|
||||
"id=" + id +
|
||||
", status=" + status +
|
||||
", gmtCreate=" + gmtCreate +
|
||||
", gmtModify=" + gmtModify +
|
||||
", name='" + name + '\'' +
|
||||
", clusterId=" + clusterId +
|
||||
", brokerList='" + brokerList + '\'' +
|
||||
", capacity=" + capacity +
|
||||
", realUsed=" + realUsed +
|
||||
", estimateUsed=" + estimateUsed +
|
||||
", description='" + description + '\'' +
|
||||
'}';
|
||||
}
|
||||
|
||||
@Override
|
||||
public int compareTo(RegionDO regionDO) {
|
||||
return this.id.compareTo(regionDO.id);
|
||||
|
||||
@@ -2,6 +2,8 @@ package com.xiaojukeji.kafka.manager.common.entity.pojo;
|
||||
|
||||
import com.xiaojukeji.kafka.manager.common.entity.dto.op.topic.TopicCreationDTO;
|
||||
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
import java.util.Date;
|
||||
|
||||
@@ -9,6 +11,8 @@ import java.util.Date;
|
||||
* @author zengqiao
|
||||
* @date 20/4/24
|
||||
*/
|
||||
@Data
|
||||
@NoArgsConstructor
|
||||
public class TopicDO {
|
||||
private Long id;
|
||||
|
||||
@@ -26,70 +30,14 @@ public class TopicDO {
|
||||
|
||||
private Long peakBytesIn;
|
||||
|
||||
public String getAppId() {
|
||||
return appId;
|
||||
}
|
||||
|
||||
public void setAppId(String appId) {
|
||||
public TopicDO(String appId, Long clusterId, String topicName, String description, Long peakBytesIn) {
|
||||
this.appId = appId;
|
||||
}
|
||||
|
||||
public Long getClusterId() {
|
||||
return clusterId;
|
||||
}
|
||||
|
||||
public void setClusterId(Long clusterId) {
|
||||
this.clusterId = clusterId;
|
||||
}
|
||||
|
||||
public String getTopicName() {
|
||||
return topicName;
|
||||
}
|
||||
|
||||
public void setTopicName(String topicName) {
|
||||
this.topicName = topicName;
|
||||
}
|
||||
|
||||
public String getDescription() {
|
||||
return description;
|
||||
}
|
||||
|
||||
public void setDescription(String description) {
|
||||
this.description = description;
|
||||
}
|
||||
|
||||
public Long getPeakBytesIn() {
|
||||
return peakBytesIn;
|
||||
}
|
||||
|
||||
public void setPeakBytesIn(Long peakBytesIn) {
|
||||
this.peakBytesIn = peakBytesIn;
|
||||
}
|
||||
|
||||
public Long getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public void setId(Long id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
public Date getGmtCreate() {
|
||||
return gmtCreate;
|
||||
}
|
||||
|
||||
public void setGmtCreate(Date gmtCreate) {
|
||||
this.gmtCreate = gmtCreate;
|
||||
}
|
||||
|
||||
public Date getGmtModify() {
|
||||
return gmtModify;
|
||||
}
|
||||
|
||||
public void setGmtModify(Date gmtModify) {
|
||||
this.gmtModify = gmtModify;
|
||||
}
|
||||
|
||||
public static TopicDO buildFrom(TopicCreationDTO dto) {
|
||||
TopicDO topicDO = new TopicDO();
|
||||
topicDO.setAppId(dto.getAppId());
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
package com.xiaojukeji.kafka.manager.common.entity.pojo.gateway;
|
||||
|
||||
import lombok.Data;
|
||||
|
||||
import java.util.Date;
|
||||
|
||||
/**
|
||||
@@ -7,6 +9,7 @@ import java.util.Date;
|
||||
* @author zengqiao
|
||||
* @date 20/7/6
|
||||
*/
|
||||
@Data
|
||||
public class TopicConnectionDO {
|
||||
private Long id;
|
||||
|
||||
@@ -22,87 +25,13 @@ public class TopicConnectionDO {
|
||||
|
||||
private String clientVersion;
|
||||
|
||||
private String clientId;
|
||||
|
||||
private Long realConnectTime;
|
||||
|
||||
private Date createTime;
|
||||
|
||||
public Long getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public void setId(Long id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
public Long getClusterId() {
|
||||
return clusterId;
|
||||
}
|
||||
|
||||
public void setClusterId(Long clusterId) {
|
||||
this.clusterId = clusterId;
|
||||
}
|
||||
|
||||
public String getTopicName() {
|
||||
return topicName;
|
||||
}
|
||||
|
||||
public void setTopicName(String topicName) {
|
||||
this.topicName = topicName;
|
||||
}
|
||||
|
||||
public String getType() {
|
||||
return type;
|
||||
}
|
||||
|
||||
public void setType(String type) {
|
||||
this.type = type;
|
||||
}
|
||||
|
||||
public String getAppId() {
|
||||
return appId;
|
||||
}
|
||||
|
||||
public void setAppId(String appId) {
|
||||
this.appId = appId;
|
||||
}
|
||||
|
||||
public String getIp() {
|
||||
return ip;
|
||||
}
|
||||
|
||||
public void setIp(String ip) {
|
||||
this.ip = ip;
|
||||
}
|
||||
|
||||
public String getClientVersion() {
|
||||
return clientVersion;
|
||||
}
|
||||
|
||||
public void setClientVersion(String clientVersion) {
|
||||
this.clientVersion = clientVersion;
|
||||
}
|
||||
|
||||
public Date getCreateTime() {
|
||||
return createTime;
|
||||
}
|
||||
|
||||
public void setCreateTime(Date createTime) {
|
||||
this.createTime = createTime;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "TopicConnectionDO{" +
|
||||
"id=" + id +
|
||||
", clusterId=" + clusterId +
|
||||
", topicName='" + topicName + '\'' +
|
||||
", type='" + type + '\'' +
|
||||
", appId='" + appId + '\'' +
|
||||
", ip='" + ip + '\'' +
|
||||
", clientVersion='" + clientVersion + '\'' +
|
||||
", createTime=" + createTime +
|
||||
'}';
|
||||
}
|
||||
|
||||
public String uniqueKey() {
|
||||
return appId + clusterId + topicName + type + ip;
|
||||
return appId + clusterId + topicName + type + ip + clientId;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,71 @@
package com.xiaojukeji.kafka.manager.common.entity.pojo.ha;

import com.baomidou.mybatisplus.annotation.TableName;
import com.xiaojukeji.kafka.manager.common.bizenum.ha.HaResTypeEnum;
import com.xiaojukeji.kafka.manager.common.entity.pojo.BaseDO;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;


/**
 * HA active-standby relation table
 */
@Data
@NoArgsConstructor
@AllArgsConstructor
@TableName("ha_active_standby_relation")
public class HaASRelationDO extends BaseDO {
    /**
     * Active cluster ID
     */
    private Long activeClusterPhyId;

    /**
     * Active cluster resource name
     */
    private String activeResName;

    /**
     * Standby cluster ID
     */
    private Long standbyClusterPhyId;

    /**
     * Standby cluster resource name
     */
    private String standbyResName;

    /**
     * Resource type
     * @see HaResTypeEnum
     */
    private Integer resType;

    /**
     * Active-standby status
     */
    private Integer status;

    /**
     * Unique field of the active-standby relation
     */
    private String uniqueField;

    public HaASRelationDO(Long id, Integer status) {
        this.id = id;
        this.status = status;
    }

    public HaASRelationDO(Long activeClusterPhyId, String activeResName, Long standbyClusterPhyId, String standbyResName, Integer resType, Integer status) {
        this.activeClusterPhyId = activeClusterPhyId;
        this.activeResName = activeResName;
        this.standbyClusterPhyId = standbyClusterPhyId;
        this.standbyResName = standbyResName;
        this.resType = resType;
        this.status = status;

        // Unique per pair of resources; note the pair is not limited to a single active-standby relation -- an active-active setup is also possible, with each side acting as the other's standby
        this.uniqueField = String.format("%d_%s||%d_%s||%d", activeClusterPhyId, activeResName, standbyClusterPhyId, standbyResName, resType);
    }
}
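The uniqueField set in the six-argument constructor concatenates both sides of the relation plus the resource type, so each direction of a pair gets its own key. For example, with placeholder IDs, names, resType and status:

HaASRelationDO relation = new HaASRelationDO(100L, "order-event", 200L, "order-event", 0, 1);
// relation.getUniqueField() == "100_order-event||200_order-event||0"
// The reverse direction (200 -> 100) yields a different key, which is what allows an active-active setup.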
@@ -0,0 +1,68 @@
|
||||
package com.xiaojukeji.kafka.manager.common.entity.pojo.ha;
|
||||
|
||||
import com.baomidou.mybatisplus.annotation.TableName;
|
||||
import com.xiaojukeji.kafka.manager.common.entity.dto.ha.KafkaUserAndClientDTO;
|
||||
import com.xiaojukeji.kafka.manager.common.entity.pojo.BaseDO;
|
||||
import com.xiaojukeji.kafka.manager.common.utils.ConvertUtil;
|
||||
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
|
||||
/**
|
||||
* HA active-standby switch job table
|
||||
*/
|
||||
@Data
|
||||
@NoArgsConstructor
|
||||
@TableName("ha_active_standby_switch_job")
|
||||
public class HaASSwitchJobDO extends BaseDO {
|
||||
/**
|
||||
* 主集群ID
|
||||
*/
|
||||
private Long activeClusterPhyId;
|
||||
|
||||
/**
|
||||
* 备集群ID
|
||||
*/
|
||||
private Long standbyClusterPhyId;
|
||||
|
||||
/**
|
||||
* 主备状态
|
||||
*/
|
||||
private Integer jobStatus;
|
||||
|
||||
/**
|
||||
* Type, 0: kafkaUser, 1: kafkaUser+Client
|
||||
*/
|
||||
private Integer type;
|
||||
|
||||
/**
|
||||
* 扩展数据
|
||||
*/
|
||||
private String extendData;
|
||||
|
||||
/**
|
||||
* 操作人
|
||||
*/
|
||||
private String operator;
|
||||
|
||||
public HaASSwitchJobDO(Long activeClusterPhyId, Long standbyClusterPhyId, Integer type, List<KafkaUserAndClientDTO> extendDataObj, Integer jobStatus, String operator) {
|
||||
this.activeClusterPhyId = activeClusterPhyId;
|
||||
this.standbyClusterPhyId = standbyClusterPhyId;
|
||||
this.type = type;
|
||||
this.extendData = ValidateUtils.isEmptyList(extendDataObj)? "": ConvertUtil.obj2Json(extendDataObj);
|
||||
this.jobStatus = jobStatus;
|
||||
this.operator = operator;
|
||||
}
|
||||
|
||||
public List<KafkaUserAndClientDTO> getExtendRawData() {
|
||||
if (ValidateUtils.isBlank(extendData)) {
|
||||
return new ArrayList<>();
|
||||
}
|
||||
|
||||
return ConvertUtil.str2ObjArrayByJson(extendData, KafkaUserAndClientDTO.class);
|
||||
}
|
||||
}
|
||||
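extendData stores the kafkaUser/clientId scope of a switch job as JSON via ConvertUtil, and getExtendRawData() parses it back into DTOs. A sketch of the round-trip; the cluster IDs, type, user names and operator are placeholders, and java.util imports are assumed.

List<KafkaUserAndClientDTO> scope =
        Arrays.asList(new KafkaUserAndClientDTO("ks_trade_user", "trade-client"));

HaASSwitchJobDO jobDO = new HaASSwitchJobDO(
        100L, 200L, 1, scope, HaJobStatusEnum.RUNNING.getStatus(), "admin");

// Later, e.g. when the job is resumed:
List<KafkaUserAndClientDTO> parsed = jobDO.getExtendRawData();   // empty list when extendData is blank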
@@ -0,0 +1,67 @@
|
||||
package com.xiaojukeji.kafka.manager.common.entity.pojo.ha;
|
||||
|
||||
import com.baomidou.mybatisplus.annotation.TableName;
|
||||
import com.xiaojukeji.kafka.manager.common.entity.pojo.BaseDO;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
|
||||
/**
|
||||
* HA active-standby switch sub-job table
|
||||
*/
|
||||
@Data
|
||||
@NoArgsConstructor
|
||||
@TableName("ha_active_standby_switch_sub_job")
|
||||
public class HaASSwitchSubJobDO extends BaseDO {
|
||||
/**
|
||||
* 任务ID
|
||||
*/
|
||||
private Long jobId;
|
||||
|
||||
/**
|
||||
* 主集群ID
|
||||
*/
|
||||
private Long activeClusterPhyId;
|
||||
|
||||
/**
|
||||
* 主集群资源名称
|
||||
*/
|
||||
private String activeResName;
|
||||
|
||||
/**
|
||||
* 备集群ID
|
||||
*/
|
||||
private Long standbyClusterPhyId;
|
||||
|
||||
/**
|
||||
* 备集群资源名称
|
||||
*/
|
||||
private String standbyResName;
|
||||
|
||||
/**
|
||||
* 资源类型
|
||||
*/
|
||||
private Integer resType;
|
||||
|
||||
/**
|
||||
* 任务状态
|
||||
*/
|
||||
private Integer jobStatus;
|
||||
|
||||
/**
|
||||
* 扩展数据
|
||||
* @see com.xiaojukeji.kafka.manager.common.entity.ao.ha.job.HaSubJobExtendData
|
||||
*/
|
||||
private String extendData;
|
||||
|
||||
public HaASSwitchSubJobDO(Long jobId, Long activeClusterPhyId, String activeResName, Long standbyClusterPhyId, String standbyResName, Integer resType, Integer jobStatus, String extendData) {
|
||||
this.jobId = jobId;
|
||||
this.activeClusterPhyId = activeClusterPhyId;
|
||||
this.activeResName = activeResName;
|
||||
this.standbyClusterPhyId = standbyClusterPhyId;
|
||||
this.standbyResName = standbyResName;
|
||||
this.resType = resType;
|
||||
this.jobStatus = jobStatus;
|
||||
this.extendData = extendData;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,50 @@
package com.xiaojukeji.kafka.manager.common.entity.pojo.ha;

import com.baomidou.mybatisplus.annotation.TableName;
import com.xiaojukeji.kafka.manager.common.entity.pojo.BaseDO;
import lombok.Data;
import lombok.NoArgsConstructor;

import java.util.Date;


@Data
@NoArgsConstructor
@TableName("job_log")
public class JobLogDO extends BaseDO {
    /**
     * Business type
     */
    private Integer bizType;

    /**
     * Business keyword
     */
    private String bizKeyword;

    /**
     * Print time
     */
    private Date printTime;

    /**
     * Content
     */
    private String content;

    public JobLogDO(Integer bizType, String bizKeyword) {
        this.bizType = bizType;
        this.bizKeyword = bizKeyword;
    }

    public JobLogDO(Integer bizType, String bizKeyword, Date printTime, String content) {
        this.bizType = bizType;
        this.bizKeyword = bizKeyword;
        this.printTime = printTime;
        this.content = content;
    }

    public JobLogDO setAndCopyNew(Date printTime, String content) {
        return new JobLogDO(this.bizType, this.bizKeyword, printTime, content);
    }
}
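setAndCopyNew() lets one JobLogDO act as a template: bizType and bizKeyword stay fixed while each appended line gets its own timestamp and content. A short illustrative sketch; the bizType value and log text are assumptions:

    JobLogDO template = new JobLogDO(1, "ha-switch-job-123");                 // illustrative values
    JobLogDO line1 = template.setAndCopyNew(new Date(), "sub job started");
    JobLogDO line2 = template.setAndCopyNew(new Date(), "sub job finished");
    // line1 and line2 share the template's bizType/bizKeyword but are independent rows.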
@@ -2,12 +2,14 @@ package com.xiaojukeji.kafka.manager.common.entity.vo.common;

import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;

/**
 * Topic information
 * @author zengqiao
 * @date 19/4/1
 */
@Data
@ApiModel(description = "Topic信息概览")
public class TopicOverviewVO {
    @ApiModelProperty(value = "集群ID")
@@ -49,109 +51,8 @@ public class TopicOverviewVO {
    @ApiModelProperty(value = "逻辑集群id")
    private Long logicalClusterId;

    public Long getClusterId() {
        return clusterId;
    }

    public void setClusterId(Long clusterId) {
        this.clusterId = clusterId;
    }

    public String getTopicName() {
        return topicName;
    }

    public void setTopicName(String topicName) {
        this.topicName = topicName;
    }

    public Integer getReplicaNum() {
        return replicaNum;
    }

    public void setReplicaNum(Integer replicaNum) {
        this.replicaNum = replicaNum;
    }

    public Integer getPartitionNum() {
        return partitionNum;
    }

    public void setPartitionNum(Integer partitionNum) {
        this.partitionNum = partitionNum;
    }

    public Long getRetentionTime() {
        return retentionTime;
    }

    public void setRetentionTime(Long retentionTime) {
        this.retentionTime = retentionTime;
    }

    public Object getByteIn() {
        return byteIn;
    }

    public void setByteIn(Object byteIn) {
        this.byteIn = byteIn;
    }

    public Object getByteOut() {
        return byteOut;
    }

    public void setByteOut(Object byteOut) {
        this.byteOut = byteOut;
    }

    public Object getProduceRequest() {
        return produceRequest;
    }

    public void setProduceRequest(Object produceRequest) {
        this.produceRequest = produceRequest;
    }

    public String getAppName() {
        return appName;
    }

    public void setAppName(String appName) {
        this.appName = appName;
    }

    public String getAppId() {
        return appId;
    }

    public void setAppId(String appId) {
        this.appId = appId;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public Long getUpdateTime() {
        return updateTime;
    }

    public void setUpdateTime(Long updateTime) {
        this.updateTime = updateTime;
    }

    public Long getLogicalClusterId() {
        return logicalClusterId;
    }

    public void setLogicalClusterId(Long logicalClusterId) {
        this.logicalClusterId = logicalClusterId;
    }

    @ApiModelProperty(value = "高可用关系:1:主topic, 0:备topic , 其他:非高可用topic")
    private Integer haRelation;

    @Override
    public String toString() {
@@ -169,6 +70,7 @@ public class TopicOverviewVO {
                ", description='" + description + '\'' +
                ", updateTime=" + updateTime +
                ", logicalClusterId=" + logicalClusterId +
                ", haRelation=" + haRelation +
                '}';
    }
}

@@ -0,0 +1,34 @@
package com.xiaojukeji.kafka.manager.common.entity.vo.ha;

import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;

/**
 * @author zengqiao
 * @date 20/4/29
 */
@Data
@ApiModel(description="HA集群-Topic信息")
public class HaClusterTopicVO {
    @ApiModelProperty(value="当前查询的集群ID")
    private Long clusterId;

    @ApiModelProperty(value="Topic名称")
    private String topicName;

    @ApiModelProperty(value="生产Acl数量")
    private Integer produceAclNum;

    @ApiModelProperty(value="消费Acl数量")
    private Integer consumeAclNum;

    @ApiModelProperty(value="主集群ID")
    private Long activeClusterId;

    @ApiModelProperty(value="备集群ID")
    private Long standbyClusterId;

    @ApiModelProperty(value="主备状态")
    private Integer status;
}
@@ -0,0 +1,48 @@
package com.xiaojukeji.kafka.manager.common.entity.vo.ha;

import com.xiaojukeji.kafka.manager.common.entity.vo.rd.cluster.ClusterBaseVO;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;

/**
 * @author zengqiao
 * @date 20/4/29
 */
@Data
@ApiModel(description="HA集群-集群信息")
public class HaClusterVO extends ClusterBaseVO {
    @ApiModelProperty(value="broker数量")
    private Integer brokerNum;

    @ApiModelProperty(value="topic数量")
    private Integer topicNum;

    @ApiModelProperty(value="消费组数")
    private Integer consumerGroupNum;

    @ApiModelProperty(value="region数")
    private Integer regionNum;

    @ApiModelProperty(value="ControllerID")
    private Integer controllerId;

    /**
     * @see com.xiaojukeji.kafka.manager.common.bizenum.ha.HaStatusEnum
     */
    @ApiModelProperty(value="主备状态")
    private Integer haStatus;

    @ApiModelProperty(value="主topic数")
    private Long activeTopicCount;

    @ApiModelProperty(value="备topic数")
    private Long standbyTopicCount;

    @ApiModelProperty(value="备集群信息")
    private HaClusterVO haClusterVO;

    @ApiModelProperty(value="切换任务id")
    private Long haASSwitchJobId;

}
@@ -0,0 +1,37 @@
package com.xiaojukeji.kafka.manager.common.entity.vo.ha.job;

import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;

@Data
@NoArgsConstructor
@AllArgsConstructor
@ApiModel(description = "Job详情")
public class HaJobDetailVO {
    @ApiModelProperty(value = "Topic名称")
    private String topicName;

    @ApiModelProperty(value="主物理集群ID")
    private Long activeClusterPhyId;

    @ApiModelProperty(value="主物理集群名称")
    private String activeClusterPhyName;

    @ApiModelProperty(value="备物理集群ID")
    private Long standbyClusterPhyId;

    @ApiModelProperty(value="备物理集群名称")
    private String standbyClusterPhyName;

    @ApiModelProperty(value="Lag和")
    private Long sumLag;

    @ApiModelProperty(value="状态")
    private Integer status;

    @ApiModelProperty(value="超时时间配置")
    private Long timeoutUnitSecConfig;
}
@@ -0,0 +1,46 @@
package com.xiaojukeji.kafka.manager.common.entity.vo.ha.job;

import com.xiaojukeji.kafka.manager.common.entity.ao.ha.job.HaJobState;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;

@Data
@NoArgsConstructor
@AllArgsConstructor
@ApiModel(description = "Job状态")
public class HaJobStateVO {
    @ApiModelProperty(value = "任务总数")
    private Integer jobNu;

    @ApiModelProperty(value = "运行中的任务数")
    private Integer runningNu;

    @ApiModelProperty(value = "超时运行中的任务数")
    private Integer runningInTimeoutNu;

    @ApiModelProperty(value = "准备好待运行的任务数")
    private Integer waitingNu;

    @ApiModelProperty(value = "运行成功的任务数")
    private Integer successNu;

    @ApiModelProperty(value = "运行失败的任务数")
    private Integer failedNu;

    @ApiModelProperty(value = "进度,[0 - 100]")
    private Integer progress;

    public HaJobStateVO(HaJobState jobState) {
        this.jobNu = jobState.getTotal();
        this.runningNu = jobState.getDoing();
        this.runningInTimeoutNu = jobState.getDoingInTimeout();
        this.waitingNu = 0;
        this.successNu = jobState.getSuccess();
        this.failedNu = jobState.getFailed();

        this.progress = jobState.getProgress();
    }
}
@@ -0,0 +1,26 @@
package com.xiaojukeji.kafka.manager.common.entity.vo.normal.topic;

import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;

/**
 * @author zengqiao
 * @date 20/4/8
 */
@Data
@ApiModel(value = "集群的topic高可用状态")
public class HaClusterTopicHaStatusVO {
    @ApiModelProperty(value = "物理集群ID")
    private Long clusterId;

    @ApiModelProperty(value = "物理集群名称")
    private String clusterName;

    @ApiModelProperty(value = "Topic名称")
    private String topicName;

    @ApiModelProperty(value = "高可用关系:1:主topic, 0:备topic , 其他:非高可用topic")
    private Integer haRelation;

}
@@ -2,6 +2,7 @@ package com.xiaojukeji.kafka.manager.common.entity.vo.normal.topic;

import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;

import java.util.List;

@@ -10,6 +11,7 @@ import java.util.List;
 * @author zengqiao
 * @date 19/4/1
 */
@Data
@ApiModel(description = "Topic基本信息")
public class TopicBasicVO {
    @ApiModelProperty(value = "集群id")
@@ -57,125 +59,8 @@ public class TopicBasicVO {
    @ApiModelProperty(value = "所属region")
    private List<String> regionNameList;

    public Long getClusterId() {
        return clusterId;
    }

    public void setClusterId(Long clusterId) {
        this.clusterId = clusterId;
    }

    public String getAppId() {
        return appId;
    }

    public void setAppId(String appId) {
        this.appId = appId;
    }

    public String getAppName() {
        return appName;
    }

    public void setAppName(String appName) {
        this.appName = appName;
    }

    public Integer getPartitionNum() {
        return partitionNum;
    }

    public void setPartitionNum(Integer partitionNum) {
        this.partitionNum = partitionNum;
    }

    public Integer getReplicaNum() {
        return replicaNum;
    }

    public void setReplicaNum(Integer replicaNum) {
        this.replicaNum = replicaNum;
    }

    public String getPrincipals() {
        return principals;
    }

    public void setPrincipals(String principals) {
        this.principals = principals;
    }

    public Long getRetentionTime() {
        return retentionTime;
    }

    public void setRetentionTime(Long retentionTime) {
        this.retentionTime = retentionTime;
    }

    public Long getRetentionBytes() {
        return retentionBytes;
    }

    public void setRetentionBytes(Long retentionBytes) {
        this.retentionBytes = retentionBytes;
    }

    public Long getCreateTime() {
        return createTime;
    }

    public void setCreateTime(Long createTime) {
        this.createTime = createTime;
    }

    public Long getModifyTime() {
        return modifyTime;
    }

    public void setModifyTime(Long modifyTime) {
        this.modifyTime = modifyTime;
    }

    public Integer getScore() {
        return score;
    }

    public void setScore(Integer score) {
        this.score = score;
    }

    public String getTopicCodeC() {
        return topicCodeC;
    }

    public void setTopicCodeC(String topicCodeC) {
        this.topicCodeC = topicCodeC;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public String getBootstrapServers() {
        return bootstrapServers;
    }

    public void setBootstrapServers(String bootstrapServers) {
        this.bootstrapServers = bootstrapServers;
    }

    public List<String> getRegionNameList() {
        return regionNameList;
    }

    public void setRegionNameList(List<String> regionNameList) {
        this.regionNameList = regionNameList;
    }

    @ApiModelProperty(value = "高可用关系:1:主topic, 0:备topic , 其他:非主备topic")
    private Integer haRelation;

    @Override
    public String toString() {
@@ -195,6 +80,7 @@ public class TopicBasicVO {
                ", description='" + description + '\'' +
                ", bootstrapServers='" + bootstrapServers + '\'' +
                ", regionNameList=" + regionNameList +
                ", haRelation=" + haRelation +
                '}';
    }
}

@@ -2,11 +2,13 @@ package com.xiaojukeji.kafka.manager.common.entity.vo.normal.topic;

import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;

/**
 * @author zhongyuankai,zengqiao
 * @date 20/4/8
 */
@Data
@ApiModel(value = "Topic连接信息")
public class TopicConnectionVO {
    @ApiModelProperty(value = "集群ID")
@@ -30,72 +32,12 @@ public class TopicConnectionVO {
    @ApiModelProperty(value = "客户端版本")
    private String clientVersion;

    public Long getClusterId() {
        return clusterId;
    }

    @ApiModelProperty(value = "客户端ID")
    private String clientId;

    public void setClusterId(Long clusterId) {
        this.clusterId = clusterId;
    }

    @ApiModelProperty(value = "连接Broker时间")
    private Long realConnectTime;

    public String getTopicName() {
        return topicName;
    }

    public void setTopicName(String topicName) {
        this.topicName = topicName;
    }

    public String getAppId() {
        return appId;
    }

    public void setAppId(String appId) {
        this.appId = appId;
    }

    public String getIp() {
        return ip;
    }

    public void setIp(String ip) {
        this.ip = ip;
    }

    public String getHostname() {
        return hostname;
    }

    public void setHostname(String hostname) {
        this.hostname = hostname;
    }

    public String getClientType() {
        return clientType;
    }

    public void setClientType(String clientType) {
        this.clientType = clientType;
    }

    public String getClientVersion() {
        return clientVersion;
    }

    public void setClientVersion(String clientVersion) {
        this.clientVersion = clientVersion;
    }

    @Override
    public String toString() {
        return "TopicConnectionVO{" +
                "clusterId=" + clusterId +
                ", topicName='" + topicName + '\'' +
                ", appId='" + appId + '\'' +
                ", ip='" + ip + '\'' +
                ", hostname='" + hostname + '\'' +
                ", clientType='" + clientType + '\'' +
                ", clientVersion='" + clientVersion + '\'' +
                '}';
    }

    @ApiModelProperty(value = "创建时间")
    private Long createTime;
}

@@ -0,0 +1,26 @@
package com.xiaojukeji.kafka.manager.common.entity.vo.normal.topic;

import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;

/**
 * @author zengqiao
 * @date 20/4/8
 */
@Data
@ApiModel(value = "Topic信息")
public class TopicHaVO {
    @ApiModelProperty(value = "物理集群ID")
    private Long clusterId;

    @ApiModelProperty(value = "物理集群名称")
    private String clusterName;

    @ApiModelProperty(value = "Topic名称")
    private String topicName;

    @ApiModelProperty(value = "高可用关系:1:主topic, 0:备topic , 其他:非高可用topic")
    private Integer haRelation;

}
@@ -2,6 +2,7 @@ package com.xiaojukeji.kafka.manager.common.entity.vo.rd;

import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;

import java.util.List;
import java.util.Properties;
@@ -10,6 +11,7 @@ import java.util.Properties;
 * @author zengqiao
 * @date 20/6/10
 */
@Data
@ApiModel(description = "Topic基本信息(RD视角)")
public class RdTopicBasicVO {
    @ApiModelProperty(value = "集群ID")
@@ -39,77 +41,8 @@ public class RdTopicBasicVO {
    @ApiModelProperty(value = "所属region")
    private List<String> regionNameList;

    public Long getClusterId() {
        return clusterId;
    }

    public void setClusterId(Long clusterId) {
        this.clusterId = clusterId;
    }

    public String getClusterName() {
        return clusterName;
    }

    public void setClusterName(String clusterName) {
        this.clusterName = clusterName;
    }

    public String getTopicName() {
        return topicName;
    }

    public void setTopicName(String topicName) {
        this.topicName = topicName;
    }

    public Long getRetentionTime() {
        return retentionTime;
    }

    public void setRetentionTime(Long retentionTime) {
        this.retentionTime = retentionTime;
    }

    public String getAppId() {
        return appId;
    }

    public void setAppId(String appId) {
        this.appId = appId;
    }

    public String getAppName() {
        return appName;
    }

    public void setAppName(String appName) {
        this.appName = appName;
    }

    public Properties getProperties() {
        return properties;
    }

    public void setProperties(Properties properties) {
        this.properties = properties;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public List<String> getRegionNameList() {
        return regionNameList;
    }

    public void setRegionNameList(List<String> regionNameList) {
        this.regionNameList = regionNameList;
    }

    @ApiModelProperty(value = "高可用关系:1:主topic, 0:备topic , 其他:非主备topic")
    private Integer haRelation;

    @Override
    public String toString() {
@@ -122,7 +55,8 @@ public class RdTopicBasicVO {
                ", appName='" + appName + '\'' +
                ", properties=" + properties +
                ", description='" + description + '\'' +
                ", regionNameList='" + regionNameList + '\'' +
                ", regionNameList=" + regionNameList +
                ", haRelation=" + haRelation +
                '}';
    }
}
@@ -0,0 +1,72 @@
package com.xiaojukeji.kafka.manager.common.entity.vo.rd.app;

import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import lombok.NoArgsConstructor;

import java.util.ArrayList;
import java.util.List;

/**
 * @author zengqiao
 * @date 20/5/4
 */
@Data
@NoArgsConstructor
@ApiModel(description="App关联Topic信息")
public class AppRelateTopicsVO {
    @ApiModelProperty(value="物理集群ID")
    private Long clusterPhyId;

    @ApiModelProperty(value="kafkaUser")
    private String kafkaUser;

    @ApiModelProperty(value="clientId")
    private String clientId;

    @ApiModelProperty(value="已建立HA的Client")
    private List<String> haClientIdList;

    @ApiModelProperty(value="选中的Topic列表")
    private List<String> selectedTopicNameList;

    @ApiModelProperty(value="未选中的Topic列表")
    private List<String> notSelectTopicNameList;

    @ApiModelProperty(value="未建立HA的Topic列表")
    private List<String> notHaTopicNameList;

    public AppRelateTopicsVO(Long clusterPhyId, String kafkaUser, String clientId) {
        this.clusterPhyId = clusterPhyId;
        this.kafkaUser = kafkaUser;
        this.clientId = clientId;
        this.selectedTopicNameList = new ArrayList<>();
        this.notSelectTopicNameList = new ArrayList<>();
        this.notHaTopicNameList = new ArrayList<>();
    }

    public void addSelectedIfNotExist(String topicName) {
        if (selectedTopicNameList.contains(topicName)) {
            return;
        }

        selectedTopicNameList.add(topicName);
    }

    public void addNotSelectedIfNotExist(String topicName) {
        if (notSelectTopicNameList.contains(topicName)) {
            return;
        }

        notSelectTopicNameList.add(topicName);
    }

    public void addNotHaIfNotExist(String topicName) {
        if (notHaTopicNameList.contains(topicName)) {
            return;
        }

        notHaTopicNameList.add(topicName);
    }
}
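The add*IfNotExist helpers above give the three topic lists set-like semantics on top of ArrayList (a linear contains() check before each add). A usage sketch with illustrative values:

    AppRelateTopicsVO vo = new AppRelateTopicsVO(1L, "demo-user", "demo-client");  // illustrative IDs
    vo.addSelectedIfNotExist("topic-a");
    vo.addSelectedIfNotExist("topic-a");     // duplicate, silently ignored
    vo.addNotHaIfNotExist("topic-b");
    // vo.getSelectedTopicNameList() -> ["topic-a"] via the Lombok-generated getter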
@@ -2,11 +2,13 @@ package com.xiaojukeji.kafka.manager.common.entity.vo.rd.cluster;

import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;

/**
 * @author zengqiao
 * @date 20/4/23
 */
@Data
@ApiModel(description="集群信息")
public class ClusterDetailVO extends ClusterBaseVO {
    @ApiModelProperty(value="Broker数")
@@ -24,45 +26,11 @@ ClusterDetailVO extends ClusterBaseVO {
    @ApiModelProperty(value="Region数")
    private Integer regionNum;

    public Integer getBrokerNum() {
        return brokerNum;
    }

    @ApiModelProperty(value = "高可用关系:1:主, 0:备 , 其他:非高可用")
    private Integer haRelation;

    public void setBrokerNum(Integer brokerNum) {
        this.brokerNum = brokerNum;
    }

    public Integer getTopicNum() {
        return topicNum;
    }

    public void setTopicNum(Integer topicNum) {
        this.topicNum = topicNum;
    }

    public Integer getConsumerGroupNum() {
        return consumerGroupNum;
    }

    public void setConsumerGroupNum(Integer consumerGroupNum) {
        this.consumerGroupNum = consumerGroupNum;
    }

    public Integer getControllerId() {
        return controllerId;
    }

    public void setControllerId(Integer controllerId) {
        this.controllerId = controllerId;
    }

    public Integer getRegionNum() {
        return regionNum;
    }

    public void setRegionNum(Integer regionNum) {
        this.regionNum = regionNum;
    }

    @ApiModelProperty(value = "互备集群名称")
    private String mutualBackupClusterName;

    @Override
    public String toString() {
@@ -72,6 +40,8 @@ public class ClusterDetailVO extends ClusterBaseVO {
                ", consumerGroupNum=" + consumerGroupNum +
                ", controllerId=" + controllerId +
                ", regionNum=" + regionNum +
                "} " + super.toString();
                ", haRelation=" + haRelation +
                ", mutualBackupClusterName='" + mutualBackupClusterName + '\'' +
                '}';
    }
}
@@ -0,0 +1,30 @@
package com.xiaojukeji.kafka.manager.common.entity.vo.rd.job;

import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;

import java.util.Date;

@Data
@NoArgsConstructor
@AllArgsConstructor
@ApiModel(description = "Job日志")
public class JobLogVO {
    @ApiModelProperty(value = "日志ID")
    protected Long id;

    @ApiModelProperty(value = "业务类型")
    private Integer bizType;

    @ApiModelProperty(value = "业务关键字")
    private String bizKeyword;

    @ApiModelProperty(value = "打印时间")
    private Date printTime;

    @ApiModelProperty(value = "内容")
    private String content;
}
@@ -0,0 +1,31 @@
package com.xiaojukeji.kafka.manager.common.entity.vo.rd.job;

import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;

import java.util.ArrayList;
import java.util.List;

@Data
@NoArgsConstructor
@AllArgsConstructor
@ApiModel(description = "Job日志")
public class JobMulLogVO {
    @ApiModelProperty(value = "末尾日志ID")
    private Long endLogId;

    @ApiModelProperty(value = "日志信息")
    private List<JobLogVO> logList;

    public JobMulLogVO(List<JobLogVO> logList, Long startLogId) {
        this.logList = logList == null ? new ArrayList<>() : logList;
        if (!this.logList.isEmpty()) {
            this.endLogId = this.logList.stream().map(elem -> elem.id).reduce(Long::max).get() + 1;
        } else {
            this.endLogId = startLogId;
        }
    }
}
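endLogId is intended as the cursor for the next poll: the maximum returned log id plus one when logs came back, otherwise the caller's startLogId echoed unchanged. A sketch of that paging contract, where fetchLogsAfter(...) stands in for whatever service call returns the List<JobLogVO>:

    Long startLogId = 0L;                                  // illustrative starting cursor
    List<JobLogVO> logs = fetchLogsAfter(startLogId);      // hypothetical placeholder call
    JobMulLogVO page = new JobMulLogVO(logs, startLogId);
    startLogId = page.getEndLogId();                       // unchanged when no new logs arrived
    // repeat with the new cursor to tail the job log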
@@ -0,0 +1,20 @@
package com.xiaojukeji.kafka.manager.common.events;

import com.xiaojukeji.kafka.manager.common.entity.pojo.RegionDO;
import lombok.Getter;
import org.springframework.context.ApplicationEvent;

/**
 * Region created event
 * @author zengqiao
 * @date 22/01/1
 */
@Getter
public class RegionCreatedEvent extends ApplicationEvent {
    private final RegionDO regionDO;

    public RegionCreatedEvent(Object source, RegionDO regionDO) {
        super(source);
        this.regionDO = regionDO;
    }
}
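RegionCreatedEvent is a standard Spring ApplicationEvent, so it is published and consumed with the usual publisher/listener pattern. A minimal sketch; the listener class below is illustrative and not part of this diff:

    // Publishing side (e.g. inside the service that creates the region):
    //     applicationEventPublisher.publishEvent(new RegionCreatedEvent(this, regionDO));

    // Consuming side, as a hypothetical Spring component:
    @Component
    public class RegionCreatedListener {
        @EventListener
        public void onRegionCreated(RegionCreatedEvent event) {
            RegionDO regionDO = event.getRegionDO();   // getter generated by @Getter on the event
            // react to the newly created region, e.g. refresh region caches
        }
    }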
Some files were not shown because too many files have changed in this diff