mirror of
https://github.com/didi/KnowStreaming.git
synced 2026-01-02 02:02:13 +08:00
Compare commits
214 Commits
v3.0.0-bet
...
v3.0.0-bet
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
41637dc1e5 | ||
|
|
feac0a058f | ||
|
|
27eeac9fd4 | ||
|
|
a14db4b194 | ||
|
|
54ee271a47 | ||
|
|
a3a9be4f7f | ||
|
|
d4f0a832f3 | ||
|
|
7dc533372c | ||
|
|
1737d87713 | ||
|
|
dbb98dea11 | ||
|
|
802b382b36 | ||
|
|
fc82999d45 | ||
|
|
08aa000c07 | ||
|
|
39015b5100 | ||
|
|
0d635ad419 | ||
|
|
9133205915 | ||
|
|
725ac10c3d | ||
|
|
2b76358c8f | ||
|
|
833c360698 | ||
|
|
7da1e67b01 | ||
|
|
7eb86a47dd | ||
|
|
d67e383c28 | ||
|
|
8749d3e1f5 | ||
|
|
30fba21c48 | ||
|
|
d83d35aee9 | ||
|
|
1d3caeea7d | ||
|
|
fbfa0d2d2a | ||
|
|
e626b99090 | ||
|
|
203859b71b | ||
|
|
9a25c22f3a | ||
|
|
0a03f41a7c | ||
|
|
56191939c8 | ||
|
|
beb754aaaa | ||
|
|
f234f740ca | ||
|
|
e14679694c | ||
|
|
e06712397e | ||
|
|
b6c6df7ffc | ||
|
|
375c6f56c9 | ||
|
|
0bf85c97b5 | ||
|
|
630e582321 | ||
|
|
a89fe23bdd | ||
|
|
a7a5fa9a31 | ||
|
|
c73a7eee2f | ||
|
|
121f8468d5 | ||
|
|
7b0b6936e0 | ||
|
|
597ea04a96 | ||
|
|
f7f90aeaaa | ||
|
|
227479f695 | ||
|
|
6477fb3fe0 | ||
|
|
4223f4f3c4 | ||
|
|
7288874d72 | ||
|
|
68f76f2daf | ||
|
|
fe6ddebc49 | ||
|
|
12b5acd073 | ||
|
|
a6f1fe07b3 | ||
|
|
85e3f2a946 | ||
|
|
d4f416de14 | ||
|
|
0d9a6702c1 | ||
|
|
d11285cdbf | ||
|
|
5f1f33d2b9 | ||
|
|
474daf752d | ||
|
|
27d1b92690 | ||
|
|
65499443c2 | ||
|
|
993afa4c19 | ||
|
|
6515dd28aa | ||
|
|
028d891c32 | ||
|
|
0df55ec22d | ||
|
|
579f64774d | ||
|
|
792f8d939d | ||
|
|
e4fb02fcda | ||
|
|
0c14c641d0 | ||
|
|
dba671fd1e | ||
|
|
80d1693722 | ||
|
|
26014a11b2 | ||
|
|
848fddd55a | ||
|
|
97f5f05f1a | ||
|
|
25b82810f2 | ||
|
|
9b1e506fa7 | ||
|
|
7a42996e97 | ||
|
|
dbfcebcf67 | ||
|
|
37c3f69a28 | ||
|
|
5d412890b4 | ||
|
|
1e318a4c40 | ||
|
|
d4549176ec | ||
|
|
61efdf492f | ||
|
|
67ea4d44c8 | ||
|
|
fdae05a4aa | ||
|
|
5efb837ee8 | ||
|
|
584b626d93 | ||
|
|
de25a4ed8e | ||
|
|
2e852e5ca6 | ||
|
|
b11000715a | ||
|
|
b3f8b46f0f | ||
|
|
8d22a0664a | ||
|
|
20756a3453 | ||
|
|
c9b4d45a64 | ||
|
|
83f7f5468b | ||
|
|
59c042ad67 | ||
|
|
d550fc5068 | ||
|
|
6effba69a0 | ||
|
|
9b46956259 | ||
|
|
b5a4a732da | ||
|
|
487862367e | ||
|
|
5b63b9ce67 | ||
|
|
afbcd3e1df | ||
|
|
12b82c1395 | ||
|
|
863b765e0d | ||
|
|
731429c51c | ||
|
|
66f3bc61fe | ||
|
|
4efe35dd51 | ||
|
|
c92461ef93 | ||
|
|
405e6e0c1d | ||
|
|
0d227aef49 | ||
|
|
0e49002f42 | ||
|
|
2e016800e0 | ||
|
|
09f317b991 | ||
|
|
5a48cb1547 | ||
|
|
f632febf33 | ||
|
|
3c53467943 | ||
|
|
d358c0f4f7 | ||
|
|
de977a5b32 | ||
|
|
703d685d59 | ||
|
|
31a5f17408 | ||
|
|
c40ae3c455 | ||
|
|
b71a34279e | ||
|
|
8f8c0c4eda | ||
|
|
3a384f0e34 | ||
|
|
cf7bc11cbd | ||
|
|
be60ae8399 | ||
|
|
8e50d145d5 | ||
|
|
7a3d15525c | ||
|
|
64f32d8b24 | ||
|
|
949d6ba605 | ||
|
|
ceb8db09f4 | ||
|
|
ed05a0ebb8 | ||
|
|
a7cbb76655 | ||
|
|
93cbfa0b1f | ||
|
|
6120613a98 | ||
|
|
dbd00db159 | ||
|
|
befde952f5 | ||
|
|
1aa759e5be | ||
|
|
13354145fc | ||
|
|
2de27719c1 | ||
|
|
21db57b537 | ||
|
|
dfe8d09477 | ||
|
|
90dfa22c64 | ||
|
|
0f35427645 | ||
|
|
7909f60ff8 | ||
|
|
9a1a8a4c30 | ||
|
|
fa7ad64140 | ||
|
|
0b376bd69c | ||
|
|
8a0c23339d | ||
|
|
e7ab3aff16 | ||
|
|
d0948797b9 | ||
|
|
04a5e17451 | ||
|
|
47065c8042 | ||
|
|
488c778736 | ||
|
|
d10a7bcc75 | ||
|
|
afe44a2537 | ||
|
|
9eadafe850 | ||
|
|
dab3eefcc0 | ||
|
|
2b9a6b28d8 | ||
|
|
465f98ca2b | ||
|
|
a0312be4fd | ||
|
|
4a5161372b | ||
|
|
4c9921f752 | ||
|
|
6dd72d40ee | ||
|
|
db49c234bb | ||
|
|
4a9df0c4d9 | ||
|
|
461573c2ba | ||
|
|
291992753f | ||
|
|
fcefe7ac38 | ||
|
|
7da712fcff | ||
|
|
2fd8687624 | ||
|
|
639b1f8336 | ||
|
|
ab3b83e42a | ||
|
|
4818629c40 | ||
|
|
61784c860a | ||
|
|
d5667254f2 | ||
|
|
af2b93983f | ||
|
|
8281301cbd | ||
|
|
0043ab8371 | ||
|
|
500eaace82 | ||
|
|
28e8540c78 | ||
|
|
69adf682e2 | ||
|
|
69cd1ff6e1 | ||
|
|
415d67cc32 | ||
|
|
46a2fec79b | ||
|
|
560b322fca | ||
|
|
effe17ac85 | ||
|
|
7699acfc1b | ||
|
|
6e058240b3 | ||
|
|
f005c6bc44 | ||
|
|
7be462599f | ||
|
|
271ab432d9 | ||
|
|
4114777a4e | ||
|
|
9189a54442 | ||
|
|
b95ee762e3 | ||
|
|
9e3c4dc06b | ||
|
|
1891a3ac86 | ||
|
|
9ecdcac06d | ||
|
|
790cb6a2e1 | ||
|
|
4a98e5f025 | ||
|
|
507abc1d84 | ||
|
|
9b732fbbad | ||
|
|
220f1c6fc3 | ||
|
|
7a950c67b6 | ||
|
|
78f625dc8c | ||
|
|
211d26a3ed | ||
|
|
dce2bc6326 | ||
|
|
90e5d7f6f0 | ||
|
|
fc835e09c6 | ||
|
|
c6e782a637 | ||
|
|
1ddfbfc833 |
10
README.md
10
README.md
@@ -51,16 +51,16 @@
|
|||||||
- 无需侵入改造 `Apache Kafka` ,一键便能纳管 `0.10.x` ~ `3.x.x` 众多版本的Kafka,包括 `ZK` 或 `Raft` 运行模式的版本,同时在兼容架构上具备良好的扩展性,帮助您提升集群管理水平;
|
- 无需侵入改造 `Apache Kafka` ,一键便能纳管 `0.10.x` ~ `3.x.x` 众多版本的Kafka,包括 `ZK` 或 `Raft` 运行模式的版本,同时在兼容架构上具备良好的扩展性,帮助您提升集群管理水平;
|
||||||
|
|
||||||
- 🌪️ **零成本、界面化**
|
- 🌪️ **零成本、界面化**
|
||||||
- 提炼高频 CLI 能力,设计合理的产品路径,提供清新美观的 GUI 界面,支持 Cluster、Broker、Topic、Group、Message、ACL 等组件 GUI 管理,普通用户5分钟即可上手;
|
- 提炼高频 CLI 能力,设计合理的产品路径,提供清新美观的 GUI 界面,支持 Cluster、Broker、Zookeeper、Topic、ConsumerGroup、Message、ACL、Connect 等组件 GUI 管理,普通用户5分钟即可上手;
|
||||||
|
|
||||||
- 👏 **云原生、插件化**
|
- 👏 **云原生、插件化**
|
||||||
- 基于云原生构建,具备水平扩展能力,只需要增加节点即可获取更强的采集及对外服务能力,提供众多可热插拔的企业级特性,覆盖可观测性生态整合、资源治理、多活容灾等核心场景;
|
- 基于云原生构建,具备水平扩展能力,只需要增加节点即可获取更强的采集及对外服务能力,提供众多可热插拔的企业级特性,覆盖可观测性生态整合、资源治理、多活容灾等核心场景;
|
||||||
|
|
||||||
- 🚀 **专业能力**
|
- 🚀 **专业能力**
|
||||||
- 集群管理:支持集群一键纳管,健康分析、核心组件观测 等功能;
|
- 集群管理:支持一键纳管,健康分析、核心组件观测 等功能;
|
||||||
- 观测提升:多维度指标观测大盘、观测指标最佳实践 等功能;
|
- 观测提升:多维度指标观测大盘、观测指标最佳实践 等功能;
|
||||||
- 异常巡检:集群多维度健康巡检、集群多维度健康分 等功能;
|
- 异常巡检:集群多维度健康巡检、集群多维度健康分 等功能;
|
||||||
- 能力增强:Topic扩缩副本、Topic副本迁移 等功能;
|
- 能力增强:集群负载均衡、Topic扩缩副本、Topic副本迁移 等功能;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
@@ -133,3 +133,7 @@ PS: 提问请尽量把问题一次性描述清楚,并告知环境信息情况
|
|||||||
**`2、微信群`**
|
**`2、微信群`**
|
||||||
|
|
||||||
微信加群:添加`mike_zhangliang`、`PenceXie`的微信号备注KnowStreaming加群。
|
微信加群:添加`mike_zhangliang`、`PenceXie`的微信号备注KnowStreaming加群。
|
||||||
|
|
||||||
|
## Star History
|
||||||
|
|
||||||
|
[](https://star-history.com/#didi/KnowStreaming&Date)
|
||||||
|
|||||||
@@ -1,6 +1,132 @@
|
|||||||
|
|
||||||
|
|
||||||
## v3.0.0-beta
|
## v3.0.0-beta.3
|
||||||
|
|
||||||
|
**文档**
|
||||||
|
- FAQ 补充权限识别失败问题的说明
|
||||||
|
- 同步更新文档,保持与官网一致
|
||||||
|
|
||||||
|
|
||||||
|
**Bug修复**
|
||||||
|
- Offset 信息获取时,过滤掉无 Leader 的分区
|
||||||
|
- 升级 oshi-core 版本至 5.6.1 版本,修复 Windows 系统获取系统指标失败问题
|
||||||
|
- 修复 JMX 连接被关闭后,未进行重建的问题
|
||||||
|
- 修复因 DB 中 Broker 信息不存在导致 TotalLogSize 指标获取时抛空指针问题
|
||||||
|
- 修复 dml-logi.sql 中,SQL 注释错误的问题
|
||||||
|
- 修复 startup.sh 中,识别操作系统类型错误的问题
|
||||||
|
- 修复配置管理页面删除配置失败的问题
|
||||||
|
- 修复系统管理应用文件引用路径
|
||||||
|
- 修复 Topic Messages 详情提示信息点击跳转 404 的问题
|
||||||
|
- 修复扩副本时,当前副本数不显示问题
|
||||||
|
|
||||||
|
|
||||||
|
**体验优化**
|
||||||
|
- Topic-Messages 页面,增加返回数据的排序以及按照Earliest/Latest的获取方式
|
||||||
|
- 优化 GroupOffsetResetEnum 类名为 OffsetTypeEnum,使得类名含义更准确
|
||||||
|
- 移动 KafkaZKDAO 类,及 Kafka Znode 实体类的位置,使得 Kafka Zookeeper DAO 更加内聚及便于识别
|
||||||
|
- 后端补充 Overview 页面指标排序的功能
|
||||||
|
- 前端 Webpack 配置优化
|
||||||
|
- Cluster Overview 图表取消放大展示功能
|
||||||
|
- 列表页增加手动刷新功能
|
||||||
|
- 接入/编辑集群,优化 JMX-PORT,Version 信息的回显,优化JMX信息的展示
|
||||||
|
- 提高登录页面图片展示清晰度
|
||||||
|
- 部分样式和文案优化
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## v3.0.0-beta.2
|
||||||
|
|
||||||
|
**文档**
|
||||||
|
- 新增登录系统对接文档
|
||||||
|
- 优化前端工程打包构建部分文档说明
|
||||||
|
- FAQ补充KnowStreaming连接特定JMX IP的说明
|
||||||
|
|
||||||
|
|
||||||
|
**Bug修复**
|
||||||
|
- 修复logi_security_oplog表字段过短,导致删除Topic等操作无法记录的问题
|
||||||
|
- 修复ES查询时,抛java.lang.NumberFormatException: For input string: "{"value":0,"relation":"eq"}" 问题
|
||||||
|
- 修复LogStartOffset和LogEndOffset指标单位错误问题
|
||||||
|
- 修复进行副本变更时,旧副本数为NULL的问题
|
||||||
|
- 修复集群Group列表,在第二页搜索时,搜索时返回的分页信息错误问题
|
||||||
|
- 修复重置Offset时,返回的错误信息提示不一致的问题
|
||||||
|
- 修复集群查看,系统查看,LoadRebalance等页面权限点缺失问题
|
||||||
|
- 修复查询不存在的Topic时,错误信息提示不明显的问题
|
||||||
|
- 修复Windows用户打包前端工程报错的问题
|
||||||
|
- package-lock.json锁定前端依赖版本号,修复因依赖自动升级导致打包失败等问题
|
||||||
|
- 系统管理子应用,补充后端返回的Code码拦截,解决后端接口返回报错不展示的问题
|
||||||
|
- 修复用户登出后,依旧可以访问系统的问题
|
||||||
|
- 修复巡检任务配置时,数值显示错误的问题
|
||||||
|
- 修复Broker/Topic Overview 图表和图表详情问题
|
||||||
|
- 修复Job扩缩副本任务明细数据错误的问题
|
||||||
|
- 修复重置Offset时,分区ID,Offset数值无限制问题
|
||||||
|
- 修复扩缩/迁移副本时,无法选中Kafka系统Topic的问题
|
||||||
|
- 修复Topic的Config页面,编辑表单时不能正确回显当前值的问题
|
||||||
|
- 修复Broker Card返回数据后依旧展示加载态的问题
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
**体验优化**
|
||||||
|
- 优化默认用户密码为 admin/admin
|
||||||
|
- 缩短新增集群后,集群信息加载的耗时
|
||||||
|
- 集群Broker列表,增加Controller角色信息
|
||||||
|
- 副本变更任务结束后,增加进行优先副本选举的操作
|
||||||
|
- Task模块任务分为Metrics、Common、Metadata三类任务,每类任务配备独立线程池,减少对Job模块的线程池,以及不同类任务之间的相互影响
|
||||||
|
- 删除代码中存在的多余无用文件
|
||||||
|
- 自动新增ES索引模版及近7天索引,减少用户搭建时需要做的事项
|
||||||
|
- 优化前端工程打包流程
|
||||||
|
- 优化登录页文案,页面左侧栏内容,单集群详情样式,Topic列表趋势图等
|
||||||
|
- 首次进入Broker/Topic图表详情时,进行预缓存数据从而优化体验
|
||||||
|
- 优化Topic详情Partition Tab的展示
|
||||||
|
- 多集群列表页增加编辑功能
|
||||||
|
- 优化副本变更时,迁移时间支持分钟级别粒度
|
||||||
|
- logi-security版本升级至2.10.13
|
||||||
|
- logi-elasticsearch-client版本升级至1.0.24
|
||||||
|
|
||||||
|
|
||||||
|
**能力提升**
|
||||||
|
- 支持Ldap登录认证
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## v3.0.0-beta.1
|
||||||
|
|
||||||
|
**文档**
|
||||||
|
- 新增Task模块说明文档
|
||||||
|
- FAQ补充 `Specified key was too long; max key length is 767 bytes ` 错误说明
|
||||||
|
- FAQ补充 `出现ESIndexNotFoundException报错` 错误说明
|
||||||
|
|
||||||
|
|
||||||
|
**Bug修复**
|
||||||
|
- 修复 Consumer 点击 Stop 未停止检索的问题
|
||||||
|
- 修复创建/编辑角色权限报错问题
|
||||||
|
- 修复多集群管理/单集群详情均衡卡片状态错误问题
|
||||||
|
- 修复版本列表未排序问题
|
||||||
|
- 修复Raft集群Controller信息不断记录问题
|
||||||
|
- 修复部分版本消费组描述信息获取失败问题
|
||||||
|
- 修复分区Offset获取失败的日志中,缺少Topic名称信息问题
|
||||||
|
- 修复GitHub图地址错误,及图裂问题
|
||||||
|
- 修复Broker默认使用的地址和注释不一致问题
|
||||||
|
- 修复 Consumer 列表分页不生效问题
|
||||||
|
- 修复操作记录表operation_methods字段缺少默认值问题
|
||||||
|
- 修复集群均衡表中move_broker_list字段无效的问题
|
||||||
|
- 修复KafkaUser、KafkaACL信息获取时,日志一直重复提示不支持问题
|
||||||
|
- 修复指标缺失时,曲线出现掉底的问题
|
||||||
|
|
||||||
|
|
||||||
|
**体验优化**
|
||||||
|
- 优化前端构建时间和打包体积,增加依赖打包的分包策略
|
||||||
|
- 优化产品样式和文案展示
|
||||||
|
- 优化ES客户端数为可配置
|
||||||
|
- 优化日志中大量出现的MySQL Key冲突日志
|
||||||
|
|
||||||
|
|
||||||
|
**能力提升**
|
||||||
|
- 增加周期任务,用于主动创建缺少的ES模版及索引的能力,减少额外的脚本操作
|
||||||
|
- 增加JMX连接的Broker地址可选择的能力
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## v3.0.0-beta.0
|
||||||
|
|
||||||
**1、多集群管理**
|
**1、多集群管理**
|
||||||
|
|
||||||
|
|||||||
@@ -9,7 +9,7 @@ error_exit ()
|
|||||||
[ ! -e "$JAVA_HOME/bin/java" ] && unset JAVA_HOME
|
[ ! -e "$JAVA_HOME/bin/java" ] && unset JAVA_HOME
|
||||||
|
|
||||||
if [ -z "$JAVA_HOME" ]; then
|
if [ -z "$JAVA_HOME" ]; then
|
||||||
if $darwin; then
|
if [ "Darwin" = "$(uname -s)" ]; then
|
||||||
|
|
||||||
if [ -x '/usr/libexec/java_home' ] ; then
|
if [ -x '/usr/libexec/java_home' ] ; then
|
||||||
export JAVA_HOME=`/usr/libexec/java_home`
|
export JAVA_HOME=`/usr/libexec/java_home`
|
||||||
|
|||||||
Binary file not shown.
|
Before Width: | Height: | Size: 9.5 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 183 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 50 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 59 KiB |
264
docs/dev_guide/Task模块简介.md
Normal file
264
docs/dev_guide/Task模块简介.md
Normal file
@@ -0,0 +1,264 @@
|
|||||||
|
# Task模块简介
|
||||||
|
|
||||||
|
## 1、Task简介
|
||||||
|
|
||||||
|
在 KnowStreaming 中(下面简称KS),Task模块主要是用于执行一些周期任务,包括Cluster、Broker、Topic等指标的定时采集,集群元数据定时更新至DB,集群状态的健康巡检等。在KS中,与Task模块相关的代码,我们都统一存放在km-task模块中。
|
||||||
|
|
||||||
|
Task模块是基于 LogiCommon 中的Logi-Job组件实现的任务周期执行,Logi-Job 的功能类似 XXX-Job,它是 XXX-Job 在 KnowStreaming 的内嵌实现,主要用于简化 KnowStreaming 的部署。
|
||||||
|
Logi-Job 的任务总共有两种执行模式,分别是:
|
||||||
|
|
||||||
|
+ 广播模式:同一KS集群下,同一任务周期中,所有KS主机都会执行该定时任务。
|
||||||
|
+ 抢占模式:同一KS集群下,同一任务周期中,仅有某一台KS主机会执行该任务。
|
||||||
|
|
||||||
|
KS集群范围定义:连接同一个DB,且application.yml中的spring.logi-job.app-name的名称一样的KS主机为同一KS集群。
|
||||||
|
|
||||||
|
## 2、使用指南
|
||||||
|
|
||||||
|
Task模块基于Logi-Job的广播模式与抢占模式,分别实现了任务的抢占执行、重复执行以及均衡执行,他们之间的差别是:
|
||||||
|
|
||||||
|
+ 抢占执行:同一个KS集群,同一个任务执行周期中,仅有一台KS主机执行该任务;
|
||||||
|
+ 重复执行:同一个KS集群,同一个任务执行周期中,所有KS主机都执行该任务。比如3台KS主机,3个Kafka集群,此时每台KS主机都会去采集这3个Kafka集群的指标;
|
||||||
|
+ 均衡执行:同一个KS集群,同一个任务执行周期中,每台KS主机仅执行该任务的一部分,所有的KS主机共同协作完成了任务。比如3台KS主机,3个Kafka集群,稳定运行情况下,每台KS主机将仅采集1个Kafka集群的指标,3台KS主机共同完成3个Kafka集群指标的采集。
|
||||||
|
|
||||||
|
下面我们看一下具体例子。
|
||||||
|
|
||||||
|
### 2.1、抢占模式——抢占执行
|
||||||
|
|
||||||
|
功能说明:
|
||||||
|
|
||||||
|
+ 同一个KS集群,同一个任务执行周期中,仅有一台KS主机执行该任务。
|
||||||
|
|
||||||
|
代码例子:
|
||||||
|
|
||||||
|
```java
|
||||||
|
// 1、实现Job接口,重写execute方法;
|
||||||
|
// 2、在类上添加@Task注解,并且配置好信息,指定为随机抢占模式;
|
||||||
|
// 效果:KS集群中,每5秒,会有一台KS主机输出 "测试定时任务运行中";
|
||||||
|
@Task(name = "TestJob",
|
||||||
|
description = "测试定时任务",
|
||||||
|
cron = "*/5 * * * * ?",
|
||||||
|
autoRegister = true,
|
||||||
|
consensual = ConsensualEnum.RANDOM, // 这里一定要设置为RANDOM
|
||||||
|
timeout = 6 * 60)
|
||||||
|
public class TestJob implements Job {
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public TaskResult execute(JobContext jobContext) throws Exception {
|
||||||
|
|
||||||
|
System.out.println("测试定时任务运行中");
|
||||||
|
return new TaskResult();
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
### 2.2、广播模式——重复执行
|
||||||
|
|
||||||
|
功能说明:
|
||||||
|
|
||||||
|
+ 同一个KS集群,同一个任务执行周期中,所有KS主机都执行该任务。比如3台KS主机,3个Kafka集群,此时每台KS主机都会去重复采集这3个Kafka集群的指标。
|
||||||
|
|
||||||
|
代码例子:
|
||||||
|
|
||||||
|
```java
|
||||||
|
// 1、实现Job接口,重写execute方法;
|
||||||
|
// 2、在类上添加@Task注解,并且配置好信息,指定为广播模式;
|
||||||
|
// 效果:KS集群中,每5秒,每台KS主机都会输出 "测试定时任务运行中";
|
||||||
|
@Task(name = "TestJob",
|
||||||
|
description = "测试定时任务",
|
||||||
|
cron = "*/5 * * * * ?",
|
||||||
|
autoRegister = true,
|
||||||
|
consensual = ConsensualEnum.BROADCAST, // 这里一定要设置为BROADCAST
|
||||||
|
timeout = 6 * 60)
|
||||||
|
public class TestJob implements Job {
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public TaskResult execute(JobContext jobContext) throws Exception {
|
||||||
|
|
||||||
|
System.out.println("测试定时任务运行中");
|
||||||
|
return new TaskResult();
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
### 2.3、广播模式——均衡执行
|
||||||
|
|
||||||
|
功能说明:
|
||||||
|
|
||||||
|
+ 同一个KS集群,同一个任务执行周期中,每台KS主机仅执行该任务的一部分,所有的KS主机共同协作完成了任务。比如3台KS主机,3个Kafka集群,稳定运行情况下,每台KS主机将仅采集1个Kafka集群的指标,3台KS主机共同完成3个Kafka集群指标的采集。
|
||||||
|
|
||||||
|
代码例子:
|
||||||
|
|
||||||
|
+ 该模式有点特殊,是KS基于Logi-Job的广播模式,做的一个扩展,以下为一个使用例子:
|
||||||
|
|
||||||
|
```java
|
||||||
|
// 1、继承AbstractClusterPhyDispatchTask,实现processSubTask方法;
|
||||||
|
// 2、在类上添加@Task注解,并且配置好信息,指定为广播模式;
|
||||||
|
// 效果:在本样例中,每隔1分钟ks会将所有的kafka集群列表在ks集群主机内均衡拆分,每台主机会将分发到自身的Kafka集群依次执行processSubTask方法,实现KS集群的任务协同处理。
|
||||||
|
@Task(name = "kmJobTask",
|
||||||
|
description = "km job 模块调度执行任务",
|
||||||
|
cron = "0 0/1 * * * ? *",
|
||||||
|
autoRegister = true,
|
||||||
|
consensual = ConsensualEnum.BROADCAST,
|
||||||
|
timeout = 6 * 60)
|
||||||
|
public class KMJobTask extends AbstractClusterPhyDispatchTask {
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
private JobService jobService;
|
||||||
|
|
||||||
|
@Override
|
||||||
|
protected TaskResult processSubTask(ClusterPhy clusterPhy, long triggerTimeUnitMs) throws Exception {
|
||||||
|
jobService.scheduleJobByClusterId(clusterPhy.getId());
|
||||||
|
return TaskResult.SUCCESS;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## 3、原理简介
|
||||||
|
|
||||||
|
### 3.1、Task注解说明
|
||||||
|
|
||||||
|
```java
|
||||||
|
public @interface Task {
|
||||||
|
String name() default ""; //任务名称
|
||||||
|
String description() default ""; //任务描述
|
||||||
|
String owner() default "system"; //拥有者
|
||||||
|
String cron() default ""; //定时执行的时间策略
|
||||||
|
int retryTimes() default 0; //失败以后所能重试的最大次数
|
||||||
|
long timeout() default 0; //在超时时间里重试
|
||||||
|
//是否自动注册任务到数据库中
|
||||||
|
//如果设置为false,需要手动去数据库km_task表注册定时任务信息。数据库记录和@Task注解缺一不可
|
||||||
|
boolean autoRegister() default false;
|
||||||
|
//执行模式:广播、随机抢占
|
||||||
|
//广播模式:同一集群下的所有服务器都会执行该定时任务
|
||||||
|
//随机抢占模式:同一集群下随机一台服务器执行该任务
|
||||||
|
ConsensualEnum consensual() default ConsensualEnum.RANDOM;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3.2、数据库表介绍
|
||||||
|
|
||||||
|
+ logi_task:记录项目中的定时任务信息,一个定时任务对应一条记录。
|
||||||
|
+ logi_job:具体任务执行信息。
|
||||||
|
+ logi_job_log:定时任务的执行日志。
|
||||||
|
+ logi_worker:记录机器信息,实现集群控制。
|
||||||
|
|
||||||
|
### 3.3、均衡执行简介
|
||||||
|
|
||||||
|
#### 3.3.1、类关系图
|
||||||
|
|
||||||
|
这里以KMJobTask为例,简单介绍KM中的定时任务实现逻辑。
|
||||||
|
|
||||||
|

|
||||||
|
|
||||||
|
+ Job:使用logi组件实现定时任务,必须实现该接口。
|
||||||
|
+ Comparable & EntifyIdInterface:比较接口,实现任务的排序逻辑。
|
||||||
|
+ AbstractDispatchTask:实现广播模式下,任务的均衡分发。
|
||||||
|
+ AbstractClusterPhyDispatchTask:对分发到当前服务器的集群列表进行枚举。
|
||||||
|
+ KMJobTask:实现对单个集群的定时任务处理。
|
||||||
|
|
||||||
|
#### 3.3.2、关键类代码
|
||||||
|
|
||||||
|
+ **AbstractDispatchTask类**
|
||||||
|
|
||||||
|
```java
|
||||||
|
// 实现Job接口的抽象类,进行任务的负载均衡执行
|
||||||
|
public abstract class AbstractDispatchTask<E extends Comparable & EntifyIdInterface> implements Job {
|
||||||
|
|
||||||
|
// 罗列所有的任务
|
||||||
|
protected abstract List<E> listAllTasks();
|
||||||
|
|
||||||
|
// 执行被分配给该KS主机的任务
|
||||||
|
protected abstract TaskResult processTask(List<E> subTaskList, long triggerTimeUnitMs);
|
||||||
|
|
||||||
|
// 被Logi-Job触发执行该方法
|
||||||
|
// 该方法进行任务的分配
|
||||||
|
@Override
|
||||||
|
public TaskResult execute(JobContext jobContext) {
|
||||||
|
try {
|
||||||
|
|
||||||
|
long triggerTimeUnitMs = System.currentTimeMillis();
|
||||||
|
|
||||||
|
// 获取所有的任务
|
||||||
|
List<E> allTaskList = this.listAllTasks();
|
||||||
|
|
||||||
|
// 计算当前KS机器需要执行的任务
|
||||||
|
List<E> subTaskList = this.selectTask(allTaskList, jobContext.getAllWorkerCodes(), jobContext.getCurrentWorkerCode());
|
||||||
|
|
||||||
|
// 进行任务处理
|
||||||
|
return this.processTask(subTaskList, triggerTimeUnitMs);
|
||||||
|
} catch (Exception e) {
|
||||||
|
// ...
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
+ **AbstractClusterPhyDispatchTask类**
|
||||||
|
|
||||||
|
```java
|
||||||
|
// 继承AbstractDispatchTask的抽象类,对Kafka集群进行负载均衡执行
|
||||||
|
public abstract class AbstractClusterPhyDispatchTask extends AbstractDispatchTask<ClusterPhy> {
|
||||||
|
|
||||||
|
// 执行被分配的任务,具体由子类实现
|
||||||
|
protected abstract TaskResult processSubTask(ClusterPhy clusterPhy, long triggerTimeUnitMs) throws Exception;
|
||||||
|
|
||||||
|
// 返回所有的Kafka集群
|
||||||
|
@Override
|
||||||
|
public List<ClusterPhy> listAllTasks() {
|
||||||
|
return clusterPhyService.listAllClusters();
|
||||||
|
}
|
||||||
|
|
||||||
|
// 执行被分配给该KS主机的Kafka集群任务
|
||||||
|
@Override
|
||||||
|
public TaskResult processTask(List<ClusterPhy> subTaskList, long triggerTimeUnitMs) { // ... }
|
||||||
|
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
+ **KMJobTask类**
|
||||||
|
|
||||||
|
```java
|
||||||
|
// 加上@Task注解,并配置任务执行信息
|
||||||
|
@Task(name = "kmJobTask",
|
||||||
|
description = "km job 模块调度执行任务",
|
||||||
|
cron = "0 0/1 * * * ? *",
|
||||||
|
autoRegister = true,
|
||||||
|
consensual = ConsensualEnum.BROADCAST,
|
||||||
|
timeout = 6 * 60)
|
||||||
|
// 继承AbstractClusterPhyDispatchTask类
|
||||||
|
public class KMJobTask extends AbstractClusterPhyDispatchTask {
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
private JobService jobService;
|
||||||
|
|
||||||
|
// 执行该Kafka集群的Job模块的任务
|
||||||
|
@Override
|
||||||
|
protected TaskResult processSubTask(ClusterPhy clusterPhy, long triggerTimeUnitMs) throws Exception {
|
||||||
|
jobService.scheduleJobByClusterId(clusterPhy.getId());
|
||||||
|
return TaskResult.SUCCESS;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### 3.3.3、均衡执行总结
|
||||||
|
|
||||||
|
均衡执行的实现原理总结起来就是以下几点:
|
||||||
|
|
||||||
|
+ Logi-Job设置为广播模式,触发所有的KS主机执行任务;
|
||||||
|
+ 每台KS主机,被触发执行后,按照统一的规则,对任务列表,KS集群主机列表进行排序。然后按照顺序将任务列表均衡的分配给排序后的KS集群主机。KS集群稳定运行情况下,这一步保证了每台KS主机之间分配到的任务列表不重复,不丢失。
|
||||||
|
+ 最后每台KS主机,执行被分配到的任务。
|
||||||
|
|
||||||
|
## 4、注意事项
|
||||||
|
|
||||||
|
+ 不能100%保证任务在一个周期内,有且仅执行一次,可能出现重复执行或丢失的情况,所以对于必须严格有且仅执行一次的任务,不建议基于Logi-Job进行任务控制。
|
||||||
|
+ 尽量让Logi-Job仅负责任务的触发,后续的执行建议放到自己创建的线程池中进行。
|
||||||
Binary file not shown.
|
Before Width: | Height: | Size: 600 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 228 KiB |
@@ -36,7 +36,7 @@ KS-KM 根据其需要纳管的 kafka 版本,按照上述三个维度构建了
|
|||||||
|
|
||||||
  KS-KM 的每个版本针对需要纳管的 kafka 版本列表,事先分析各个版本的差异性和产品需求,同时 KS-KM 构建了一套专门处理兼容性的服务,来进行兼容性的注册、字典构建、处理器分发等操作,其中版本兼容性处理器是来具体处理不同 kafka 版本差异性的地方。
|
  KS-KM 的每个版本针对需要纳管的 kafka 版本列表,事先分析各个版本的差异性和产品需求,同时 KS-KM 构建了一套专门处理兼容性的服务,来进行兼容性的注册、字典构建、处理器分发等操作,其中版本兼容性处理器是来具体处理不同 kafka 版本差异性的地方。
|
||||||
|
|
||||||

|

|
||||||
|
|
||||||
  如上图所示,KS-KM 的 topic 服务在面对不同 kafka 版本时,其 topic 的创建、删除、扩容由于 kafka 版本自身的差异,导致 KnowStreaming 的处理也不一样,所以需要根据不同的 kafka 版本来实现不同的兼容性处理器,同时向 KnowStreaming 的兼容服务进行兼容性的注册,构建兼容性字典,后续在 KnowStreaming 的运行过程中,针对不同的 kafka 版本即可分发到不同的处理器中执行。
|
  如上图所示,KS-KM 的 topic 服务在面对不同 kafka 版本时,其 topic 的创建、删除、扩容由于 kafka 版本自身的差异,导致 KnowStreaming 的处理也不一样,所以需要根据不同的 kafka 版本来实现不同的兼容性处理器,同时向 KnowStreaming 的兼容服务进行兼容性的注册,构建兼容性字典,后续在 KnowStreaming 的运行过程中,针对不同的 kafka 版本即可分发到不同的处理器中执行。
|
||||||
|
|
||||||
|
|||||||
@@ -29,7 +29,7 @@
|
|||||||
- 初始化 MySQL 表及数据
|
- 初始化 MySQL 表及数据
|
||||||
- 初始化 Elasticsearch 索引
|
- 初始化 Elasticsearch 索引
|
||||||
|
|
||||||
具体见:[快速开始](./1-quick-start.md) 中的最后一步,部署 KnowStreaming 服务中的初始化相关工作。
|
具体见:[单机部署手册](../install_guide/单机部署手册.md) 中的最后一步,部署 KnowStreaming 服务中的初始化相关工作。
|
||||||
|
|
||||||
### 6.1.4、本地启动
|
### 6.1.4、本地启动
|
||||||
|
|
||||||
@@ -73,7 +73,7 @@ km-rest/src/main/java/com/xiaojukeji/know/streaming/km/rest/KnowStreaming.java
|
|||||||
IDEA 更多具体的配置如下图所示:
|
IDEA 更多具体的配置如下图所示:
|
||||||
|
|
||||||
<p align="center">
|
<p align="center">
|
||||||
<img src="./assets/startup_using_source_code/IDEA配置.jpg" width = "512" height = "318" div align=center />
|
<img src="http://img-ys011.didistatic.com/static/dc2img/do1_BW1RzgEMh4n6L4dL4ncl" width = "512" height = "318" div align=center />
|
||||||
</p>
|
</p>
|
||||||
|
|
||||||
**第四步:启动项目**
|
**第四步:启动项目**
|
||||||
@@ -84,7 +84,7 @@ IDEA 更多具体的配置如下图所示:
|
|||||||
|
|
||||||
`Know Streaming` 启动之后,可以访问一些信息,包括:
|
`Know Streaming` 启动之后,可以访问一些信息,包括:
|
||||||
|
|
||||||
- 产品页面:http://localhost:8080 ,默认账号密码:`admin` / `admin2022_` 进行登录。
|
- 产品页面:http://localhost:8080 ,默认账号密码:`admin` / `admin2022_` 进行登录。`v3.0.0-beta.2`版本开始,默认账号密码为`admin` / `admin`;
|
||||||
- 接口地址:http://localhost:8080/swagger-ui.html 查看后端提供的相关接口。
|
- 接口地址:http://localhost:8080/swagger-ui.html 查看后端提供的相关接口。
|
||||||
|
|
||||||
更多信息,详见:[KnowStreaming 官网](https://knowstreaming.com/)
|
更多信息,详见:[KnowStreaming 官网](https://knowstreaming.com/)
|
||||||
199
docs/dev_guide/登录系统对接.md
Normal file
199
docs/dev_guide/登录系统对接.md
Normal file
@@ -0,0 +1,199 @@
|
|||||||
|
|
||||||
|
|
||||||
|

|
||||||
|
|
||||||
|
## 登录系统对接
|
||||||
|
|
||||||
|
[KnowStreaming](https://github.com/didi/KnowStreaming)(以下简称KS) 除了实现基于本地MySQL的用户登录认证方式外,还已经实现了基于Ldap的登录认证。
|
||||||
|
|
||||||
|
但是,登录认证系统并非仅此两种。因此,为了具有更好的拓展性,KS具有自定义登陆认证逻辑,快速对接已有系统的特性。
|
||||||
|
|
||||||
|
在KS中,我们将登陆认证相关的一些文件放在[km-extends](https://github.com/didi/KnowStreaming/tree/master/km-extends)模块下的[km-account](https://github.com/didi/KnowStreaming/tree/master/km-extends/km-account)模块里。
|
||||||
|
|
||||||
|
本文将介绍KS如何快速对接自有的用户登录认证系统。
|
||||||
|
|
||||||
|
### 对接步骤
|
||||||
|
|
||||||
|
- 创建一个登陆认证类,实现[LogiCommon](https://github.com/didi/LogiCommon)的LoginExtend接口;
|
||||||
|
- 将[application.yml](https://github.com/didi/KnowStreaming/blob/master/km-rest/src/main/resources/application.yml)中的spring.logi-security.login-extend-bean-name字段改为登陆认证类的bean名称;
|
||||||
|
|
||||||
|
```Java
|
||||||
|
//LoginExtend 接口
|
||||||
|
public interface LoginExtend {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 验证登录信息,同时记住登录状态
|
||||||
|
*/
|
||||||
|
UserBriefVO verifyLogin(AccountLoginDTO var1, HttpServletRequest var2, HttpServletResponse var3) throws LogiSecurityException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
 * 登出接口,清除登录状态
|
||||||
|
*/
|
||||||
|
Result<Boolean> logout(HttpServletRequest var1, HttpServletResponse var2);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 检查是否已经登录
|
||||||
|
*/
|
||||||
|
boolean interceptorCheck(HttpServletRequest var1, HttpServletResponse var2, String var3, List<String> var4) throws IOException;
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
### 对接例子
|
||||||
|
|
||||||
|
我们以Ldap对接为例,说明KS如何对接登录认证系统。
|
||||||
|
|
||||||
|
+ 编写[LdapLoginServiceImpl](https://github.com/didi/KnowStreaming/blob/master/km-extends/km-account/src/main/java/com/xiaojukeji/know/streaming/km/account/login/ldap/LdapLoginServiceImpl.java)类,实现LoginExtend接口。
|
||||||
|
+ 设置[application.yml](https://github.com/didi/KnowStreaming/blob/master/km-rest/src/main/resources/application.yml)中的spring.logi-security.login-extend-bean-name=ksLdapLoginService。
|
||||||
|
|
||||||
|
完成上述两步即可实现KS对接Ldap认证登陆。
|
||||||
|
|
||||||
|
```Java
|
||||||
|
@Service("ksLdapLoginService")
|
||||||
|
public class LdapLoginServiceImpl implements LoginExtend {
|
||||||
|
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public UserBriefVO verifyLogin(AccountLoginDTO loginDTO,
|
||||||
|
HttpServletRequest request,
|
||||||
|
HttpServletResponse response) throws LogiSecurityException {
|
||||||
|
String decodePasswd = AESUtils.decrypt(loginDTO.getPw());
|
||||||
|
|
||||||
|
// 去LDAP验证账密
|
||||||
|
LdapPrincipal ldapAttrsInfo = ldapAuthentication.authenticate(loginDTO.getUserName(), decodePasswd);
|
||||||
|
if (ldapAttrsInfo == null) {
|
||||||
|
// 用户不存在,正常来说如果有问题,上一步会直接抛出异常
|
||||||
|
throw new LogiSecurityException(ResultCode.USER_NOT_EXISTS);
|
||||||
|
}
|
||||||
|
|
||||||
|
// 进行业务相关操作
|
||||||
|
|
||||||
|
// 记录登录状态,Ldap因为无法记录登录状态,因此由KnowStreaming进行记录
|
||||||
|
initLoginContext(request, response, loginDTO.getUserName(), user.getId());
|
||||||
|
return CopyBeanUtil.copy(user, UserBriefVO.class);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public Result<Boolean> logout(HttpServletRequest request, HttpServletResponse response) {
|
||||||
|
|
||||||
|
//清理cookie和session
|
||||||
|
|
||||||
|
return Result.buildSucc(Boolean.TRUE);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public boolean interceptorCheck(HttpServletRequest request, HttpServletResponse response, String requestMappingValue, List<String> whiteMappingValues) throws IOException {
|
||||||
|
|
||||||
|
// 检查是否已经登录
|
||||||
|
String userName = HttpRequestUtil.getOperator(request);
|
||||||
|
if (StringUtils.isEmpty(userName)) {
|
||||||
|
// 未登录,则进行登出
|
||||||
|
logout(request, response);
|
||||||
|
return Boolean.FALSE;
|
||||||
|
}
|
||||||
|
|
||||||
|
return Boolean.TRUE;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
### 实现原理
|
||||||
|
|
||||||
|
因为登陆和登出整体实现逻辑是一致的,所以我们以登陆逻辑为例进行介绍。
|
||||||
|
|
||||||
|
+ 登陆原理
|
||||||
|
|
||||||
|
登陆走的是[LogiCommon](https://github.com/didi/LogiCommon)自带的LoginController。
|
||||||
|
|
||||||
|
```java
|
||||||
|
@RestController
|
||||||
|
public class LoginController {
|
||||||
|
|
||||||
|
|
||||||
|
//登陆接口
|
||||||
|
@PostMapping({"/login"})
|
||||||
|
public Result<UserBriefVO> login(HttpServletRequest request, HttpServletResponse response, @RequestBody AccountLoginDTO loginDTO) {
|
||||||
|
try {
|
||||||
|
//登陆认证
|
||||||
|
UserBriefVO userBriefVO = this.loginService.verifyLogin(loginDTO, request, response);
|
||||||
|
return Result.success(userBriefVO);
|
||||||
|
|
||||||
|
} catch (LogiSecurityException var5) {
|
||||||
|
return Result.fail(var5);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
而登陆操作是调用LoginServiceImpl类来实现,但是具体由哪个登陆认证类来执行登陆操作却由loginExtendBeanTool来指定。
|
||||||
|
|
||||||
|
```java
|
||||||
|
//LoginServiceImpl类
|
||||||
|
@Service
|
||||||
|
public class LoginServiceImpl implements LoginService {
|
||||||
|
|
||||||
|
//实现登陆操作,但是具体哪个登陆类由loginExtendBeanTool来管理
|
||||||
|
public UserBriefVO verifyLogin(AccountLoginDTO loginDTO, HttpServletRequest request, HttpServletResponse response) throws LogiSecurityException {
|
||||||
|
|
||||||
|
return this.loginExtendBeanTool.getLoginExtendImpl().verifyLogin(loginDTO, request, response);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
而loginExtendBeanTool类会优先去查找用户指定的登陆认证类,如果失败则调用默认的登陆认证函数。
|
||||||
|
|
||||||
|
```java
|
||||||
|
//LoginExtendBeanTool类
|
||||||
|
@Component("logiSecurityLoginExtendBeanTool")
|
||||||
|
public class LoginExtendBeanTool {
|
||||||
|
|
||||||
|
public LoginExtend getLoginExtendImpl() {
|
||||||
|
LoginExtend loginExtend;
|
||||||
|
//先调用用户指定登陆类,如果失败则调用系统默认登陆认证
|
||||||
|
try {
|
||||||
|
//调用的类由spring.logi-security.login-extend-bean-name指定
|
||||||
|
loginExtend = this.getCustomLoginExtendImplBean();
|
||||||
|
} catch (UnsupportedOperationException var3) {
|
||||||
|
loginExtend = this.getDefaultLoginExtendImplBean();
|
||||||
|
}
|
||||||
|
|
||||||
|
return loginExtend;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
+ 认证原理
|
||||||
|
|
||||||
|
认证的实现则比较简单,向Spring中注册我们的拦截器PermissionInterceptor。
|
||||||
|
|
||||||
|
拦截器会调用LoginServiceImpl类的拦截方法,LoginServiceImpl后续处理逻辑就和前面登陆是一致的。
|
||||||
|
|
||||||
|
```java
|
||||||
|
public class PermissionInterceptor implements HandlerInterceptor {
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 拦截预处理
|
||||||
|
* @return boolean false:拦截, 不向下执行, true:放行
|
||||||
|
*/
|
||||||
|
@Override
|
||||||
|
public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler) throws Exception {
|
||||||
|
|
||||||
|
//免登录相关校验,如果验证通过,提前返回
|
||||||
|
|
||||||
|
//走拦截函数,进行普通用户验证
|
||||||
|
return loginService.interceptorCheck(request, response, classRequestMappingValue, whiteMappingValues);
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
@@ -1,25 +1,20 @@
|
|||||||
|
|
||||||

|
|
||||||
|
|
||||||
|

|
||||||
|
|
||||||
## JMX-连接失败问题解决
|
## JMX-连接失败问题解决
|
||||||
|
|
||||||
- [JMX-连接失败问题解决](#jmx-连接失败问题解决)
|
集群正常接入`KnowStreaming`之后,即可以看到集群的Broker列表,此时如果查看不了Topic的实时流量,或者是Broker的实时流量信息时,那么大概率就是`JMX`连接的问题了。
|
||||||
- [1、问题&说明](#1问题说明)
|
|
||||||
- [2、解决方法](#2解决方法)
|
|
||||||
- [3、解决方法 —— 认证的JMX](#3解决方法--认证的jmx)
|
|
||||||
|
|
||||||
集群正常接入Logi-KafkaManager之后,即可以看到集群的Broker列表,此时如果查看不了Topic的实时流量,或者是Broker的实时流量信息时,那么大概率就是JMX连接的问题了。
|
|
||||||
|
|
||||||
下面我们按照步骤来一步一步的检查。
|
下面我们按照步骤来一步一步的检查。
|
||||||
|
|
||||||
### 1、问题&说明
|
### 1、问题说明
|
||||||
|
|
||||||
**类型一:JMX配置未开启**
|
**类型一:JMX配置未开启**
|
||||||
|
|
||||||
未开启时,直接到`2、解决方法`查看如何开启即可。
|
未开启时,直接到`2、解决方法`查看如何开启即可。
|
||||||
|
|
||||||

|

|
||||||
|
|
||||||
|
|
||||||
**类型二:配置错误**
|
**类型二:配置错误**
|
||||||
@@ -43,6 +38,26 @@ java.rmi.ConnectException: Connection refused to host: 192.168.0.1; nested excep
|
|||||||
java.rmi.ConnectException: Connection refused to host: 127.0.0.1;; nested exception is:
|
java.rmi.ConnectException: Connection refused to host: 127.0.0.1;; nested exception is:
|
||||||
```
|
```
|
||||||
|
|
||||||
|
**类型三:连接特定IP**
|
||||||
|
|
||||||
|
Broker 配置了内外网,而JMX在配置时,可能配置了内网IP或者外网IP,此时 `KnowStreaming` 需要连接到特定网络的IP才可以进行访问。
|
||||||
|
|
||||||
|
比如:
|
||||||
|
|
||||||
|
Broker在ZK的存储结构如下所示,我们期望连接到 `endpoints` 中标记为 `INTERNAL` 的地址,但是 `KnowStreaming` 却连接了 `EXTERNAL` 的地址,此时可以看 `4、解决方法 —— JMX连接特定网络` 进行解决。
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"listener_security_protocol_map": {"EXTERNAL":"SASL_PLAINTEXT","INTERNAL":"SASL_PLAINTEXT"},
|
||||||
|
"endpoints": ["EXTERNAL://192.168.0.1:7092","INTERNAL://192.168.0.2:7093"],
|
||||||
|
"jmx_port": 8099,
|
||||||
|
"host": "192.168.0.1",
|
||||||
|
"timestamp": "1627289710439",
|
||||||
|
"port": -1,
|
||||||
|
"version": 4
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
### 2、解决方法
|
### 2、解决方法
|
||||||
|
|
||||||
这里仅介绍一下比较通用的解决方式,如若有更好的方式,欢迎大家指导告知一下。
|
这里仅介绍一下比较通用的解决方式,如若有更好的方式,欢迎大家指导告知一下。
|
||||||
@@ -76,26 +91,36 @@ fi
|
|||||||
|
|
||||||
如果您是直接看的这个部分,建议先看一下上一节:`2、解决方法`以确保`JMX`的配置没有问题了。
|
如果您是直接看的这个部分,建议先看一下上一节:`2、解决方法`以确保`JMX`的配置没有问题了。
|
||||||
|
|
||||||
在JMX的配置等都没有问题的情况下,如果是因为认证的原因导致连接不了的,此时可以使用下面介绍的方法进行解决。
|
在`JMX`的配置等都没有问题的情况下,如果是因为认证的原因导致连接不了的,可以在集群接入界面配置你的`JMX`认证信息。
|
||||||
|
|
||||||
**当前这块后端刚刚开发完成,可能还不够完善,有问题随时沟通。**
|
<img src='http://img-ys011.didistatic.com/static/dc2img/do1_EUU352qMEX1Jdp7pxizp' width=350>
|
||||||
|
|
||||||
`Logi-KafkaManager 2.2.0+`之后的版本后端已经支持`JMX`认证方式的连接,但是还没有界面,此时我们可以往`cluster`表的`jmx_properties`字段写入`JMX`的认证信息。
|
|
||||||
|
|
||||||
这个数据是`json`格式的字符串,例子如下所示:
|
|
||||||
|
|
||||||
|
### 4、解决方法 —— JMX连接特定网络
|
||||||
|
|
||||||
|
可以手动往`ks_km_physical_cluster`表的`jmx_properties`字段增加一个`useWhichEndpoint`字段,从而控制 `KnowStreaming` 连接到特定的JMX IP及PORT。
|
||||||
|
|
||||||
|
`jmx_properties`格式:
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"maxConn": 10, # KM对单台Broker的最大JMX连接数
|
"maxConn": 100, # KM对单台Broker的最大JMX连接数
|
||||||
"username": "xxxxx", # 用户名
|
"username": "xxxxx", # 用户名,可以不填写
|
||||||
"password": "xxxx", # 密码
|
"password": "xxxx", # 密码,可以不填写
|
||||||
"openSSL": true, # 开启SSL, true表示开启ssl, false表示关闭
|
"openSSL": true, # 开启SSL, true表示开启ssl, false表示关闭
|
||||||
|
"useWhichEndpoint": "EXTERNAL" #指定要连接的网络名称,填写EXTERNAL就是连接endpoints里面的EXTERNAL地址
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
SQL的例子:
|
SQL例子:
|
||||||
```sql
|
```sql
|
||||||
UPDATE cluster SET jmx_properties='{ "maxConn": 10, "username": "xxxxx", "password": "xxxx", "openSSL": false }' where id={xxx};
|
UPDATE ks_km_physical_cluster SET jmx_properties='{ "maxConn": 10, "username": "xxxxx", "password": "xxxx", "openSSL": false , "useWhichEndpoint": "xxx"}' where id={xxx};
|
||||||
```
|
```
|
||||||
|
|
||||||
|
注意:
|
||||||
|
|
||||||
|
+ 目前此功能只支持采用 `ZK` 做分布式协调的kafka集群。
|
||||||
|
|
||||||
|
|
||||||
@@ -6,9 +6,10 @@
|
|||||||
|
|
||||||
### 2.1.1、安装说明
|
### 2.1.1、安装说明
|
||||||
|
|
||||||
- 以 `v3.0.0-bete` 版本为例进行部署;
|
- 以 `v3.0.0-beta.1` 版本为例进行部署;
|
||||||
- 以 CentOS-7 为例,系统基础配置要求 4C-8G;
|
- 以 CentOS-7 为例,系统基础配置要求 4C-8G;
|
||||||
- 部署完成后,可通过浏览器:`IP:PORT` 进行访问,默认端口是 `8080`,系统默认账号密码: `admin` / `admin2022_`;
|
- 部署完成后,可通过浏览器:`IP:PORT` 进行访问,默认端口是 `8080`,系统默认账号密码: `admin` / `admin2022_`。
|
||||||
|
- `v3.0.0-beta.2`版本开始,默认账号密码为`admin` / `admin`;
|
||||||
- 本文为单机部署,如需分布式部署,[请联系我们](https://knowstreaming.com/support-center)
|
- 本文为单机部署,如需分布式部署,[请联系我们](https://knowstreaming.com/support-center)
|
||||||
|
|
||||||
**软件依赖**
|
**软件依赖**
|
||||||
@@ -19,7 +20,7 @@
|
|||||||
| ElasticSearch | v7.6+ | 8060 |
|
| ElasticSearch | v7.6+ | 8060 |
|
||||||
| JDK | v8+ | - |
|
| JDK | v8+ | - |
|
||||||
| CentOS | v6+ | - |
|
| CentOS | v6+ | - |
|
||||||
| Ubantu | v16+ | - |
|
| Ubuntu | v16+ | - |
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
@@ -29,7 +30,7 @@
|
|||||||
|
|
||||||
```bash
|
```bash
|
||||||
# 在服务器中下载安装脚本, 该脚本中会在当前目录下,重新安装MySQL。重装后的mysql密码存放在当前目录的mysql.password文件中。
|
# 在服务器中下载安装脚本, 该脚本中会在当前目录下,重新安装MySQL。重装后的mysql密码存放在当前目录的mysql.password文件中。
|
||||||
wget https://s3-gzpu.didistatic.com/pub/knowstreaming/deploy_KnowStreaming.sh
|
wget https://s3-gzpu.didistatic.com/pub/knowstreaming/deploy_KnowStreaming-3.0.0-beta.1.sh
|
||||||
|
|
||||||
# 执行脚本
|
# 执行脚本
|
||||||
sh deploy_KnowStreaming.sh
|
sh deploy_KnowStreaming.sh
|
||||||
@@ -42,10 +43,10 @@ sh deploy_KnowStreaming.sh
|
|||||||
|
|
||||||
```bash
|
```bash
|
||||||
# 将安装包下载到本地且传输到目标服务器
|
# 将安装包下载到本地且传输到目标服务器
|
||||||
wget https://s3-gzpu.didistatic.com/pub/knowstreaming/KnowStreaming-3.0.0-beta—offline.tar.gz
|
wget https://s3-gzpu.didistatic.com/pub/knowstreaming/KnowStreaming-3.0.0-beta.1-offline.tar.gz
|
||||||
|
|
||||||
# 解压安装包
|
# 解压安装包
|
||||||
tar -zxf KnowStreaming-3.0.0-beta—offline.tar.gz
|
tar -zxf KnowStreaming-3.0.0-beta.1-offline.tar.gz
|
||||||
|
|
||||||
# 执行安装脚本
|
# 执行安装脚本
|
||||||
sh deploy_KnowStreaming-offline.sh
|
sh deploy_KnowStreaming-offline.sh
|
||||||
@@ -58,28 +59,129 @@ sh deploy_KnowStreaming-offline.sh
|
|||||||
|
|
||||||
### 2.1.3、容器部署
|
### 2.1.3、容器部署
|
||||||
|
|
||||||
|
#### 2.1.3.1、Helm
|
||||||
|
|
||||||
**环境依赖**
|
**环境依赖**
|
||||||
|
|
||||||
- Kubernetes >= 1.14 ,Helm >= 2.17.0
|
- Kubernetes >= 1.14 ,Helm >= 2.17.0
|
||||||
|
|
||||||
- 默认配置为全部安装( ElasticSearch + MySQL + KnowStreaming)
|
- 默认依赖全部安装,ElasticSearch(3 节点集群模式) + MySQL(单机) + KnowStreaming-manager + KnowStreaming-ui
|
||||||
|
|
||||||
- 如果使用已有的 ElasticSearch(7.6.x) 和 MySQL(5.7) 只需调整 values.yaml 部分参数即可
|
- 使用已有的 ElasticSearch(7.6.x) 和 MySQL(5.7) 只需调整 values.yaml 部分参数即可
|
||||||
|
|
||||||
**安装命令**
|
**安装命令**
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# 下载安装包
|
# 相关镜像在Docker Hub都可以下载
|
||||||
wget https://s3-gzpu.didistatic.com/pub/knowstreaming/knowstreaming-3.0.0-hlem.tgz
|
# 快速安装(NAMESPACE需要更改为已存在的,安装启动需要几分钟初始化请稍等~)
|
||||||
|
helm install -n [NAMESPACE] [NAME] http://download.knowstreaming.com/charts/knowstreaming-manager-0.1.3.tgz
|
||||||
# 解压安装包
|
|
||||||
tar -zxf knowstreaming-3.0.0-hlem.tgz
|
|
||||||
|
|
||||||
# 执行命令(NAMESPACE需要更改为已存在的)
|
|
||||||
helm install -n [NAMESPACE] knowstreaming knowstreaming-manager/
|
|
||||||
|
|
||||||
# 获取KnowStreaming前端ui的service. 默认nodeport方式.
|
# 获取KnowStreaming前端ui的service. 默认nodeport方式.
|
||||||
# (http://nodeIP:nodeport,默认用户名密码:admin/admin2022_)
|
# (http://nodeIP:nodeport,默认用户名密码:admin/admin2022_)
|
||||||
|
# `v3.0.0-beta.2`版本开始,默认账号密码为`admin` / `admin`;
|
||||||
|
|
||||||
|
# 添加仓库
|
||||||
|
helm repo add knowstreaming http://download.knowstreaming.com/charts
|
||||||
|
|
||||||
|
# 拉取最新版本
|
||||||
|
helm pull knowstreaming/knowstreaming-manager
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
#### 2.1.3.2、Docker Compose
|
||||||
|
```yml
|
||||||
|
version: "3"
|
||||||
|
|
||||||
|
services:
|
||||||
|
|
||||||
|
knowstreaming-manager:
|
||||||
|
image: knowstreaming/knowstreaming-manager:0.2.0-test
|
||||||
|
container_name: knowstreaming-manager
|
||||||
|
privileged: true
|
||||||
|
restart: always
|
||||||
|
depends_on:
|
||||||
|
- elasticsearch-single
|
||||||
|
- knowstreaming-mysql
|
||||||
|
expose:
|
||||||
|
- 80
|
||||||
|
command:
|
||||||
|
- /bin/sh
|
||||||
|
- /ks-start.sh
|
||||||
|
environment:
|
||||||
|
TZ: Asia/Shanghai
|
||||||
|
|
||||||
|
SERVER_MYSQL_ADDRESS: knowstreaming-mysql:3306
|
||||||
|
SERVER_MYSQL_DB: know_streaming
|
||||||
|
SERVER_MYSQL_USER: root
|
||||||
|
SERVER_MYSQL_PASSWORD: admin2022_
|
||||||
|
|
||||||
|
SERVER_ES_ADDRESS: elasticsearch-single:9200
|
||||||
|
|
||||||
|
JAVA_OPTS: -Xmx1g -Xms1g
|
||||||
|
|
||||||
|
# extra_hosts:
|
||||||
|
# - "hostname:x.x.x.x"
|
||||||
|
# volumes:
|
||||||
|
# - /ks/manage/log:/logs
|
||||||
|
knowstreaming-ui:
|
||||||
|
image: knowstreaming/knowstreaming-ui:0.2.0-test1
|
||||||
|
container_name: knowstreaming-ui
|
||||||
|
restart: always
|
||||||
|
ports:
|
||||||
|
- '18092:80'
|
||||||
|
environment:
|
||||||
|
TZ: Asia/Shanghai
|
||||||
|
depends_on:
|
||||||
|
- knowstreaming-manager
|
||||||
|
# extra_hosts:
|
||||||
|
# - "hostname:x.x.x.x"
|
||||||
|
|
||||||
|
elasticsearch-single:
|
||||||
|
image: docker.io/library/elasticsearch:7.6.2
|
||||||
|
container_name: elasticsearch-single
|
||||||
|
restart: always
|
||||||
|
expose:
|
||||||
|
- 9200
|
||||||
|
- 9300
|
||||||
|
# ports:
|
||||||
|
# - '9200:9200'
|
||||||
|
# - '9300:9300'
|
||||||
|
environment:
|
||||||
|
TZ: Asia/Shanghai
|
||||||
|
ES_JAVA_OPTS: -Xms512m -Xmx512m
|
||||||
|
discovery.type: single-node
|
||||||
|
# volumes:
|
||||||
|
# - /ks/es/data:/usr/share/elasticsearch/data
|
||||||
|
|
||||||
|
knowstreaming-init:
|
||||||
|
image: knowstreaming/knowstreaming-manager:0.2.0-test
|
||||||
|
container_name: knowstreaming_init
|
||||||
|
depends_on:
|
||||||
|
- elasticsearch-single
|
||||||
|
command:
|
||||||
|
- /bin/bash
|
||||||
|
- /es_template_create.sh
|
||||||
|
environment:
|
||||||
|
TZ: Asia/Shanghai
|
||||||
|
SERVER_ES_ADDRESS: elasticsearch-single:9200
|
||||||
|
|
||||||
|
|
||||||
|
knowstreaming-mysql:
|
||||||
|
image: knowstreaming/knowstreaming-mysql:0.2.0-test
|
||||||
|
container_name: knowstreaming-mysql
|
||||||
|
restart: always
|
||||||
|
environment:
|
||||||
|
TZ: Asia/Shanghai
|
||||||
|
MYSQL_ROOT_PASSWORD: admin2022_
|
||||||
|
MYSQL_DATABASE: know_streaming
|
||||||
|
MYSQL_ROOT_HOST: '%'
|
||||||
|
expose:
|
||||||
|
- 3306
|
||||||
|
# ports:
|
||||||
|
# - '3306:3306'
|
||||||
|
# volumes:
|
||||||
|
# - /ks/mysql/data:/data/mysql
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
@@ -219,10 +321,10 @@ sh /data/elasticsearch/control.sh status
|
|||||||
|
|
||||||
```bash
|
```bash
|
||||||
# 下载安装包
|
# 下载安装包
|
||||||
wget https://s3-gzpu.didistatic.com/pub/knowstreaming/KnowStreaming-3.0.0-beta.tar.gz
|
wget https://s3-gzpu.didistatic.com/pub/knowstreaming/KnowStreaming-3.0.0-beta.1.tar.gz
|
||||||
|
|
||||||
# 解压安装包到指定目录
|
# 解压安装包到指定目录
|
||||||
tar -zxf KnowStreaming-3.0.0-beta.tar.gz -C /data/
|
tar -zxf KnowStreaming-3.0.0-beta.1.tar.gz -C /data/
|
||||||
|
|
||||||
# 修改启动脚本并加入systemd管理
|
# 修改启动脚本并加入systemd管理
|
||||||
cd /data/KnowStreaming/
|
cd /data/KnowStreaming/
|
||||||
@@ -236,7 +338,7 @@ mysql -uroot -pDidi_km_678 know_streaming < ./init/sql/dml-ks-km.sql
|
|||||||
mysql -uroot -pDidi_km_678 know_streaming < ./init/sql/dml-logi.sql
|
mysql -uroot -pDidi_km_678 know_streaming < ./init/sql/dml-logi.sql
|
||||||
|
|
||||||
# 创建elasticsearch初始化数据
|
# 创建elasticsearch初始化数据
|
||||||
sh ./init/template/template.sh
|
sh ./bin/init_es_template.sh
|
||||||
|
|
||||||
# 修改配置文件
|
# 修改配置文件
|
||||||
vim ./conf/application.yml
|
vim ./conf/application.yml
|
||||||
|
|||||||
@@ -1,6 +1,4 @@
|
|||||||
|

|
||||||

|
|
||||||
|
|
||||||
|
|
||||||
# `Know Streaming` 源码编译打包手册
|
# `Know Streaming` 源码编译打包手册
|
||||||
|
|
||||||
@@ -11,7 +9,7 @@
|
|||||||
`windows7+`、`Linux`、`Mac`
|
`windows7+`、`Linux`、`Mac`
|
||||||
|
|
||||||
**环境依赖**
|
**环境依赖**
|
||||||
|
|
||||||
- Maven 3.6.3 (后端)
|
- Maven 3.6.3 (后端)
|
||||||
- Node v12.20.0/v14.17.3 (前端)
|
- Node v12.20.0/v14.17.3 (前端)
|
||||||
- Java 8+ (后端)
|
- Java 8+ (后端)
|
||||||
@@ -25,27 +23,23 @@
|
|||||||
|
|
||||||
具体见下面描述。
|
具体见下面描述。
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
### 2.1、前后端合并打包
|
### 2.1、前后端合并打包
|
||||||
|
|
||||||
1. 下载源码;
|
1. 下载源码;
|
||||||
2. 进入 `KS-KM` 工程目录,执行 `mvn -Prelease-package -Dmaven.test.skip=true clean install -U` 命令;
|
2. 进入 `KS-KM` 工程目录,执行 `mvn -Prelease-package -Dmaven.test.skip=true clean install -U` 命令;
|
||||||
3. 打包命令执行完成后,会在 `km-dist/target` 目录下面生成一个 `KnowStreaming-*.tar.gz` 的安装包。
|
3. 打包命令执行完成后,会在 `km-dist/target` 目录下面生成一个 `KnowStreaming-*.tar.gz` 的安装包。
|
||||||
|
|
||||||
|
### 2.2、前端单独打包
|
||||||
### 2.2、前端单独打包
|
|
||||||
|
|
||||||
1. 下载源码;
|
1. 下载源码;
|
||||||
2. 进入 `KS-KM/km-console` 工程目录;
|
2. 跳转到 [前端打包构建文档](https://github.com/didi/KnowStreaming/blob/master/km-console/README.md) 按步骤进行。打包成功后,会在 `km-rest/src/main/resources` 目录下生成名为 `templates` 的前端静态资源包;
|
||||||
3. 执行 `npm run build`命令,会在 `KS-KM/km-console` 目录下生成一个名为 `pub` 的前端静态资源包;
|
3. 如果上一步过程中报错,请查看 [FAQ](https://github.com/didi/KnowStreaming/blob/master/docs/user_guide/faq.md) 第 8.10 条;
|
||||||
|
|
||||||
|
### 2.3、后端单独打包
|
||||||
|
|
||||||
### 2.3、后端单独打包
|
|
||||||
|
|
||||||
1. 下载源码;
|
1. 下载源码;
|
||||||
2. 修改顶层 `pom.xml` ,去掉其中的 `km-console` 模块,如下所示;
|
2. 修改顶层 `pom.xml` ,去掉其中的 `km-console` 模块,如下所示;
|
||||||
|
|
||||||
```xml
|
```xml
|
||||||
<modules>
|
<modules>
|
||||||
<!-- <module>km-console</module>-->
|
<!-- <module>km-console</module>-->
|
||||||
@@ -62,10 +56,7 @@
|
|||||||
<module>km-rest</module>
|
<module>km-rest</module>
|
||||||
<module>km-dist</module>
|
<module>km-dist</module>
|
||||||
</modules>
|
</modules>
|
||||||
```
|
```
|
||||||
|
|
||||||
3. 执行 `mvn -U clean package -Dmaven.test.skip=true`命令;
|
3. 执行 `mvn -U clean package -Dmaven.test.skip=true`命令;
|
||||||
4. 执行完成之后会在 `KS-KM/km-rest/target` 目录下面生成一个 `ks-km.jar` 即为KS的后端部署的Jar包,也可以执行 `mvn -Prelease-package -Dmaven.test.skip=true clean install -U` 生成的tar包也仅有后端服务的功能;
|
4. 执行完成之后会在 `KS-KM/km-rest/target` 目录下面生成一个 `ks-km.jar` 即为 KS 的后端部署的 Jar 包,也可以执行 `mvn -Prelease-package -Dmaven.test.skip=true clean install -U` 生成的 tar 包也仅有后端服务的功能;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -1,6 +1,102 @@
|
|||||||
## 6.2、版本升级手册
|
## 6.2、版本升级手册
|
||||||
|
|
||||||
**`2.x`版本 升级至 `3.0.0`版本**
|
注意:如果想升级至具体版本,需要将你当前版本至你期望使用版本的变更统统执行一遍,然后才能正常使用。
|
||||||
|
|
||||||
|
### 6.2.0、升级至 `master` 版本
|
||||||
|
|
||||||
|
暂无
|
||||||
|
|
||||||
|
### 6.2.1、升级至 `v3.0.0-beta.2`版本
|
||||||
|
|
||||||
|
**配置变更**
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
|
||||||
|
# 新增配置
|
||||||
|
spring:
|
||||||
|
logi-security: # know-streaming 依赖的 logi-security 模块的数据库的配置,默认与 know-streaming 的数据库配置保持一致即可
|
||||||
|
login-extend-bean-name: logiSecurityDefaultLoginExtendImpl # 使用的登录系统Service的Bean名称,无需修改
|
||||||
|
|
||||||
|
# 线程池大小相关配置,在task模块中,新增了三类线程池,
|
||||||
|
# 从而减少不同类型任务之间的相互影响,以及减少对logi-job内的线程池的影响
|
||||||
|
thread-pool:
|
||||||
|
task: # 任务模块的配置
|
||||||
|
metrics: # metrics采集任务配置
|
||||||
|
thread-num: 18 # metrics采集任务线程池核心线程数
|
||||||
|
queue-size: 180 # metrics采集任务线程池队列大小
|
||||||
|
metadata: # metadata同步任务配置
|
||||||
|
thread-num: 27 # metadata同步任务线程池核心线程数
|
||||||
|
queue-size: 270 # metadata同步任务线程池队列大小
|
||||||
|
common: # 剩余其他任务配置
|
||||||
|
thread-num: 15 # 剩余其他任务线程池核心线程数
|
||||||
|
queue-size: 150 # 剩余其他任务线程池队列大小
|
||||||
|
|
||||||
|
# 删除配置,下列配置将不再使用
|
||||||
|
thread-pool:
|
||||||
|
task: # 任务模块的配置
|
||||||
|
heaven: # 采集任务配置
|
||||||
|
thread-num: 20 # 采集任务线程池核心线程数
|
||||||
|
queue-size: 1000 # 采集任务线程池队列大小
|
||||||
|
|
||||||
|
```
|
||||||
|
|
||||||
|
**SQL 变更**
|
||||||
|
|
||||||
|
```sql
|
||||||
|
-- 多集群管理权限2022-09-06新增
|
||||||
|
INSERT INTO `logi_security_permission` (`id`, `permission_name`, `parent_id`, `leaf`, `level`, `description`, `is_delete`, `app_name`) VALUES ('2000', '多集群管理查看', '1593', '1', '2', '多集群管理查看', '0', 'know-streaming');
|
||||||
|
INSERT INTO `logi_security_permission` (`id`, `permission_name`, `parent_id`, `leaf`, `level`, `description`, `is_delete`, `app_name`) VALUES ('2002', 'Topic-迁移副本', '1593', '1', '2', 'Topic-迁移副本', '0', 'know-streaming');
|
||||||
|
INSERT INTO `logi_security_permission` (`id`, `permission_name`, `parent_id`, `leaf`, `level`, `description`, `is_delete`, `app_name`) VALUES ('2004', 'Topic-扩缩副本', '1593', '1', '2', 'Topic-扩缩副本', '0', 'know-streaming');
|
||||||
|
INSERT INTO `logi_security_permission` (`id`, `permission_name`, `parent_id`, `leaf`, `level`, `description`, `is_delete`, `app_name`) VALUES ('2006', 'Cluster-LoadReBalance-周期均衡', '1593', '1', '2', 'Cluster-LoadReBalance-周期均衡', '0', 'know-streaming');
|
||||||
|
INSERT INTO `logi_security_permission` (`id`, `permission_name`, `parent_id`, `leaf`, `level`, `description`, `is_delete`, `app_name`) VALUES ('2008', 'Cluster-LoadReBalance-立即均衡', '1593', '1', '2', 'Cluster-LoadReBalance-立即均衡', '0', 'know-streaming');
|
||||||
|
INSERT INTO `logi_security_permission` (`id`, `permission_name`, `parent_id`, `leaf`, `level`, `description`, `is_delete`, `app_name`) VALUES ('2010', 'Cluster-LoadReBalance-设置集群规格', '1593', '1', '2', 'Cluster-LoadReBalance-设置集群规格', '0', 'know-streaming');
|
||||||
|
|
||||||
|
|
||||||
|
-- 系统管理权限2022-09-06新增
|
||||||
|
INSERT INTO `logi_security_permission` (`id`, `permission_name`, `parent_id`, `leaf`, `level`, `description`, `is_delete`, `app_name`) VALUES ('3000', '系统管理查看', '1595', '1', '2', '系统管理查看', '0', 'know-streaming');
|
||||||
|
|
||||||
|
|
||||||
|
INSERT INTO `logi_security_role_permission` (`role_id`, `permission_id`, `is_delete`, `app_name`) VALUES ('1677', '2000', '0', 'know-streaming');
|
||||||
|
INSERT INTO `logi_security_role_permission` (`role_id`, `permission_id`, `is_delete`, `app_name`) VALUES ('1677', '2002', '0', 'know-streaming');
|
||||||
|
INSERT INTO `logi_security_role_permission` (`role_id`, `permission_id`, `is_delete`, `app_name`) VALUES ('1677', '2004', '0', 'know-streaming');
|
||||||
|
INSERT INTO `logi_security_role_permission` (`role_id`, `permission_id`, `is_delete`, `app_name`) VALUES ('1677', '2006', '0', 'know-streaming');
|
||||||
|
INSERT INTO `logi_security_role_permission` (`role_id`, `permission_id`, `is_delete`, `app_name`) VALUES ('1677', '2008', '0', 'know-streaming');
|
||||||
|
INSERT INTO `logi_security_role_permission` (`role_id`, `permission_id`, `is_delete`, `app_name`) VALUES ('1677', '2010', '0', 'know-streaming');
|
||||||
|
INSERT INTO `logi_security_role_permission` (`role_id`, `permission_id`, `is_delete`, `app_name`) VALUES ('1677', '3000', '0', 'know-streaming');
|
||||||
|
|
||||||
|
-- 修改字段长度
|
||||||
|
ALTER TABLE `logi_security_oplog`
|
||||||
|
CHANGE COLUMN `operator_ip` `operator_ip` VARCHAR(64) NOT NULL COMMENT '操作者ip' ,
|
||||||
|
CHANGE COLUMN `operator` `operator` VARCHAR(64) NULL DEFAULT NULL COMMENT '操作者账号' ,
|
||||||
|
CHANGE COLUMN `operate_page` `operate_page` VARCHAR(64) NOT NULL DEFAULT '' COMMENT '操作页面' ,
|
||||||
|
CHANGE COLUMN `operate_type` `operate_type` VARCHAR(64) NOT NULL COMMENT '操作类型' ,
|
||||||
|
CHANGE COLUMN `target_type` `target_type` VARCHAR(64) NOT NULL COMMENT '对象分类' ,
|
||||||
|
CHANGE COLUMN `target` `target` VARCHAR(1024) NOT NULL COMMENT '操作对象' ,
|
||||||
|
CHANGE COLUMN `operation_methods` `operation_methods` VARCHAR(64) NOT NULL DEFAULT '' COMMENT '操作方式' ;
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 6.2.2、升级至 `v3.0.0-beta.1`版本
|
||||||
|
|
||||||
|
**SQL 变更**
|
||||||
|
|
||||||
|
1、在`ks_km_broker`表增加了一个监听信息字段。
|
||||||
|
2、为`logi_security_oplog`表 operation_methods 字段设置默认值''。
|
||||||
|
因此需要执行下面的 sql 对数据库表进行更新。
|
||||||
|
|
||||||
|
```sql
|
||||||
|
ALTER TABLE `ks_km_broker`
|
||||||
|
ADD COLUMN `endpoint_map` VARCHAR(1024) NOT NULL DEFAULT '' COMMENT '监听信息' AFTER `update_time`;
|
||||||
|
|
||||||
|
ALTER TABLE `logi_security_oplog`
|
||||||
|
ALTER COLUMN `operation_methods` set default '';
|
||||||
|
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 6.2.3、`2.x`版本 升级至 `v3.0.0-beta.0`版本
|
||||||
|
|
||||||
**升级步骤:**
|
**升级步骤:**
|
||||||
|
|
||||||
@@ -24,14 +120,14 @@
|
|||||||
UPDATE ks_km_topic
|
UPDATE ks_km_topic
|
||||||
INNER JOIN
|
INNER JOIN
|
||||||
(SELECT
|
(SELECT
|
||||||
topic.cluster_id AS cluster_id,
|
topic.cluster_id AS cluster_id,
|
||||||
topic.topic_name AS topic_name,
|
topic.topic_name AS topic_name,
|
||||||
topic.description AS description
|
topic.description AS description
|
||||||
FROM topic WHERE description != ''
|
FROM topic WHERE description != ''
|
||||||
) AS t
|
) AS t
|
||||||
|
|
||||||
ON ks_km_topic.cluster_phy_id = t.cluster_id
|
ON ks_km_topic.cluster_phy_id = t.cluster_id
|
||||||
AND ks_km_topic.topic_name = t.topic_name
|
AND ks_km_topic.topic_name = t.topic_name
|
||||||
AND ks_km_topic.id > 0
|
AND ks_km_topic.id > 0
|
||||||
SET ks_km_topic.description = t.description;
|
SET ks_km_topic.description = t.description;
|
||||||
```
|
```
|
||||||
@@ -1,5 +1,4 @@
|
|||||||
|
# FAQ
|
||||||
# FAQ
|
|
||||||
|
|
||||||
## 8.1、支持哪些 Kafka 版本?
|
## 8.1、支持哪些 Kafka 版本?
|
||||||
|
|
||||||
@@ -109,3 +108,77 @@ SECURITY.TRICK_USERS
|
|||||||
设置完成上面两步之后,就可以直接调用需要登录的接口了。
|
设置完成上面两步之后,就可以直接调用需要登录的接口了。
|
||||||
|
|
||||||
但是还有一点需要注意,绕过的用户仅能调用他有权限的接口,比如一个普通用户,那么他就只能调用普通的接口,不能去调用运维人员的接口。
|
但是还有一点需要注意,绕过的用户仅能调用他有权限的接口,比如一个普通用户,那么他就只能调用普通的接口,不能去调用运维人员的接口。
|
||||||
|
|
||||||
|
## 8.8、Specified key was too long; max key length is 767 bytes
|
||||||
|
|
||||||
|
**原因:** 不同版本的 InoDB 引擎,参数‘innodb_large_prefix’默认值不同,即在 5.6 默认值为 OFF,5.7 默认值为 ON。
|
||||||
|
|
||||||
|
对于引擎为 InnoDB,innodb_large_prefix=OFF,且行格式为 Antelope 即支持 REDUNDANT 或 COMPACT 时,索引键前缀长度最大为 767 字节。innodb_large_prefix=ON,且行格式为 Barracuda 即支持 DYNAMIC 或 COMPRESSED 时,索引键前缀长度最大为 3072 字节。
|
||||||
|
|
||||||
|
**解决方案:**
|
||||||
|
|
||||||
|
- 减少 varchar 字符大小低于 767/4=191。
|
||||||
|
- 将字符集改为 latin1(一个字符=一个字节)。
|
||||||
|
- 开启‘innodb_large_prefix’,修改默认行格式‘innodb_file_format’为 Barracuda,并设置 row_format=dynamic。
|
||||||
|
|
||||||
|
## 8.9、出现 ESIndexNotFoundEXception 报错
|
||||||
|
|
||||||
|
**原因 :**没有创建 ES 索引模版
|
||||||
|
|
||||||
|
**解决方案:**执行 init_es_template.sh 脚本,创建 ES 索引模版即可。
|
||||||
|
|
||||||
|
## 8.10、km-console 打包构建失败
|
||||||
|
|
||||||
|
首先,**请确保您正在使用最新版本**,版本列表见 [Tags](https://github.com/didi/KnowStreaming/tags)。如果不是最新版本,请升级后再尝试有无问题。
|
||||||
|
|
||||||
|
常见的原因是由于工程依赖没有正常安装,导致在打包过程中缺少依赖,造成打包失败。您可以检查是否有以下文件夹,且文件夹内是否有内容
|
||||||
|
|
||||||
|
```
|
||||||
|
KnowStreaming/km-console/node_modules
|
||||||
|
KnowStreaming/km-console/packages/layout-clusters-fe/node_modules
|
||||||
|
KnowStreaming/km-console/packages/config-manager-fe/node_modules
|
||||||
|
```
|
||||||
|
|
||||||
|
如果发现没有对应的 `node_modules` 目录或着目录内容为空,说明依赖没有安装成功。请按以下步骤操作,
|
||||||
|
|
||||||
|
1. 手动删除上述三个文件夹(如果有)
|
||||||
|
|
||||||
|
2. 如果之前是通过 `mvn install` 打包 `km-console`,请到项目根目录(KnowStreaming)下重新输入该指令进行打包。观察打包过程有无报错。如有报错,请见步骤 4。
|
||||||
|
|
||||||
|
3. 如果是通过本地独立构建前端工程的方式(指直接执行 `npm run build`),请进入 `KnowStreaming/km-console` 目录,执行下述步骤(注意:执行时请确保您在使用 `node v12` 版本)
|
||||||
|
|
||||||
|
a. 执行 `npm run i`。如有报错,请见步骤 4。
|
||||||
|
|
||||||
|
b. 执行 `npm run build`。如有报错,请见步骤 4。
|
||||||
|
|
||||||
|
4. 麻烦联系我们协助解决。推荐提供以下信息,方面我们快速定位问题,示例如下。
|
||||||
|
|
||||||
|
```
|
||||||
|
操作系统: Mac
|
||||||
|
命令行终端:bash
|
||||||
|
Node 版本: v12.22.12
|
||||||
|
复现步骤: 1. -> 2.
|
||||||
|
错误截图:
|
||||||
|
```
|
||||||
|
|
||||||
|
## 8.11、在 `km-console` 目录下执行 `npm run start` 时看不到应用构建和热加载过程?如何启动单个应用?
|
||||||
|
|
||||||
|
需要到具体的应用中执行 `npm run start`,例如 `cd packages/layout-clusters-fe` 后,执行 `npm run start`。
|
||||||
|
|
||||||
|
应用启动后需要到基座应用中查看(需要启动基座应用,即 layout-clusters-fe)。
|
||||||
|
|
||||||
|
|
||||||
|
## 8.12、权限识别失败问题
|
||||||
|
1、使用admin账号登陆KnowStreaming时,点击系统管理-用户管理-角色管理-新增角色,查看页面是否正常。
|
||||||
|
|
||||||
|
<img src="http://img-ys011.didistatic.com/static/dc2img/do1_gwGfjN9N92UxzHU8dfzr" width = "400" >
|
||||||
|
|
||||||
|
2、查看'/logi-security/api/v1/permission/tree'接口返回值,出现如下图所示乱码现象。
|
||||||
|

|
||||||
|
|
||||||
|
3、查看logi_security_permission表,看看是否出现了中文乱码现象。
|
||||||
|
|
||||||
|
根据以上几点,我们可以确定是由于数据库乱码造成的权限识别失败问题。
|
||||||
|
|
||||||
|
+ 原因:由于数据库编码和我们提供的脚本不一致,数据库里的数据发生了乱码,因此出现权限识别失败问题。
|
||||||
|
+ 解决方案:清空数据库数据,将数据库字符集调整为utf8,最后重新执行[dml-logi.sql](https://github.com/didi/KnowStreaming/blob/master/km-dist/init/sql/dml-logi.sql)脚本导入数据即可。
|
||||||
|
|||||||
@@ -11,7 +11,7 @@
|
|||||||
|
|
||||||
下面是用户第一次使用我们产品的典型体验路径:
|
下面是用户第一次使用我们产品的典型体验路径:
|
||||||
|
|
||||||

|

|
||||||
|
|
||||||
## 5.3、常用功能
|
## 5.3、常用功能
|
||||||
|
|
||||||
|
|||||||
@@ -14,6 +14,7 @@ import com.xiaojukeji.know.streaming.km.common.bean.entity.topic.Topic;
|
|||||||
import com.xiaojukeji.know.streaming.km.common.bean.vo.cluster.res.ClusterBrokersOverviewVO;
|
import com.xiaojukeji.know.streaming.km.common.bean.vo.cluster.res.ClusterBrokersOverviewVO;
|
||||||
import com.xiaojukeji.know.streaming.km.common.bean.vo.cluster.res.ClusterBrokersStateVO;
|
import com.xiaojukeji.know.streaming.km.common.bean.vo.cluster.res.ClusterBrokersStateVO;
|
||||||
import com.xiaojukeji.know.streaming.km.common.bean.vo.kafkacontroller.KafkaControllerVO;
|
import com.xiaojukeji.know.streaming.km.common.bean.vo.kafkacontroller.KafkaControllerVO;
|
||||||
|
import com.xiaojukeji.know.streaming.km.common.constant.KafkaConstant;
|
||||||
import com.xiaojukeji.know.streaming.km.common.enums.SortTypeEnum;
|
import com.xiaojukeji.know.streaming.km.common.enums.SortTypeEnum;
|
||||||
import com.xiaojukeji.know.streaming.km.common.utils.PaginationMetricsUtil;
|
import com.xiaojukeji.know.streaming.km.common.utils.PaginationMetricsUtil;
|
||||||
import com.xiaojukeji.know.streaming.km.common.utils.PaginationUtil;
|
import com.xiaojukeji.know.streaming.km.common.utils.PaginationUtil;
|
||||||
@@ -71,6 +72,9 @@ public class ClusterBrokersManagerImpl implements ClusterBrokersManager {
|
|||||||
Topic groupTopic = topicService.getTopic(clusterPhyId, org.apache.kafka.common.internals.Topic.GROUP_METADATA_TOPIC_NAME);
|
Topic groupTopic = topicService.getTopic(clusterPhyId, org.apache.kafka.common.internals.Topic.GROUP_METADATA_TOPIC_NAME);
|
||||||
Topic transactionTopic = topicService.getTopic(clusterPhyId, org.apache.kafka.common.internals.Topic.TRANSACTION_STATE_TOPIC_NAME);
|
Topic transactionTopic = topicService.getTopic(clusterPhyId, org.apache.kafka.common.internals.Topic.TRANSACTION_STATE_TOPIC_NAME);
|
||||||
|
|
||||||
|
//获取controller信息
|
||||||
|
KafkaController kafkaController = kafkaControllerService.getKafkaControllerFromDB(clusterPhyId);
|
||||||
|
|
||||||
// 格式转换
|
// 格式转换
|
||||||
return PaginationResult.buildSuc(
|
return PaginationResult.buildSuc(
|
||||||
this.convert2ClusterBrokersOverviewVOList(
|
this.convert2ClusterBrokersOverviewVOList(
|
||||||
@@ -78,7 +82,8 @@ public class ClusterBrokersManagerImpl implements ClusterBrokersManager {
|
|||||||
brokerList,
|
brokerList,
|
||||||
metricsResult.getData(),
|
metricsResult.getData(),
|
||||||
groupTopic,
|
groupTopic,
|
||||||
transactionTopic
|
transactionTopic,
|
||||||
|
kafkaController
|
||||||
),
|
),
|
||||||
paginationResult
|
paginationResult
|
||||||
);
|
);
|
||||||
@@ -159,7 +164,8 @@ public class ClusterBrokersManagerImpl implements ClusterBrokersManager {
|
|||||||
List<Broker> brokerList,
|
List<Broker> brokerList,
|
||||||
List<BrokerMetrics> metricsList,
|
List<BrokerMetrics> metricsList,
|
||||||
Topic groupTopic,
|
Topic groupTopic,
|
||||||
Topic transactionTopic) {
|
Topic transactionTopic,
|
||||||
|
KafkaController kafkaController) {
|
||||||
Map<Integer, BrokerMetrics> metricsMap = metricsList == null? new HashMap<>(): metricsList.stream().collect(Collectors.toMap(BrokerMetrics::getBrokerId, Function.identity()));
|
Map<Integer, BrokerMetrics> metricsMap = metricsList == null? new HashMap<>(): metricsList.stream().collect(Collectors.toMap(BrokerMetrics::getBrokerId, Function.identity()));
|
||||||
|
|
||||||
Map<Integer, Broker> brokerMap = brokerList == null? new HashMap<>(): brokerList.stream().collect(Collectors.toMap(Broker::getBrokerId, Function.identity()));
|
Map<Integer, Broker> brokerMap = brokerList == null? new HashMap<>(): brokerList.stream().collect(Collectors.toMap(Broker::getBrokerId, Function.identity()));
|
||||||
@@ -169,12 +175,12 @@ public class ClusterBrokersManagerImpl implements ClusterBrokersManager {
|
|||||||
Broker broker = brokerMap.get(brokerId);
|
Broker broker = brokerMap.get(brokerId);
|
||||||
BrokerMetrics brokerMetrics = metricsMap.get(brokerId);
|
BrokerMetrics brokerMetrics = metricsMap.get(brokerId);
|
||||||
|
|
||||||
voList.add(this.convert2ClusterBrokersOverviewVO(brokerId, broker, brokerMetrics, groupTopic, transactionTopic));
|
voList.add(this.convert2ClusterBrokersOverviewVO(brokerId, broker, brokerMetrics, groupTopic, transactionTopic, kafkaController));
|
||||||
}
|
}
|
||||||
return voList;
|
return voList;
|
||||||
}
|
}
|
||||||
|
|
||||||
private ClusterBrokersOverviewVO convert2ClusterBrokersOverviewVO(Integer brokerId, Broker broker, BrokerMetrics brokerMetrics, Topic groupTopic, Topic transactionTopic) {
|
private ClusterBrokersOverviewVO convert2ClusterBrokersOverviewVO(Integer brokerId, Broker broker, BrokerMetrics brokerMetrics, Topic groupTopic, Topic transactionTopic, KafkaController kafkaController) {
|
||||||
ClusterBrokersOverviewVO clusterBrokersOverviewVO = new ClusterBrokersOverviewVO();
|
ClusterBrokersOverviewVO clusterBrokersOverviewVO = new ClusterBrokersOverviewVO();
|
||||||
clusterBrokersOverviewVO.setBrokerId(brokerId);
|
clusterBrokersOverviewVO.setBrokerId(brokerId);
|
||||||
if (broker != null) {
|
if (broker != null) {
|
||||||
@@ -192,6 +198,9 @@ public class ClusterBrokersManagerImpl implements ClusterBrokersManager {
|
|||||||
if (transactionTopic != null && transactionTopic.getBrokerIdSet().contains(brokerId)) {
|
if (transactionTopic != null && transactionTopic.getBrokerIdSet().contains(brokerId)) {
|
||||||
clusterBrokersOverviewVO.getKafkaRoleList().add(transactionTopic.getTopicName());
|
clusterBrokersOverviewVO.getKafkaRoleList().add(transactionTopic.getTopicName());
|
||||||
}
|
}
|
||||||
|
if (kafkaController != null && kafkaController.getBrokerId().equals(brokerId)) {
|
||||||
|
clusterBrokersOverviewVO.getKafkaRoleList().add(KafkaConstant.CONTROLLER_ROLE);
|
||||||
|
}
|
||||||
|
|
||||||
clusterBrokersOverviewVO.setLatestMetrics(brokerMetrics);
|
clusterBrokersOverviewVO.setLatestMetrics(brokerMetrics);
|
||||||
return clusterBrokersOverviewVO;
|
return clusterBrokersOverviewVO;
|
||||||
|
|||||||
@@ -19,7 +19,8 @@ import com.xiaojukeji.know.streaming.km.common.bean.vo.group.GroupTopicConsumedD
|
|||||||
import com.xiaojukeji.know.streaming.km.common.bean.vo.group.GroupTopicOverviewVO;
|
import com.xiaojukeji.know.streaming.km.common.bean.vo.group.GroupTopicOverviewVO;
|
||||||
import com.xiaojukeji.know.streaming.km.common.constant.MsgConstant;
|
import com.xiaojukeji.know.streaming.km.common.constant.MsgConstant;
|
||||||
import com.xiaojukeji.know.streaming.km.common.enums.AggTypeEnum;
|
import com.xiaojukeji.know.streaming.km.common.enums.AggTypeEnum;
|
||||||
import com.xiaojukeji.know.streaming.km.common.enums.GroupOffsetResetEnum;
|
import com.xiaojukeji.know.streaming.km.common.enums.OffsetTypeEnum;
|
||||||
|
import com.xiaojukeji.know.streaming.km.common.enums.group.GroupStateEnum;
|
||||||
import com.xiaojukeji.know.streaming.km.common.exception.AdminOperateException;
|
import com.xiaojukeji.know.streaming.km.common.exception.AdminOperateException;
|
||||||
import com.xiaojukeji.know.streaming.km.common.exception.NotExistException;
|
import com.xiaojukeji.know.streaming.km.common.exception.NotExistException;
|
||||||
import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
|
import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
|
||||||
@@ -75,7 +76,7 @@ public class GroupManagerImpl implements GroupManager {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (!paginationResult.hasData()) {
|
if (!paginationResult.hasData()) {
|
||||||
return PaginationResult.buildSuc(dto);
|
return PaginationResult.buildSuc(new ArrayList<>(), paginationResult);
|
||||||
}
|
}
|
||||||
|
|
||||||
// 获取指标
|
// 获取指标
|
||||||
@@ -171,7 +172,7 @@ public class GroupManagerImpl implements GroupManager {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (!ConsumerGroupState.EMPTY.equals(description.state()) && !ConsumerGroupState.DEAD.equals(description.state())) {
|
if (!ConsumerGroupState.EMPTY.equals(description.state()) && !ConsumerGroupState.DEAD.equals(description.state())) {
|
||||||
return Result.buildFromRSAndMsg(ResultStatus.KAFKA_OPERATE_FAILED, String.format("group处于%s, 重置失败(仅Empty情况可重置)", description.state().name()));
|
return Result.buildFromRSAndMsg(ResultStatus.KAFKA_OPERATE_FAILED, String.format("group处于%s, 重置失败(仅Empty情况可重置)", GroupStateEnum.getByRawState(description.state()).getState()));
|
||||||
}
|
}
|
||||||
|
|
||||||
// 获取offset
|
// 获取offset
|
||||||
@@ -198,12 +199,12 @@ public class GroupManagerImpl implements GroupManager {
|
|||||||
return Result.buildFromRSAndMsg(ResultStatus.NOT_EXIST, MsgConstant.getTopicNotExist(dto.getClusterId(), dto.getTopicName()));
|
return Result.buildFromRSAndMsg(ResultStatus.NOT_EXIST, MsgConstant.getTopicNotExist(dto.getClusterId(), dto.getTopicName()));
|
||||||
}
|
}
|
||||||
|
|
||||||
if (GroupOffsetResetEnum.PRECISE_OFFSET.getResetType() == dto.getResetType()
|
if (OffsetTypeEnum.PRECISE_OFFSET.getResetType() == dto.getResetType()
|
||||||
&& ValidateUtils.isEmptyList(dto.getOffsetList())) {
|
&& ValidateUtils.isEmptyList(dto.getOffsetList())) {
|
||||||
return Result.buildFromRSAndMsg(ResultStatus.PARAM_ILLEGAL, "参数错误,指定offset重置需传offset信息");
|
return Result.buildFromRSAndMsg(ResultStatus.PARAM_ILLEGAL, "参数错误,指定offset重置需传offset信息");
|
||||||
}
|
}
|
||||||
|
|
||||||
if (GroupOffsetResetEnum.PRECISE_TIMESTAMP.getResetType() == dto.getResetType()
|
if (OffsetTypeEnum.PRECISE_TIMESTAMP.getResetType() == dto.getResetType()
|
||||||
&& ValidateUtils.isNull(dto.getTimestamp())) {
|
&& ValidateUtils.isNull(dto.getTimestamp())) {
|
||||||
return Result.buildFromRSAndMsg(ResultStatus.PARAM_ILLEGAL, "参数错误,指定时间重置需传时间信息");
|
return Result.buildFromRSAndMsg(ResultStatus.PARAM_ILLEGAL, "参数错误,指定时间重置需传时间信息");
|
||||||
}
|
}
|
||||||
@@ -212,7 +213,7 @@ public class GroupManagerImpl implements GroupManager {
|
|||||||
}
|
}
|
||||||
|
|
||||||
private Result<Map<TopicPartition, Long>> getPartitionOffset(GroupOffsetResetDTO dto) {
|
private Result<Map<TopicPartition, Long>> getPartitionOffset(GroupOffsetResetDTO dto) {
|
||||||
if (GroupOffsetResetEnum.PRECISE_OFFSET.getResetType() == dto.getResetType()) {
|
if (OffsetTypeEnum.PRECISE_OFFSET.getResetType() == dto.getResetType()) {
|
||||||
return Result.buildSuc(dto.getOffsetList().stream().collect(Collectors.toMap(
|
return Result.buildSuc(dto.getOffsetList().stream().collect(Collectors.toMap(
|
||||||
elem -> new TopicPartition(dto.getTopicName(), elem.getPartitionId()),
|
elem -> new TopicPartition(dto.getTopicName(), elem.getPartitionId()),
|
||||||
PartitionOffsetDTO::getOffset,
|
PartitionOffsetDTO::getOffset,
|
||||||
@@ -221,9 +222,9 @@ public class GroupManagerImpl implements GroupManager {
|
|||||||
}
|
}
|
||||||
|
|
||||||
OffsetSpec offsetSpec = null;
|
OffsetSpec offsetSpec = null;
|
||||||
if (GroupOffsetResetEnum.PRECISE_TIMESTAMP.getResetType() == dto.getResetType()) {
|
if (OffsetTypeEnum.PRECISE_TIMESTAMP.getResetType() == dto.getResetType()) {
|
||||||
offsetSpec = OffsetSpec.forTimestamp(dto.getTimestamp());
|
offsetSpec = OffsetSpec.forTimestamp(dto.getTimestamp());
|
||||||
} else if (GroupOffsetResetEnum.EARLIEST.getResetType() == dto.getResetType()) {
|
} else if (OffsetTypeEnum.EARLIEST.getResetType() == dto.getResetType()) {
|
||||||
offsetSpec = OffsetSpec.earliest();
|
offsetSpec = OffsetSpec.earliest();
|
||||||
} else {
|
} else {
|
||||||
offsetSpec = OffsetSpec.latest();
|
offsetSpec = OffsetSpec.latest();
|
||||||
|
|||||||
@@ -1,5 +1,6 @@
|
|||||||
package com.xiaojukeji.know.streaming.km.biz.topic;
|
package com.xiaojukeji.know.streaming.km.biz.topic;
|
||||||
|
|
||||||
|
import com.xiaojukeji.know.streaming.km.common.bean.dto.pagination.PaginationSortDTO;
|
||||||
import com.xiaojukeji.know.streaming.km.common.bean.dto.topic.TopicRecordDTO;
|
import com.xiaojukeji.know.streaming.km.common.bean.dto.topic.TopicRecordDTO;
|
||||||
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
|
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
|
||||||
import com.xiaojukeji.know.streaming.km.common.bean.vo.topic.TopicBrokersPartitionsSummaryVO;
|
import com.xiaojukeji.know.streaming.km.common.bean.vo.topic.TopicBrokersPartitionsSummaryVO;
|
||||||
|
|||||||
@@ -22,25 +22,26 @@ import com.xiaojukeji.know.streaming.km.common.bean.vo.topic.partition.TopicPart
|
|||||||
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
|
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
|
||||||
import com.xiaojukeji.know.streaming.km.common.constant.KafkaConstant;
|
import com.xiaojukeji.know.streaming.km.common.constant.KafkaConstant;
|
||||||
import com.xiaojukeji.know.streaming.km.common.constant.MsgConstant;
|
import com.xiaojukeji.know.streaming.km.common.constant.MsgConstant;
|
||||||
import com.xiaojukeji.know.streaming.km.common.converter.PartitionConverter;
|
|
||||||
import com.xiaojukeji.know.streaming.km.common.converter.TopicVOConverter;
|
import com.xiaojukeji.know.streaming.km.common.converter.TopicVOConverter;
|
||||||
|
import com.xiaojukeji.know.streaming.km.common.enums.OffsetTypeEnum;
|
||||||
|
import com.xiaojukeji.know.streaming.km.common.enums.SortTypeEnum;
|
||||||
import com.xiaojukeji.know.streaming.km.common.exception.AdminOperateException;
|
import com.xiaojukeji.know.streaming.km.common.exception.AdminOperateException;
|
||||||
import com.xiaojukeji.know.streaming.km.common.exception.NotExistException;
|
import com.xiaojukeji.know.streaming.km.common.exception.NotExistException;
|
||||||
import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
|
import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
|
||||||
|
import com.xiaojukeji.know.streaming.km.common.utils.PaginationUtil;
|
||||||
import com.xiaojukeji.know.streaming.km.common.utils.ValidateUtils;
|
import com.xiaojukeji.know.streaming.km.common.utils.ValidateUtils;
|
||||||
import com.xiaojukeji.know.streaming.km.core.service.broker.BrokerService;
|
import com.xiaojukeji.know.streaming.km.core.service.broker.BrokerService;
|
||||||
import com.xiaojukeji.know.streaming.km.core.service.cluster.ClusterPhyService;
|
import com.xiaojukeji.know.streaming.km.core.service.cluster.ClusterPhyService;
|
||||||
import com.xiaojukeji.know.streaming.km.core.service.partition.PartitionMetricService;
|
import com.xiaojukeji.know.streaming.km.core.service.partition.PartitionMetricService;
|
||||||
import com.xiaojukeji.know.streaming.km.core.service.topic.TopicConfigService;
|
|
||||||
import com.xiaojukeji.know.streaming.km.core.service.partition.PartitionService;
|
import com.xiaojukeji.know.streaming.km.core.service.partition.PartitionService;
|
||||||
|
import com.xiaojukeji.know.streaming.km.core.service.topic.TopicConfigService;
|
||||||
import com.xiaojukeji.know.streaming.km.core.service.topic.TopicMetricService;
|
import com.xiaojukeji.know.streaming.km.core.service.topic.TopicMetricService;
|
||||||
import com.xiaojukeji.know.streaming.km.core.service.topic.TopicService;
|
import com.xiaojukeji.know.streaming.km.core.service.topic.TopicService;
|
||||||
import com.xiaojukeji.know.streaming.km.core.service.version.metrics.TopicMetricVersionItems;
|
import com.xiaojukeji.know.streaming.km.core.service.version.metrics.TopicMetricVersionItems;
|
||||||
|
import org.apache.commons.lang3.ObjectUtils;
|
||||||
|
import org.apache.commons.lang3.StringUtils;
|
||||||
import org.apache.kafka.clients.admin.OffsetSpec;
|
import org.apache.kafka.clients.admin.OffsetSpec;
|
||||||
import org.apache.kafka.clients.consumer.ConsumerConfig;
|
import org.apache.kafka.clients.consumer.*;
|
||||||
import org.apache.kafka.clients.consumer.ConsumerRecord;
|
|
||||||
import org.apache.kafka.clients.consumer.ConsumerRecords;
|
|
||||||
import org.apache.kafka.clients.consumer.KafkaConsumer;
|
|
||||||
import org.apache.kafka.common.TopicPartition;
|
import org.apache.kafka.common.TopicPartition;
|
||||||
import org.apache.kafka.common.config.TopicConfig;
|
import org.apache.kafka.common.config.TopicConfig;
|
||||||
import org.springframework.beans.factory.annotation.Autowired;
|
import org.springframework.beans.factory.annotation.Autowired;
|
||||||
@@ -129,7 +130,12 @@ public class TopicStateManagerImpl implements TopicStateManager {
|
|||||||
return Result.buildFromRSAndMsg(ResultStatus.NOT_EXIST, MsgConstant.getClusterPhyNotExist(clusterPhyId));
|
return Result.buildFromRSAndMsg(ResultStatus.NOT_EXIST, MsgConstant.getClusterPhyNotExist(clusterPhyId));
|
||||||
}
|
}
|
||||||
|
|
||||||
// 获取分区offset
|
// 获取分区beginOffset
|
||||||
|
Result<Map<TopicPartition, Long>> beginOffsetsMapResult = partitionService.getPartitionOffsetFromKafka(clusterPhyId, topicName, dto.getFilterPartitionId(), OffsetSpec.earliest(), null);
|
||||||
|
if (beginOffsetsMapResult.failed()) {
|
||||||
|
return Result.buildFromIgnoreData(beginOffsetsMapResult);
|
||||||
|
}
|
||||||
|
// 获取分区endOffset
|
||||||
Result<Map<TopicPartition, Long>> endOffsetsMapResult = partitionService.getPartitionOffsetFromKafka(clusterPhyId, topicName, dto.getFilterPartitionId(), OffsetSpec.latest(), null);
|
Result<Map<TopicPartition, Long>> endOffsetsMapResult = partitionService.getPartitionOffsetFromKafka(clusterPhyId, topicName, dto.getFilterPartitionId(), OffsetSpec.latest(), null);
|
||||||
if (endOffsetsMapResult.failed()) {
|
if (endOffsetsMapResult.failed()) {
|
||||||
return Result.buildFromIgnoreData(endOffsetsMapResult);
|
return Result.buildFromIgnoreData(endOffsetsMapResult);
|
||||||
@@ -142,13 +148,48 @@ public class TopicStateManagerImpl implements TopicStateManager {
|
|||||||
// 创建kafka-consumer
|
// 创建kafka-consumer
|
||||||
kafkaConsumer = new KafkaConsumer<>(this.generateClientProperties(clusterPhy, dto.getMaxRecords()));
|
kafkaConsumer = new KafkaConsumer<>(this.generateClientProperties(clusterPhy, dto.getMaxRecords()));
|
||||||
|
|
||||||
kafkaConsumer.assign(endOffsetsMapResult.getData().keySet());
|
List<TopicPartition> partitionList = new ArrayList<>();
|
||||||
for (Map.Entry<TopicPartition, Long> entry: endOffsetsMapResult.getData().entrySet()) {
|
long maxMessage = 0;
|
||||||
kafkaConsumer.seek(entry.getKey(), Math.max(0, entry.getValue() - dto.getMaxRecords()));
|
for (Map.Entry<TopicPartition, Long> entry : endOffsetsMapResult.getData().entrySet()) {
|
||||||
|
long begin = beginOffsetsMapResult.getData().get(entry.getKey());
|
||||||
|
long end = entry.getValue();
|
||||||
|
if (begin == end){
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
maxMessage += end - begin;
|
||||||
|
partitionList.add(entry.getKey());
|
||||||
|
}
|
||||||
|
maxMessage = Math.min(maxMessage, dto.getMaxRecords());
|
||||||
|
kafkaConsumer.assign(partitionList);
|
||||||
|
|
||||||
|
Map<TopicPartition, OffsetAndTimestamp> partitionOffsetAndTimestampMap = new HashMap<>();
|
||||||
|
// 获取指定时间每个分区的offset(按指定开始时间查询消息时)
|
||||||
|
if (OffsetTypeEnum.PRECISE_TIMESTAMP.getResetType() == dto.getFilterOffsetReset()) {
|
||||||
|
Map<TopicPartition, Long> timestampsToSearch = new HashMap<>();
|
||||||
|
partitionList.forEach(topicPartition -> {
|
||||||
|
timestampsToSearch.put(topicPartition, dto.getStartTimestampUnitMs());
|
||||||
|
});
|
||||||
|
partitionOffsetAndTimestampMap = kafkaConsumer.offsetsForTimes(timestampsToSearch);
|
||||||
|
}
|
||||||
|
|
||||||
|
for (TopicPartition partition : partitionList) {
|
||||||
|
if (OffsetTypeEnum.EARLIEST.getResetType() == dto.getFilterOffsetReset()) {
|
||||||
|
// 重置到最旧
|
||||||
|
kafkaConsumer.seek(partition, beginOffsetsMapResult.getData().get(partition));
|
||||||
|
} else if (OffsetTypeEnum.PRECISE_TIMESTAMP.getResetType() == dto.getFilterOffsetReset()) {
|
||||||
|
// 重置到指定时间
|
||||||
|
kafkaConsumer.seek(partition, partitionOffsetAndTimestampMap.get(partition).offset());
|
||||||
|
} else if (OffsetTypeEnum.PRECISE_OFFSET.getResetType() == dto.getFilterOffsetReset()) {
|
||||||
|
// 重置到指定位置
|
||||||
|
|
||||||
|
} else {
|
||||||
|
// 默认,重置到最新
|
||||||
|
kafkaConsumer.seek(partition, Math.max(beginOffsetsMapResult.getData().get(partition), endOffsetsMapResult.getData().get(partition) - dto.getMaxRecords()));
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// 这里需要减去 KafkaConstant.POLL_ONCE_TIMEOUT_UNIT_MS 是因为poll一次需要耗时,如果这里不减去,则可能会导致poll之后,超过要求的时间
|
// 这里需要减去 KafkaConstant.POLL_ONCE_TIMEOUT_UNIT_MS 是因为poll一次需要耗时,如果这里不减去,则可能会导致poll之后,超过要求的时间
|
||||||
while (System.currentTimeMillis() - startTime + KafkaConstant.POLL_ONCE_TIMEOUT_UNIT_MS <= dto.getPullTimeoutUnitMs() && voList.size() < dto.getMaxRecords()) {
|
while (System.currentTimeMillis() - startTime <= dto.getPullTimeoutUnitMs() && voList.size() < maxMessage) {
|
||||||
ConsumerRecords<String, String> consumerRecords = kafkaConsumer.poll(Duration.ofMillis(KafkaConstant.POLL_ONCE_TIMEOUT_UNIT_MS));
|
ConsumerRecords<String, String> consumerRecords = kafkaConsumer.poll(Duration.ofMillis(KafkaConstant.POLL_ONCE_TIMEOUT_UNIT_MS));
|
||||||
for (ConsumerRecord<String, String> consumerRecord : consumerRecords) {
|
for (ConsumerRecord<String, String> consumerRecord : consumerRecords) {
|
||||||
if (this.checkIfIgnore(consumerRecord, dto.getFilterKey(), dto.getFilterValue())) {
|
if (this.checkIfIgnore(consumerRecord, dto.getFilterKey(), dto.getFilterValue())) {
|
||||||
@@ -168,6 +209,15 @@ public class TopicStateManagerImpl implements TopicStateManager {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// 排序
|
||||||
|
if (ObjectUtils.isNotEmpty(voList)) {
|
||||||
|
// 默认按时间倒序排序
|
||||||
|
if (StringUtils.isBlank(dto.getSortType())) {
|
||||||
|
dto.setSortType(SortTypeEnum.DESC.getSortType());
|
||||||
|
}
|
||||||
|
PaginationUtil.pageBySort(voList, dto.getSortField(), dto.getSortType());
|
||||||
|
}
|
||||||
|
|
||||||
return Result.buildSuc(voList.subList(0, Math.min(dto.getMaxRecords(), voList.size())));
|
return Result.buildSuc(voList.subList(0, Math.min(dto.getMaxRecords(), voList.size())));
|
||||||
} catch (Exception e) {
|
} catch (Exception e) {
|
||||||
log.error("method=getTopicMessages||clusterPhyId={}||topicName={}||param={}||errMsg=exception", clusterPhyId, topicName, dto, e);
|
log.error("method=getTopicMessages||clusterPhyId={}||topicName={}||param={}||errMsg=exception", clusterPhyId, topicName, dto, e);
|
||||||
|
|||||||
@@ -7,12 +7,14 @@ import com.didiglobal.logi.log.LogFactory;
|
|||||||
import com.didiglobal.logi.security.common.dto.config.ConfigDTO;
|
import com.didiglobal.logi.security.common.dto.config.ConfigDTO;
|
||||||
import com.didiglobal.logi.security.service.ConfigService;
|
import com.didiglobal.logi.security.service.ConfigService;
|
||||||
import com.xiaojukeji.know.streaming.km.biz.version.VersionControlManager;
|
import com.xiaojukeji.know.streaming.km.biz.version.VersionControlManager;
|
||||||
|
import com.xiaojukeji.know.streaming.km.common.bean.dto.metrices.MetricDetailDTO;
|
||||||
import com.xiaojukeji.know.streaming.km.common.bean.dto.metrices.UserMetricConfigDTO;
|
import com.xiaojukeji.know.streaming.km.common.bean.dto.metrices.UserMetricConfigDTO;
|
||||||
import com.xiaojukeji.know.streaming.km.common.bean.entity.config.metric.UserMetricConfig;
|
import com.xiaojukeji.know.streaming.km.common.bean.entity.config.metric.UserMetricConfig;
|
||||||
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
|
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
|
||||||
import com.xiaojukeji.know.streaming.km.common.bean.entity.version.VersionControlItem;
|
import com.xiaojukeji.know.streaming.km.common.bean.entity.version.VersionControlItem;
|
||||||
import com.xiaojukeji.know.streaming.km.common.bean.vo.config.metric.UserMetricConfigVO;
|
import com.xiaojukeji.know.streaming.km.common.bean.vo.config.metric.UserMetricConfigVO;
|
||||||
import com.xiaojukeji.know.streaming.km.common.bean.vo.version.VersionItemVO;
|
import com.xiaojukeji.know.streaming.km.common.bean.vo.version.VersionItemVO;
|
||||||
|
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
|
||||||
import com.xiaojukeji.know.streaming.km.common.enums.version.VersionEnum;
|
import com.xiaojukeji.know.streaming.km.common.enums.version.VersionEnum;
|
||||||
import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
|
import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
|
||||||
import com.xiaojukeji.know.streaming.km.common.utils.VersionUtil;
|
import com.xiaojukeji.know.streaming.km.common.utils.VersionUtil;
|
||||||
@@ -47,29 +49,29 @@ public class VersionControlManagerImpl implements VersionControlManager {
|
|||||||
@PostConstruct
|
@PostConstruct
|
||||||
public void init(){
|
public void init(){
|
||||||
defaultMetrics.add(new UserMetricConfig(METRIC_TOPIC.getCode(), TOPIC_METRIC_HEALTH_SCORE, true));
|
defaultMetrics.add(new UserMetricConfig(METRIC_TOPIC.getCode(), TOPIC_METRIC_HEALTH_SCORE, true));
|
||||||
defaultMetrics.add(new UserMetricConfig(METRIC_TOPIC.getCode(), TOPIC_METRIC_TOTAL_PRODUCE_REQUESTS, true));
|
|
||||||
defaultMetrics.add(new UserMetricConfig(METRIC_TOPIC.getCode(), TOPIC_METRIC_FAILED_FETCH_REQ, true));
|
defaultMetrics.add(new UserMetricConfig(METRIC_TOPIC.getCode(), TOPIC_METRIC_FAILED_FETCH_REQ, true));
|
||||||
defaultMetrics.add(new UserMetricConfig(METRIC_TOPIC.getCode(), TOPIC_METRIC_FAILED_PRODUCE_REQ, true));
|
defaultMetrics.add(new UserMetricConfig(METRIC_TOPIC.getCode(), TOPIC_METRIC_FAILED_PRODUCE_REQ, true));
|
||||||
defaultMetrics.add(new UserMetricConfig(METRIC_TOPIC.getCode(), TOPIC_METRIC_MESSAGE_IN, true));
|
|
||||||
defaultMetrics.add(new UserMetricConfig(METRIC_TOPIC.getCode(), TOPIC_METRIC_UNDER_REPLICA_PARTITIONS, true));
|
defaultMetrics.add(new UserMetricConfig(METRIC_TOPIC.getCode(), TOPIC_METRIC_UNDER_REPLICA_PARTITIONS, true));
|
||||||
|
defaultMetrics.add(new UserMetricConfig(METRIC_TOPIC.getCode(), TOPIC_METRIC_TOTAL_PRODUCE_REQUESTS, true));
|
||||||
defaultMetrics.add(new UserMetricConfig(METRIC_TOPIC.getCode(), TOPIC_METRIC_BYTES_IN, true));
|
defaultMetrics.add(new UserMetricConfig(METRIC_TOPIC.getCode(), TOPIC_METRIC_BYTES_IN, true));
|
||||||
defaultMetrics.add(new UserMetricConfig(METRIC_TOPIC.getCode(), TOPIC_METRIC_BYTES_OUT, true));
|
defaultMetrics.add(new UserMetricConfig(METRIC_TOPIC.getCode(), TOPIC_METRIC_BYTES_OUT, true));
|
||||||
defaultMetrics.add(new UserMetricConfig(METRIC_TOPIC.getCode(), TOPIC_METRIC_BYTES_REJECTED, true));
|
defaultMetrics.add(new UserMetricConfig(METRIC_TOPIC.getCode(), TOPIC_METRIC_BYTES_REJECTED, true));
|
||||||
|
defaultMetrics.add(new UserMetricConfig(METRIC_TOPIC.getCode(), TOPIC_METRIC_MESSAGE_IN, true));
|
||||||
|
|
||||||
defaultMetrics.add(new UserMetricConfig(METRIC_CLUSTER.getCode(), CLUSTER_METRIC_HEALTH_SCORE, true));
|
defaultMetrics.add(new UserMetricConfig(METRIC_CLUSTER.getCode(), CLUSTER_METRIC_HEALTH_SCORE, true));
|
||||||
defaultMetrics.add(new UserMetricConfig(METRIC_CLUSTER.getCode(), CLUSTER_METRIC_TOTAL_REQ_QUEUE_SIZE, true));
|
|
||||||
defaultMetrics.add(new UserMetricConfig(METRIC_CLUSTER.getCode(), CLUSTER_METRIC_TOTAL_RES_QUEUE_SIZE, true));
|
|
||||||
defaultMetrics.add(new UserMetricConfig(METRIC_CLUSTER.getCode(), CLUSTER_METRIC_ACTIVE_CONTROLLER_COUNT, true));
|
defaultMetrics.add(new UserMetricConfig(METRIC_CLUSTER.getCode(), CLUSTER_METRIC_ACTIVE_CONTROLLER_COUNT, true));
|
||||||
defaultMetrics.add(new UserMetricConfig(METRIC_CLUSTER.getCode(), CLUSTER_METRIC_TOTAL_PRODUCE_REQ, true));
|
|
||||||
defaultMetrics.add(new UserMetricConfig(METRIC_CLUSTER.getCode(), CLUSTER_METRIC_TOTAL_LOG_SIZE, true));
|
|
||||||
defaultMetrics.add(new UserMetricConfig(METRIC_CLUSTER.getCode(), CLUSTER_METRIC_CONNECTIONS, true));
|
|
||||||
defaultMetrics.add(new UserMetricConfig(METRIC_CLUSTER.getCode(), CLUSTER_METRIC_MESSAGES_IN, true));
|
|
||||||
defaultMetrics.add(new UserMetricConfig(METRIC_CLUSTER.getCode(), CLUSTER_METRIC_BYTES_IN, true));
|
defaultMetrics.add(new UserMetricConfig(METRIC_CLUSTER.getCode(), CLUSTER_METRIC_BYTES_IN, true));
|
||||||
defaultMetrics.add(new UserMetricConfig(METRIC_CLUSTER.getCode(), CLUSTER_METRIC_BYTES_OUT, true));
|
defaultMetrics.add(new UserMetricConfig(METRIC_CLUSTER.getCode(), CLUSTER_METRIC_BYTES_OUT, true));
|
||||||
defaultMetrics.add(new UserMetricConfig(METRIC_CLUSTER.getCode(), CLUSTER_METRIC_GROUP_REBALANCES, true));
|
defaultMetrics.add(new UserMetricConfig(METRIC_CLUSTER.getCode(), CLUSTER_METRIC_CONNECTIONS, true));
|
||||||
defaultMetrics.add(new UserMetricConfig(METRIC_CLUSTER.getCode(), CLUSTER_METRIC_JOB_RUNNING, true));
|
defaultMetrics.add(new UserMetricConfig(METRIC_CLUSTER.getCode(), CLUSTER_METRIC_MESSAGES_IN, true));
|
||||||
defaultMetrics.add(new UserMetricConfig(METRIC_CLUSTER.getCode(), CLUSTER_METRIC_PARTITIONS_NO_LEADER, true));
|
defaultMetrics.add(new UserMetricConfig(METRIC_CLUSTER.getCode(), CLUSTER_METRIC_PARTITIONS_NO_LEADER, true));
|
||||||
defaultMetrics.add(new UserMetricConfig(METRIC_CLUSTER.getCode(), CLUSTER_METRIC_PARTITION_URP, true));
|
defaultMetrics.add(new UserMetricConfig(METRIC_CLUSTER.getCode(), CLUSTER_METRIC_PARTITION_URP, true));
|
||||||
|
defaultMetrics.add(new UserMetricConfig(METRIC_CLUSTER.getCode(), CLUSTER_METRIC_TOTAL_LOG_SIZE, true));
|
||||||
|
defaultMetrics.add(new UserMetricConfig(METRIC_CLUSTER.getCode(), CLUSTER_METRIC_TOTAL_PRODUCE_REQ, true));
|
||||||
|
defaultMetrics.add(new UserMetricConfig(METRIC_CLUSTER.getCode(), CLUSTER_METRIC_TOTAL_REQ_QUEUE_SIZE, true));
|
||||||
|
defaultMetrics.add(new UserMetricConfig(METRIC_CLUSTER.getCode(), CLUSTER_METRIC_TOTAL_RES_QUEUE_SIZE, true));
|
||||||
|
defaultMetrics.add(new UserMetricConfig(METRIC_CLUSTER.getCode(), CLUSTER_METRIC_GROUP_REBALANCES, true));
|
||||||
|
defaultMetrics.add(new UserMetricConfig(METRIC_CLUSTER.getCode(), CLUSTER_METRIC_JOB_RUNNING, true));
|
||||||
|
|
||||||
defaultMetrics.add(new UserMetricConfig(METRIC_GROUP.getCode(), GROUP_METRIC_OFFSET_CONSUMED, true));
|
defaultMetrics.add(new UserMetricConfig(METRIC_GROUP.getCode(), GROUP_METRIC_OFFSET_CONSUMED, true));
|
||||||
defaultMetrics.add(new UserMetricConfig(METRIC_GROUP.getCode(), GROUP_METRIC_LAG, true));
|
defaultMetrics.add(new UserMetricConfig(METRIC_GROUP.getCode(), GROUP_METRIC_LAG, true));
|
||||||
@@ -77,18 +79,18 @@ public class VersionControlManagerImpl implements VersionControlManager {
|
|||||||
defaultMetrics.add(new UserMetricConfig(METRIC_GROUP.getCode(), GROUP_METRIC_HEALTH_SCORE, true));
|
defaultMetrics.add(new UserMetricConfig(METRIC_GROUP.getCode(), GROUP_METRIC_HEALTH_SCORE, true));
|
||||||
|
|
||||||
defaultMetrics.add(new UserMetricConfig(METRIC_BROKER.getCode(), BROKER_METRIC_HEALTH_SCORE, true));
|
defaultMetrics.add(new UserMetricConfig(METRIC_BROKER.getCode(), BROKER_METRIC_HEALTH_SCORE, true));
|
||||||
defaultMetrics.add(new UserMetricConfig(METRIC_BROKER.getCode(), BROKER_METRIC_TOTAL_REQ_QUEUE, true));
|
defaultMetrics.add(new UserMetricConfig(METRIC_BROKER.getCode(), BROKER_METRIC_CONNECTION_COUNT, true));
|
||||||
defaultMetrics.add(new UserMetricConfig(METRIC_BROKER.getCode(), BROKER_METRIC_TOTAL_RES_QUEUE, true));
|
|
||||||
defaultMetrics.add(new UserMetricConfig(METRIC_BROKER.getCode(), BROKER_METRIC_MESSAGE_IN, true));
|
defaultMetrics.add(new UserMetricConfig(METRIC_BROKER.getCode(), BROKER_METRIC_MESSAGE_IN, true));
|
||||||
defaultMetrics.add(new UserMetricConfig(METRIC_BROKER.getCode(), BROKER_METRIC_TOTAL_PRODUCE_REQ, true));
|
|
||||||
defaultMetrics.add(new UserMetricConfig(METRIC_BROKER.getCode(), BROKER_METRIC_NETWORK_RPO_AVG_IDLE, true));
|
defaultMetrics.add(new UserMetricConfig(METRIC_BROKER.getCode(), BROKER_METRIC_NETWORK_RPO_AVG_IDLE, true));
|
||||||
defaultMetrics.add(new UserMetricConfig(METRIC_BROKER.getCode(), BROKER_METRIC_REQ_AVG_IDLE, true));
|
defaultMetrics.add(new UserMetricConfig(METRIC_BROKER.getCode(), BROKER_METRIC_REQ_AVG_IDLE, true));
|
||||||
defaultMetrics.add(new UserMetricConfig(METRIC_BROKER.getCode(), BROKER_METRIC_CONNECTION_COUNT, true));
|
defaultMetrics.add(new UserMetricConfig(METRIC_BROKER.getCode(), BROKER_METRIC_TOTAL_PRODUCE_REQ, true));
|
||||||
defaultMetrics.add(new UserMetricConfig(METRIC_BROKER.getCode(), BROKER_METRIC_BYTES_IN, true));
|
defaultMetrics.add(new UserMetricConfig(METRIC_BROKER.getCode(), BROKER_METRIC_TOTAL_REQ_QUEUE, true));
|
||||||
defaultMetrics.add(new UserMetricConfig(METRIC_BROKER.getCode(), BROKER_METRIC_BYTES_OUT, true));
|
defaultMetrics.add(new UserMetricConfig(METRIC_BROKER.getCode(), BROKER_METRIC_TOTAL_RES_QUEUE, true));
|
||||||
defaultMetrics.add(new UserMetricConfig(METRIC_BROKER.getCode(), BROKER_METRIC_PARTITIONS_SKEW, true));
|
|
||||||
defaultMetrics.add(new UserMetricConfig(METRIC_BROKER.getCode(), BROKER_METRIC_LEADERS_SKEW, true));
|
defaultMetrics.add(new UserMetricConfig(METRIC_BROKER.getCode(), BROKER_METRIC_LEADERS_SKEW, true));
|
||||||
defaultMetrics.add(new UserMetricConfig(METRIC_BROKER.getCode(), BROKER_METRIC_UNDER_REPLICATE_PARTITION, true));
|
defaultMetrics.add(new UserMetricConfig(METRIC_BROKER.getCode(), BROKER_METRIC_UNDER_REPLICATE_PARTITION, true));
|
||||||
|
defaultMetrics.add(new UserMetricConfig(METRIC_BROKER.getCode(), BROKER_METRIC_PARTITIONS_SKEW, true));
|
||||||
|
defaultMetrics.add(new UserMetricConfig(METRIC_BROKER.getCode(), BROKER_METRIC_BYTES_IN, true));
|
||||||
|
defaultMetrics.add(new UserMetricConfig(METRIC_BROKER.getCode(), BROKER_METRIC_BYTES_OUT, true));
|
||||||
}
|
}
|
||||||
|
|
||||||
@Autowired
|
@Autowired
|
||||||
@@ -159,6 +161,9 @@ public class VersionControlManagerImpl implements VersionControlManager {
|
|||||||
|
|
||||||
UserMetricConfig umc = userMetricConfigMap.get(itemType + "@" + metric);
|
UserMetricConfig umc = userMetricConfigMap.get(itemType + "@" + metric);
|
||||||
userMetricConfigVO.setSet(null != umc && umc.isSet());
|
userMetricConfigVO.setSet(null != umc && umc.isSet());
|
||||||
|
if (umc != null) {
|
||||||
|
userMetricConfigVO.setRank(umc.getRank());
|
||||||
|
}
|
||||||
userMetricConfigVO.setName(itemVO.getName());
|
userMetricConfigVO.setName(itemVO.getName());
|
||||||
userMetricConfigVO.setType(itemVO.getType());
|
userMetricConfigVO.setType(itemVO.getType());
|
||||||
userMetricConfigVO.setDesc(itemVO.getDesc());
|
userMetricConfigVO.setDesc(itemVO.getDesc());
|
||||||
@@ -178,13 +183,29 @@ public class VersionControlManagerImpl implements VersionControlManager {
|
|||||||
@Override
|
@Override
|
||||||
public Result<Void> updateUserMetricItem(Long clusterId, Integer type, UserMetricConfigDTO dto, String operator) {
|
public Result<Void> updateUserMetricItem(Long clusterId, Integer type, UserMetricConfigDTO dto, String operator) {
|
||||||
Map<String, Boolean> metricsSetMap = dto.getMetricsSet();
|
Map<String, Boolean> metricsSetMap = dto.getMetricsSet();
|
||||||
if(null == metricsSetMap || metricsSetMap.isEmpty()){
|
|
||||||
|
//转换metricDetailDTOList
|
||||||
|
List<MetricDetailDTO> metricDetailDTOList = dto.getMetricDetailDTOList();
|
||||||
|
Map<String, MetricDetailDTO> metricDetailMap = new HashMap<>();
|
||||||
|
if (metricDetailDTOList != null && !metricDetailDTOList.isEmpty()) {
|
||||||
|
metricDetailMap = metricDetailDTOList.stream().collect(Collectors.toMap(MetricDetailDTO::getMetric, Function.identity()));
|
||||||
|
}
|
||||||
|
|
||||||
|
//转换metricsSetMap
|
||||||
|
if (metricsSetMap != null && !metricsSetMap.isEmpty()) {
|
||||||
|
for (Map.Entry<String, Boolean> metricAndShowEntry : metricsSetMap.entrySet()) {
|
||||||
|
if (metricDetailMap.containsKey(metricAndShowEntry.getKey())) continue;
|
||||||
|
metricDetailMap.put(metricAndShowEntry.getKey(), new MetricDetailDTO(metricAndShowEntry.getKey(), metricAndShowEntry.getValue(), null));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (metricDetailMap.isEmpty()) {
|
||||||
return Result.buildSuc();
|
return Result.buildSuc();
|
||||||
}
|
}
|
||||||
|
|
||||||
Set<UserMetricConfig> userMetricConfigs = getUserMetricConfig(operator);
|
Set<UserMetricConfig> userMetricConfigs = getUserMetricConfig(operator);
|
||||||
for(Map.Entry<String, Boolean> metricAndShowEntry : metricsSetMap.entrySet()){
|
for (MetricDetailDTO metricDetailDTO : metricDetailMap.values()) {
|
||||||
UserMetricConfig userMetricConfig = new UserMetricConfig(type, metricAndShowEntry.getKey(), metricAndShowEntry.getValue());
|
UserMetricConfig userMetricConfig = new UserMetricConfig(type, metricDetailDTO.getMetric(), metricDetailDTO.getSet(), metricDetailDTO.getRank());
|
||||||
userMetricConfigs.remove(userMetricConfig);
|
userMetricConfigs.remove(userMetricConfig);
|
||||||
userMetricConfigs.add(userMetricConfig);
|
userMetricConfigs.add(userMetricConfig);
|
||||||
}
|
}
|
||||||
@@ -228,7 +249,7 @@ public class VersionControlManagerImpl implements VersionControlManager {
|
|||||||
return defaultMetrics;
|
return defaultMetrics;
|
||||||
}
|
}
|
||||||
|
|
||||||
return JSON.parseObject(value, new TypeReference<Set<UserMetricConfig>>(){});
|
return JSON.parseObject(value, new TypeReference<Set<UserMetricConfig>>() {});
|
||||||
}
|
}
|
||||||
|
|
||||||
public static void main(String[] args){
|
public static void main(String[] args){
|
||||||
|
|||||||
@@ -5,7 +5,6 @@ import com.didiglobal.logi.log.LogFactory;
|
|||||||
import com.xiaojukeji.know.streaming.km.common.bean.event.metric.*;
|
import com.xiaojukeji.know.streaming.km.common.bean.event.metric.*;
|
||||||
import com.xiaojukeji.know.streaming.km.common.bean.po.BaseESPO;
|
import com.xiaojukeji.know.streaming.km.common.bean.po.BaseESPO;
|
||||||
import com.xiaojukeji.know.streaming.km.common.bean.po.metrice.*;
|
import com.xiaojukeji.know.streaming.km.common.bean.po.metrice.*;
|
||||||
import com.xiaojukeji.know.streaming.km.common.enums.metric.KafkaMetricIndexEnum;
|
|
||||||
import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
|
import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
|
||||||
import com.xiaojukeji.know.streaming.km.common.utils.EnvUtil;
|
import com.xiaojukeji.know.streaming.km.common.utils.EnvUtil;
|
||||||
import com.xiaojukeji.know.streaming.km.common.utils.NamedThreadFactory;
|
import com.xiaojukeji.know.streaming.km.common.utils.NamedThreadFactory;
|
||||||
@@ -21,6 +20,8 @@ import java.util.concurrent.LinkedBlockingDeque;
|
|||||||
import java.util.concurrent.ThreadPoolExecutor;
|
import java.util.concurrent.ThreadPoolExecutor;
|
||||||
import java.util.concurrent.TimeUnit;
|
import java.util.concurrent.TimeUnit;
|
||||||
|
|
||||||
|
import static com.xiaojukeji.know.streaming.km.common.constant.ESIndexConstant.*;
|
||||||
|
|
||||||
@Component
|
@Component
|
||||||
public class MetricESSender implements ApplicationListener<BaseMetricEvent> {
|
public class MetricESSender implements ApplicationListener<BaseMetricEvent> {
|
||||||
protected static final ILog LOGGER = LogFactory.getLog("METRIC_LOGGER");
|
protected static final ILog LOGGER = LogFactory.getLog("METRIC_LOGGER");
|
||||||
@@ -41,37 +42,37 @@ public class MetricESSender implements ApplicationListener<BaseMetricEvent> {
|
|||||||
public void onApplicationEvent(BaseMetricEvent event) {
|
public void onApplicationEvent(BaseMetricEvent event) {
|
||||||
if(event instanceof BrokerMetricEvent) {
|
if(event instanceof BrokerMetricEvent) {
|
||||||
BrokerMetricEvent brokerMetricEvent = (BrokerMetricEvent)event;
|
BrokerMetricEvent brokerMetricEvent = (BrokerMetricEvent)event;
|
||||||
send2es(KafkaMetricIndexEnum.BROKER_INFO,
|
send2es(BROKER_INDEX,
|
||||||
ConvertUtil.list2List(brokerMetricEvent.getBrokerMetrics(), BrokerMetricPO.class)
|
ConvertUtil.list2List(brokerMetricEvent.getBrokerMetrics(), BrokerMetricPO.class)
|
||||||
);
|
);
|
||||||
|
|
||||||
} else if(event instanceof ClusterMetricEvent) {
|
} else if(event instanceof ClusterMetricEvent) {
|
||||||
ClusterMetricEvent clusterMetricEvent = (ClusterMetricEvent)event;
|
ClusterMetricEvent clusterMetricEvent = (ClusterMetricEvent)event;
|
||||||
send2es(KafkaMetricIndexEnum.CLUSTER_INFO,
|
send2es(CLUSTER_INDEX,
|
||||||
ConvertUtil.list2List(clusterMetricEvent.getClusterMetrics(), ClusterMetricPO.class)
|
ConvertUtil.list2List(clusterMetricEvent.getClusterMetrics(), ClusterMetricPO.class)
|
||||||
);
|
);
|
||||||
|
|
||||||
} else if(event instanceof TopicMetricEvent) {
|
} else if(event instanceof TopicMetricEvent) {
|
||||||
TopicMetricEvent topicMetricEvent = (TopicMetricEvent)event;
|
TopicMetricEvent topicMetricEvent = (TopicMetricEvent)event;
|
||||||
send2es(KafkaMetricIndexEnum.TOPIC_INFO,
|
send2es(TOPIC_INDEX,
|
||||||
ConvertUtil.list2List(topicMetricEvent.getTopicMetrics(), TopicMetricPO.class)
|
ConvertUtil.list2List(topicMetricEvent.getTopicMetrics(), TopicMetricPO.class)
|
||||||
);
|
);
|
||||||
|
|
||||||
} else if(event instanceof PartitionMetricEvent) {
|
} else if(event instanceof PartitionMetricEvent) {
|
||||||
PartitionMetricEvent partitionMetricEvent = (PartitionMetricEvent)event;
|
PartitionMetricEvent partitionMetricEvent = (PartitionMetricEvent)event;
|
||||||
send2es(KafkaMetricIndexEnum.PARTITION_INFO,
|
send2es(PARTITION_INDEX,
|
||||||
ConvertUtil.list2List(partitionMetricEvent.getPartitionMetrics(), PartitionMetricPO.class)
|
ConvertUtil.list2List(partitionMetricEvent.getPartitionMetrics(), PartitionMetricPO.class)
|
||||||
);
|
);
|
||||||
|
|
||||||
} else if(event instanceof GroupMetricEvent) {
|
} else if(event instanceof GroupMetricEvent) {
|
||||||
GroupMetricEvent groupMetricEvent = (GroupMetricEvent)event;
|
GroupMetricEvent groupMetricEvent = (GroupMetricEvent)event;
|
||||||
send2es(KafkaMetricIndexEnum.GROUP_INFO,
|
send2es(GROUP_INDEX,
|
||||||
ConvertUtil.list2List(groupMetricEvent.getGroupMetrics(), GroupMetricPO.class)
|
ConvertUtil.list2List(groupMetricEvent.getGroupMetrics(), GroupMetricPO.class)
|
||||||
);
|
);
|
||||||
|
|
||||||
} else if(event instanceof ReplicaMetricEvent) {
|
} else if(event instanceof ReplicaMetricEvent) {
|
||||||
ReplicaMetricEvent replicaMetricEvent = (ReplicaMetricEvent)event;
|
ReplicaMetricEvent replicaMetricEvent = (ReplicaMetricEvent)event;
|
||||||
send2es(KafkaMetricIndexEnum.REPLICATION_INFO,
|
send2es(REPLICATION_INDEX,
|
||||||
ConvertUtil.list2List(replicaMetricEvent.getReplicationMetrics(), ReplicationMetricPO.class)
|
ConvertUtil.list2List(replicaMetricEvent.getReplicationMetrics(), ReplicationMetricPO.class)
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
@@ -80,19 +81,19 @@ public class MetricESSender implements ApplicationListener<BaseMetricEvent> {
|
|||||||
/**
|
/**
|
||||||
* 根据不同监控维度来发送
|
* 根据不同监控维度来发送
|
||||||
*/
|
*/
|
||||||
private boolean send2es(KafkaMetricIndexEnum stats, List<? extends BaseESPO> statsList){
|
private boolean send2es(String index, List<? extends BaseESPO> statsList){
|
||||||
if (CollectionUtils.isEmpty(statsList)) {
|
if (CollectionUtils.isEmpty(statsList)) {
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!EnvUtil.isOnline()) {
|
if (!EnvUtil.isOnline()) {
|
||||||
LOGGER.info("class=MetricESSender||method=send2es||ariusStats={}||size={}",
|
LOGGER.info("class=MetricESSender||method=send2es||ariusStats={}||size={}",
|
||||||
stats.getIndex(), statsList.size());
|
index, statsList.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
BaseMetricESDAO baseMetricESDao = BaseMetricESDAO.getByStatsType(stats);
|
BaseMetricESDAO baseMetricESDao = BaseMetricESDAO.getByStatsType(index);
|
||||||
if (Objects.isNull( baseMetricESDao )) {
|
if (Objects.isNull( baseMetricESDao )) {
|
||||||
LOGGER.error("class=MetricESSender||method=send2es||errMsg=fail to find {}", stats.getIndex());
|
LOGGER.error("class=MetricESSender||method=send2es||errMsg=fail to find {}", index);
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ package com.xiaojukeji.know.streaming.km.common.bean.dto.group;
|
|||||||
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
|
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
|
||||||
import com.xiaojukeji.know.streaming.km.common.bean.dto.partition.PartitionOffsetDTO;
|
import com.xiaojukeji.know.streaming.km.common.bean.dto.partition.PartitionOffsetDTO;
|
||||||
import com.xiaojukeji.know.streaming.km.common.bean.dto.topic.ClusterTopicDTO;
|
import com.xiaojukeji.know.streaming.km.common.bean.dto.topic.ClusterTopicDTO;
|
||||||
|
import com.xiaojukeji.know.streaming.km.common.enums.OffsetTypeEnum;
|
||||||
import io.swagger.annotations.ApiModelProperty;
|
import io.swagger.annotations.ApiModelProperty;
|
||||||
import lombok.Data;
|
import lombok.Data;
|
||||||
|
|
||||||
@@ -23,7 +24,7 @@ public class GroupOffsetResetDTO extends ClusterTopicDTO {
|
|||||||
private String groupName;
|
private String groupName;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @see com.xiaojukeji.know.streaming.km.common.enums.GroupOffsetResetEnum
|
* @see OffsetTypeEnum
|
||||||
*/
|
*/
|
||||||
@NotNull(message = "resetType不允许为空")
|
@NotNull(message = "resetType不允许为空")
|
||||||
@ApiModelProperty(value = "重置方式", example = "1")
|
@ApiModelProperty(value = "重置方式", example = "1")
|
||||||
|
|||||||
@@ -0,0 +1,28 @@
|
|||||||
|
package com.xiaojukeji.know.streaming.km.common.bean.dto.metrices;
|
||||||
|
|
||||||
|
import com.xiaojukeji.know.streaming.km.common.bean.dto.BaseDTO;
|
||||||
|
import io.swagger.annotations.ApiModel;
|
||||||
|
import io.swagger.annotations.ApiModelProperty;
|
||||||
|
import lombok.AllArgsConstructor;
|
||||||
|
import lombok.Data;
|
||||||
|
import lombok.NoArgsConstructor;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @author didi
|
||||||
|
*/
|
||||||
|
@Data
|
||||||
|
@NoArgsConstructor
|
||||||
|
@AllArgsConstructor
|
||||||
|
@ApiModel(description = "指标详细属性信息")
|
||||||
|
public class MetricDetailDTO extends BaseDTO {
|
||||||
|
|
||||||
|
@ApiModelProperty("指标名称")
|
||||||
|
private String metric;
|
||||||
|
|
||||||
|
@ApiModelProperty("指标是否显示")
|
||||||
|
private Boolean set;
|
||||||
|
|
||||||
|
@ApiModelProperty("指标优先级")
|
||||||
|
private Integer rank;
|
||||||
|
|
||||||
|
}
|
||||||
@@ -7,6 +7,7 @@ import lombok.AllArgsConstructor;
|
|||||||
import lombok.Data;
|
import lombok.Data;
|
||||||
import lombok.NoArgsConstructor;
|
import lombok.NoArgsConstructor;
|
||||||
|
|
||||||
|
import java.util.List;
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
|
|
||||||
|
|
||||||
@@ -17,4 +18,7 @@ import java.util.Map;
|
|||||||
public class UserMetricConfigDTO extends BaseDTO {
|
public class UserMetricConfigDTO extends BaseDTO {
|
||||||
@ApiModelProperty("指标展示设置项,key:指标名;value:是否展现(true展现/false不展现)")
|
@ApiModelProperty("指标展示设置项,key:指标名;value:是否展现(true展现/false不展现)")
|
||||||
private Map<String, Boolean> metricsSet;
|
private Map<String, Boolean> metricsSet;
|
||||||
|
|
||||||
|
@ApiModelProperty("指标自定义属性列表")
|
||||||
|
private List<MetricDetailDTO> metricDetailDTOList;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,7 +1,8 @@
|
|||||||
package com.xiaojukeji.know.streaming.km.common.bean.dto.topic;
|
package com.xiaojukeji.know.streaming.km.common.bean.dto.topic;
|
||||||
|
|
||||||
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
|
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
|
||||||
import com.xiaojukeji.know.streaming.km.common.bean.dto.BaseDTO;
|
import com.xiaojukeji.know.streaming.km.common.bean.dto.pagination.PaginationSortDTO;
|
||||||
|
import com.xiaojukeji.know.streaming.km.common.enums.OffsetTypeEnum;
|
||||||
import io.swagger.annotations.ApiModel;
|
import io.swagger.annotations.ApiModel;
|
||||||
import io.swagger.annotations.ApiModelProperty;
|
import io.swagger.annotations.ApiModelProperty;
|
||||||
import lombok.Data;
|
import lombok.Data;
|
||||||
@@ -15,7 +16,7 @@ import javax.validation.constraints.NotNull;
|
|||||||
@Data
|
@Data
|
||||||
@JsonIgnoreProperties(ignoreUnknown = true)
|
@JsonIgnoreProperties(ignoreUnknown = true)
|
||||||
@ApiModel(description = "Topic记录")
|
@ApiModel(description = "Topic记录")
|
||||||
public class TopicRecordDTO extends BaseDTO {
|
public class TopicRecordDTO extends PaginationSortDTO {
|
||||||
@NotNull(message = "truncate不允许为空")
|
@NotNull(message = "truncate不允许为空")
|
||||||
@ApiModelProperty(value = "是否截断", example = "true")
|
@ApiModelProperty(value = "是否截断", example = "true")
|
||||||
private Boolean truncate;
|
private Boolean truncate;
|
||||||
@@ -34,4 +35,13 @@ public class TopicRecordDTO extends BaseDTO {
|
|||||||
|
|
||||||
@ApiModelProperty(value = "预览超时时间", example = "10000")
|
@ApiModelProperty(value = "预览超时时间", example = "10000")
|
||||||
private Long pullTimeoutUnitMs = 8000L;
|
private Long pullTimeoutUnitMs = 8000L;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @see OffsetTypeEnum
|
||||||
|
*/
|
||||||
|
@ApiModelProperty(value = "offset", example = "")
|
||||||
|
private Integer filterOffsetReset = 0;
|
||||||
|
|
||||||
|
@ApiModelProperty(value = "开始日期时间戳", example = "")
|
||||||
|
private Long startTimestampUnitMs;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,12 +1,17 @@
|
|||||||
package com.xiaojukeji.know.streaming.km.common.bean.entity.broker;
|
package com.xiaojukeji.know.streaming.km.common.bean.entity.broker;
|
||||||
|
|
||||||
import com.xiaojukeji.know.streaming.km.common.zookeeper.znode.brokers.BrokerMetadata;
|
|
||||||
|
import com.alibaba.fastjson.TypeReference;
|
||||||
|
import com.xiaojukeji.know.streaming.km.common.bean.entity.common.IpPortData;
|
||||||
|
import com.xiaojukeji.know.streaming.km.common.bean.po.broker.BrokerPO;
|
||||||
|
import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
|
||||||
import lombok.AllArgsConstructor;
|
import lombok.AllArgsConstructor;
|
||||||
import lombok.Data;
|
import lombok.Data;
|
||||||
import lombok.NoArgsConstructor;
|
import lombok.NoArgsConstructor;
|
||||||
import org.apache.kafka.common.Node;
|
import org.apache.kafka.common.Node;
|
||||||
|
|
||||||
import java.io.Serializable;
|
import java.io.Serializable;
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @author didi
|
* @author didi
|
||||||
@@ -55,6 +60,11 @@ public class Broker implements Serializable {
|
|||||||
*/
|
*/
|
||||||
private Integer status;
|
private Integer status;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 监听信息
|
||||||
|
*/
|
||||||
|
private Map<String, IpPortData> endpointMap;
|
||||||
|
|
||||||
public static Broker buildFrom(Long clusterPhyId, Node node, Long startTimestamp) {
|
public static Broker buildFrom(Long clusterPhyId, Node node, Long startTimestamp) {
|
||||||
Broker metadata = new Broker();
|
Broker metadata = new Broker();
|
||||||
metadata.setClusterPhyId(clusterPhyId);
|
metadata.setClusterPhyId(clusterPhyId);
|
||||||
@@ -68,17 +78,25 @@ public class Broker implements Serializable {
|
|||||||
return metadata;
|
return metadata;
|
||||||
}
|
}
|
||||||
|
|
||||||
public static Broker buildFrom(Long clusterPhyId, Integer brokerId, BrokerMetadata brokerMetadata) {
|
public static Broker buildFrom(BrokerPO brokerPO) {
|
||||||
Broker metadata = new Broker();
|
Broker broker = ConvertUtil.obj2Obj(brokerPO, Broker.class);
|
||||||
metadata.setClusterPhyId(clusterPhyId);
|
String endpointMapStr = brokerPO.getEndpointMap();
|
||||||
metadata.setBrokerId(brokerId);
|
if (broker == null || endpointMapStr == null || endpointMapStr.equals("")) {
|
||||||
metadata.setHost(brokerMetadata.getHost());
|
return broker;
|
||||||
metadata.setPort(brokerMetadata.getPort());
|
}
|
||||||
metadata.setJmxPort(brokerMetadata.getJmxPort());
|
|
||||||
metadata.setStartTimestamp(brokerMetadata.getTimestamp());
|
// 填充endpoint信息
|
||||||
metadata.setRack(brokerMetadata.getRack());
|
Map<String, IpPortData> endpointMap = ConvertUtil.str2ObjByJson(endpointMapStr, new TypeReference<Map<String, IpPortData>>(){});
|
||||||
metadata.setStatus(1);
|
broker.setEndpointMap(endpointMap);
|
||||||
return metadata;
|
return broker;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getJmxHost(String endPoint) {
|
||||||
|
if (endPoint == null || endpointMap == null) {
|
||||||
|
return host;
|
||||||
|
}
|
||||||
|
IpPortData ip = endpointMap.get(endPoint);
|
||||||
|
return ip != null ? ip.getIp() : host;
|
||||||
}
|
}
|
||||||
|
|
||||||
public boolean alive() {
|
public boolean alive() {
|
||||||
|
|||||||
@@ -27,6 +27,9 @@ public class JmxConfig implements Serializable {
|
|||||||
|
|
||||||
@ApiModelProperty(value="SSL情况下的token", example = "KsKmCCY19")
|
@ApiModelProperty(value="SSL情况下的token", example = "KsKmCCY19")
|
||||||
private String token;
|
private String token;
|
||||||
|
|
||||||
|
@ApiModelProperty(value="使用哪个endpoint网络", example = "EXTERNAL")
|
||||||
|
private String useWhichEndpoint;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -1,12 +1,12 @@
|
|||||||
package com.xiaojukeji.know.streaming.km.common.bean.entity.config.metric;
|
package com.xiaojukeji.know.streaming.km.common.bean.entity.config.metric;
|
||||||
|
|
||||||
|
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
|
||||||
import lombok.AllArgsConstructor;
|
import lombok.AllArgsConstructor;
|
||||||
import lombok.Data;
|
import lombok.Data;
|
||||||
import lombok.NoArgsConstructor;
|
import lombok.NoArgsConstructor;
|
||||||
|
|
||||||
@Data
|
@Data
|
||||||
@NoArgsConstructor
|
@NoArgsConstructor
|
||||||
@AllArgsConstructor
|
|
||||||
public class UserMetricConfig {
|
public class UserMetricConfig {
|
||||||
|
|
||||||
private int type;
|
private int type;
|
||||||
@@ -15,6 +15,22 @@ public class UserMetricConfig {
|
|||||||
|
|
||||||
private boolean set;
|
private boolean set;
|
||||||
|
|
||||||
|
private Integer rank;
|
||||||
|
|
||||||
|
public UserMetricConfig(int type, String metric, boolean set, Integer rank) {
|
||||||
|
this.type = type;
|
||||||
|
this.metric = metric;
|
||||||
|
this.set = set;
|
||||||
|
this.rank = rank;
|
||||||
|
}
|
||||||
|
|
||||||
|
public UserMetricConfig(int type, String metric, boolean set) {
|
||||||
|
this.type = type;
|
||||||
|
this.metric = metric;
|
||||||
|
this.set = set;
|
||||||
|
this.rank = null;
|
||||||
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public int hashCode(){
|
public int hashCode(){
|
||||||
return metric.hashCode() << 1 + type;
|
return metric.hashCode() << 1 + type;
|
||||||
|
|||||||
@@ -0,0 +1,19 @@
|
|||||||
|
package com.xiaojukeji.know.streaming.km.common.bean.entity.param.partition;
|
||||||
|
|
||||||
|
import com.xiaojukeji.know.streaming.km.common.bean.entity.param.cluster.ClusterPhyParam;
|
||||||
|
import lombok.Data;
|
||||||
|
import lombok.NoArgsConstructor;
|
||||||
|
import org.apache.kafka.common.TopicPartition;
|
||||||
|
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
@Data
|
||||||
|
@NoArgsConstructor
|
||||||
|
public class BatchPartitionParam extends ClusterPhyParam {
|
||||||
|
private List<TopicPartition> tpList;
|
||||||
|
|
||||||
|
public BatchPartitionParam(Long clusterPhyId, List<TopicPartition> tpList) {
|
||||||
|
super(clusterPhyId);
|
||||||
|
this.tpList = tpList;
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
package com.xiaojukeji.know.streaming.km.common.bean.entity.param.partition;
|
package com.xiaojukeji.know.streaming.km.common.bean.entity.param.partition;
|
||||||
|
|
||||||
import com.xiaojukeji.know.streaming.km.common.bean.entity.param.cluster.ClusterPhyParam;
|
import com.xiaojukeji.know.streaming.km.common.bean.entity.param.topic.TopicParam;
|
||||||
import lombok.Data;
|
import lombok.Data;
|
||||||
import lombok.NoArgsConstructor;
|
import lombok.NoArgsConstructor;
|
||||||
import org.apache.kafka.clients.admin.OffsetSpec;
|
import org.apache.kafka.clients.admin.OffsetSpec;
|
||||||
@@ -10,13 +10,13 @@ import java.util.Map;
|
|||||||
|
|
||||||
@Data
|
@Data
|
||||||
@NoArgsConstructor
|
@NoArgsConstructor
|
||||||
public class PartitionOffsetParam extends ClusterPhyParam {
|
public class PartitionOffsetParam extends TopicParam {
|
||||||
private Map<TopicPartition, OffsetSpec> topicPartitionOffsets;
|
private Map<TopicPartition, OffsetSpec> topicPartitionOffsets;
|
||||||
|
|
||||||
private Long timestamp;
|
private Long timestamp;
|
||||||
|
|
||||||
public PartitionOffsetParam(Long clusterPhyId, Map<TopicPartition, OffsetSpec> topicPartitionOffsets, Long timestamp) {
|
public PartitionOffsetParam(Long clusterPhyId, String topicName, Map<TopicPartition, OffsetSpec> topicPartitionOffsets, Long timestamp) {
|
||||||
super(clusterPhyId);
|
super(clusterPhyId, topicName);
|
||||||
this.topicPartitionOffsets = topicPartitionOffsets;
|
this.topicPartitionOffsets = topicPartitionOffsets;
|
||||||
this.timestamp = timestamp;
|
this.timestamp = timestamp;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -15,4 +15,12 @@ public class TopicParam extends ClusterPhyParam {
|
|||||||
super(clusterPhyId);
|
super(clusterPhyId);
|
||||||
this.topicName = topicName;
|
this.topicName = topicName;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public String toString() {
|
||||||
|
return "TopicParam{" +
|
||||||
|
"clusterPhyId=" + clusterPhyId +
|
||||||
|
", topicName='" + topicName + '\'' +
|
||||||
|
'}';
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -0,0 +1,15 @@
|
|||||||
|
package com.xiaojukeji.know.streaming.km.common.bean.event.cluster;
|
||||||
|
|
||||||
|
import lombok.Getter;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 集群新增事件
|
||||||
|
* @author zengqiao
|
||||||
|
* @date 22/02/25
|
||||||
|
*/
|
||||||
|
@Getter
|
||||||
|
public class ClusterPhyAddedEvent extends ClusterPhyBaseEvent {
|
||||||
|
public ClusterPhyAddedEvent(Object source, Long clusterPhyId) {
|
||||||
|
super(source, clusterPhyId);
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,26 +0,0 @@
|
|||||||
package com.xiaojukeji.know.streaming.km.common.bean.event.kafka.zk;
|
|
||||||
|
|
||||||
import lombok.Getter;
|
|
||||||
|
|
||||||
@Getter
|
|
||||||
public abstract class BaseKafkaZKEvent {
|
|
||||||
/**
|
|
||||||
* 触发时间
|
|
||||||
*/
|
|
||||||
protected Long eventTime;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* 初始化数据的事件
|
|
||||||
*/
|
|
||||||
protected Boolean initEvent;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* 集群ID
|
|
||||||
*/
|
|
||||||
protected Long clusterPhyId;
|
|
||||||
|
|
||||||
protected BaseKafkaZKEvent(Long eventTime, Long clusterPhyId) {
|
|
||||||
this.eventTime = eventTime;
|
|
||||||
this.clusterPhyId = clusterPhyId;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,10 +0,0 @@
|
|||||||
package com.xiaojukeji.know.streaming.km.common.bean.event.kafka.zk;
|
|
||||||
|
|
||||||
import lombok.Getter;
|
|
||||||
|
|
||||||
@Getter
|
|
||||||
public class ControllerChangeEvent extends BaseKafkaZKEvent {
|
|
||||||
public ControllerChangeEvent(Long eventTime, Long clusterPhyId) {
|
|
||||||
super(eventTime, clusterPhyId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -42,4 +42,9 @@ public class BrokerPO extends BasePO {
|
|||||||
* Broker状态
|
* Broker状态
|
||||||
*/
|
*/
|
||||||
private Integer status;
|
private Integer status;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 监听信息
|
||||||
|
*/
|
||||||
|
private String endpointMap;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -14,4 +14,7 @@ import lombok.NoArgsConstructor;
|
|||||||
public class UserMetricConfigVO extends VersionItemVO {
|
public class UserMetricConfigVO extends VersionItemVO {
|
||||||
@ApiModelProperty(value = "该指标用户是否设置展现", example = "true")
|
@ApiModelProperty(value = "该指标用户是否设置展现", example = "true")
|
||||||
private Boolean set;
|
private Boolean set;
|
||||||
|
|
||||||
|
@ApiModelProperty(value = "该指标展示优先级", example = "1")
|
||||||
|
private Integer rank;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -29,6 +29,10 @@ public class MetricPointVO implements Comparable<MetricPointVO> {
|
|||||||
@Override
|
@Override
|
||||||
public int compareTo(MetricPointVO o) {
|
public int compareTo(MetricPointVO o) {
|
||||||
if(null == o){return 0;}
|
if(null == o){return 0;}
|
||||||
|
if(null == this.getTimeStamp()
|
||||||
|
|| null == o.getTimeStamp()){
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
return this.getTimeStamp().intValue() - o.getTimeStamp().intValue();
|
return this.getTimeStamp().intValue() - o.getTimeStamp().intValue();
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -42,6 +42,7 @@ public class Constant {
|
|||||||
*/
|
*/
|
||||||
public static final Integer DEFAULT_CLUSTER_HEALTH_SCORE = 90;
|
public static final Integer DEFAULT_CLUSTER_HEALTH_SCORE = 90;
|
||||||
|
|
||||||
|
|
||||||
public static final String DEFAULT_USER_NAME = "know-streaming-app";
|
public static final String DEFAULT_USER_NAME = "know-streaming-app";
|
||||||
|
|
||||||
public static final int INVALID_CODE = -1;
|
public static final int INVALID_CODE = -1;
|
||||||
@@ -63,4 +64,6 @@ public class Constant {
|
|||||||
public static final String COLLECT_METRICS_COST_TIME_METRICS_NAME = "CollectMetricsCostTimeUnitSec";
|
public static final String COLLECT_METRICS_COST_TIME_METRICS_NAME = "CollectMetricsCostTimeUnitSec";
|
||||||
public static final Float COLLECT_METRICS_ERROR_COST_TIME = -1.0F;
|
public static final Float COLLECT_METRICS_ERROR_COST_TIME = -1.0F;
|
||||||
|
|
||||||
|
public static final Integer DEFAULT_RETRY_TIME = 3;
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -0,0 +1,647 @@
|
|||||||
|
package com.xiaojukeji.know.streaming.km.common.constant;
|
||||||
|
|
||||||
|
public class ESIndexConstant {
|
||||||
|
|
||||||
|
public final static String TOPIC_INDEX = "ks_kafka_topic_metric";
|
||||||
|
public final static String TOPIC_TEMPLATE = "{\n" +
|
||||||
|
" \"order\" : 10,\n" +
|
||||||
|
" \"index_patterns\" : [\n" +
|
||||||
|
" \"ks_kafka_topic_metric*\"\n" +
|
||||||
|
" ],\n" +
|
||||||
|
" \"settings\" : {\n" +
|
||||||
|
" \"index\" : {\n" +
|
||||||
|
" \"number_of_shards\" : \"10\"\n" +
|
||||||
|
" }\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"mappings\" : {\n" +
|
||||||
|
" \"properties\" : {\n" +
|
||||||
|
" \"brokerId\" : {\n" +
|
||||||
|
" \"type\" : \"long\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"routingValue\" : {\n" +
|
||||||
|
" \"type\" : \"text\",\n" +
|
||||||
|
" \"fields\" : {\n" +
|
||||||
|
" \"keyword\" : {\n" +
|
||||||
|
" \"ignore_above\" : 256,\n" +
|
||||||
|
" \"type\" : \"keyword\"\n" +
|
||||||
|
" }\n" +
|
||||||
|
" }\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"topic\" : {\n" +
|
||||||
|
" \"type\" : \"keyword\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"clusterPhyId\" : {\n" +
|
||||||
|
" \"type\" : \"long\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"metrics\" : {\n" +
|
||||||
|
" \"properties\" : {\n" +
|
||||||
|
" \"BytesIn_min_15\" : {\n" +
|
||||||
|
" \"type\" : \"float\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"Messages\" : {\n" +
|
||||||
|
" \"type\" : \"float\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"BytesRejected\" : {\n" +
|
||||||
|
" \"type\" : \"float\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"PartitionURP\" : {\n" +
|
||||||
|
" \"type\" : \"float\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"HealthCheckTotal\" : {\n" +
|
||||||
|
" \"type\" : \"float\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"ReplicationCount\" : {\n" +
|
||||||
|
" \"type\" : \"float\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"ReplicationBytesOut\" : {\n" +
|
||||||
|
" \"type\" : \"float\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"ReplicationBytesIn\" : {\n" +
|
||||||
|
" \"type\" : \"float\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"FailedFetchRequests\" : {\n" +
|
||||||
|
" \"type\" : \"float\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"BytesIn_min_5\" : {\n" +
|
||||||
|
" \"type\" : \"float\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"HealthScore\" : {\n" +
|
||||||
|
" \"type\" : \"float\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"LogSize\" : {\n" +
|
||||||
|
" \"type\" : \"float\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"BytesOut\" : {\n" +
|
||||||
|
" \"type\" : \"float\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"BytesOut_min_15\" : {\n" +
|
||||||
|
" \"type\" : \"float\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"FailedProduceRequests\" : {\n" +
|
||||||
|
" \"type\" : \"float\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"BytesIn\" : {\n" +
|
||||||
|
" \"type\" : \"float\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"BytesOut_min_5\" : {\n" +
|
||||||
|
" \"type\" : \"float\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"MessagesIn\" : {\n" +
|
||||||
|
" \"type\" : \"float\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"TotalProduceRequests\" : {\n" +
|
||||||
|
" \"type\" : \"float\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"HealthCheckPassed\" : {\n" +
|
||||||
|
" \"type\" : \"float\"\n" +
|
||||||
|
" }\n" +
|
||||||
|
" }\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"brokerAgg\" : {\n" +
|
||||||
|
" \"type\" : \"keyword\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"key\" : {\n" +
|
||||||
|
" \"type\" : \"text\",\n" +
|
||||||
|
" \"fields\" : {\n" +
|
||||||
|
" \"keyword\" : {\n" +
|
||||||
|
" \"ignore_above\" : 256,\n" +
|
||||||
|
" \"type\" : \"keyword\"\n" +
|
||||||
|
" }\n" +
|
||||||
|
" }\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"timestamp\" : {\n" +
|
||||||
|
" \"format\" : \"yyyy-MM-dd HH:mm:ss Z||yyyy-MM-dd HH:mm:ss||yyyy-MM-dd HH:mm:ss.SSS Z||yyyy-MM-dd HH:mm:ss.SSS||yyyy-MM-dd HH:mm:ss,SSS||yyyy/MM/dd HH:mm:ss||yyyy-MM-dd HH:mm:ss,SSS Z||yyyy/MM/dd HH:mm:ss,SSS Z||epoch_millis\",\n" +
|
||||||
|
" \"index\" : true,\n" +
|
||||||
|
" \"type\" : \"date\",\n" +
|
||||||
|
" \"doc_values\" : true\n" +
|
||||||
|
" }\n" +
|
||||||
|
" }\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"aliases\" : { }\n" +
|
||||||
|
" }";
|
||||||
|
|
||||||
|
public final static String CLUSTER_INDEX = "ks_kafka_cluster_metric";
|
||||||
|
public final static String CLUSTER_TEMPLATE = "{\n" +
|
||||||
|
" \"order\" : 10,\n" +
|
||||||
|
" \"index_patterns\" : [\n" +
|
||||||
|
" \"ks_kafka_cluster_metric*\"\n" +
|
||||||
|
" ],\n" +
|
||||||
|
" \"settings\" : {\n" +
|
||||||
|
" \"index\" : {\n" +
|
||||||
|
" \"number_of_shards\" : \"10\"\n" +
|
||||||
|
" }\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"mappings\" : {\n" +
|
||||||
|
" \"properties\" : {\n" +
|
||||||
|
" \"routingValue\" : {\n" +
|
||||||
|
" \"type\" : \"text\",\n" +
|
||||||
|
" \"fields\" : {\n" +
|
||||||
|
" \"keyword\" : {\n" +
|
||||||
|
" \"ignore_above\" : 256,\n" +
|
||||||
|
" \"type\" : \"keyword\"\n" +
|
||||||
|
" }\n" +
|
||||||
|
" }\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"clusterPhyId\" : {\n" +
|
||||||
|
" \"type\" : \"long\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"metrics\" : {\n" +
|
||||||
|
" \"properties\" : {\n" +
|
||||||
|
" \"Connections\" : {\n" +
|
||||||
|
" \"type\" : \"double\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"BytesIn_min_15\" : {\n" +
|
||||||
|
" \"type\" : \"double\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"PartitionURP\" : {\n" +
|
||||||
|
" \"type\" : \"double\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"HealthScore_Topics\" : {\n" +
|
||||||
|
" \"type\" : \"double\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"EventQueueSize\" : {\n" +
|
||||||
|
" \"type\" : \"double\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"ActiveControllerCount\" : {\n" +
|
||||||
|
" \"type\" : \"double\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"GroupDeads\" : {\n" +
|
||||||
|
" \"type\" : \"double\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"BytesIn_min_5\" : {\n" +
|
||||||
|
" \"type\" : \"double\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"HealthCheckTotal_Topics\" : {\n" +
|
||||||
|
" \"type\" : \"double\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"Partitions\" : {\n" +
|
||||||
|
" \"type\" : \"double\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"BytesOut\" : {\n" +
|
||||||
|
" \"type\" : \"double\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"Groups\" : {\n" +
|
||||||
|
" \"type\" : \"double\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"BytesOut_min_15\" : {\n" +
|
||||||
|
" \"type\" : \"double\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"TotalRequestQueueSize\" : {\n" +
|
||||||
|
" \"type\" : \"double\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"HealthCheckPassed_Groups\" : {\n" +
|
||||||
|
" \"type\" : \"double\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"TotalProduceRequests\" : {\n" +
|
||||||
|
" \"type\" : \"double\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"HealthCheckPassed\" : {\n" +
|
||||||
|
" \"type\" : \"double\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"TotalLogSize\" : {\n" +
|
||||||
|
" \"type\" : \"double\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"GroupEmptys\" : {\n" +
|
||||||
|
" \"type\" : \"double\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"PartitionNoLeader\" : {\n" +
|
||||||
|
" \"type\" : \"double\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"HealthScore_Brokers\" : {\n" +
|
||||||
|
" \"type\" : \"double\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"Messages\" : {\n" +
|
||||||
|
" \"type\" : \"double\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"Topics\" : {\n" +
|
||||||
|
" \"type\" : \"double\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"PartitionMinISR_E\" : {\n" +
|
||||||
|
" \"type\" : \"double\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"HealthCheckTotal\" : {\n" +
|
||||||
|
" \"type\" : \"double\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"Brokers\" : {\n" +
|
||||||
|
" \"type\" : \"double\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"Replicas\" : {\n" +
|
||||||
|
" \"type\" : \"double\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"HealthCheckTotal_Groups\" : {\n" +
|
||||||
|
" \"type\" : \"double\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"GroupRebalances\" : {\n" +
|
||||||
|
" \"type\" : \"double\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"MessageIn\" : {\n" +
|
||||||
|
" \"type\" : \"double\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"HealthScore\" : {\n" +
|
||||||
|
" \"type\" : \"double\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"HealthCheckPassed_Topics\" : {\n" +
|
||||||
|
" \"type\" : \"double\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"HealthCheckTotal_Brokers\" : {\n" +
|
||||||
|
" \"type\" : \"double\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"PartitionMinISR_S\" : {\n" +
|
||||||
|
" \"type\" : \"double\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"BytesIn\" : {\n" +
|
||||||
|
" \"type\" : \"double\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"BytesOut_min_5\" : {\n" +
|
||||||
|
" \"type\" : \"double\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"GroupActives\" : {\n" +
|
||||||
|
" \"type\" : \"double\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"MessagesIn\" : {\n" +
|
||||||
|
" \"type\" : \"double\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"GroupReBalances\" : {\n" +
|
||||||
|
" \"type\" : \"double\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"HealthCheckPassed_Brokers\" : {\n" +
|
||||||
|
" \"type\" : \"double\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"HealthScore_Groups\" : {\n" +
|
||||||
|
" \"type\" : \"double\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"TotalResponseQueueSize\" : {\n" +
|
||||||
|
" \"type\" : \"double\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"Zookeepers\" : {\n" +
|
||||||
|
" \"type\" : \"double\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"LeaderMessages\" : {\n" +
|
||||||
|
" \"type\" : \"double\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"HealthScore_Cluster\" : {\n" +
|
||||||
|
" \"type\" : \"double\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"HealthCheckPassed_Cluster\" : {\n" +
|
||||||
|
" \"type\" : \"double\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"HealthCheckTotal_Cluster\" : {\n" +
|
||||||
|
" \"type\" : \"double\"\n" +
|
||||||
|
" }\n" +
|
||||||
|
" }\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"key\" : {\n" +
|
||||||
|
" \"type\" : \"text\",\n" +
|
||||||
|
" \"fields\" : {\n" +
|
||||||
|
" \"keyword\" : {\n" +
|
||||||
|
" \"ignore_above\" : 256,\n" +
|
||||||
|
" \"type\" : \"keyword\"\n" +
|
||||||
|
" }\n" +
|
||||||
|
" }\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"timestamp\" : {\n" +
|
||||||
|
" \"format\" : \"yyyy-MM-dd HH:mm:ss Z||yyyy-MM-dd HH:mm:ss||yyyy-MM-dd HH:mm:ss.SSS Z||yyyy-MM-dd HH:mm:ss.SSS||yyyy-MM-dd HH:mm:ss,SSS||yyyy/MM/dd HH:mm:ss||yyyy-MM-dd HH:mm:ss,SSS Z||yyyy/MM/dd HH:mm:ss,SSS Z||epoch_millis\",\n" +
|
||||||
|
" \"type\" : \"date\"\n" +
|
||||||
|
" }\n" +
|
||||||
|
" }\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"aliases\" : { }\n" +
|
||||||
|
" }";
|
||||||
|
|
||||||
|
public final static String BROKER_INDEX = "ks_kafka_broker_metric";
|
||||||
|
public final static String BROKER_TEMPLATE = "{\n" +
|
||||||
|
" \"order\" : 10,\n" +
|
||||||
|
" \"index_patterns\" : [\n" +
|
||||||
|
" \"ks_kafka_broker_metric*\"\n" +
|
||||||
|
" ],\n" +
|
||||||
|
" \"settings\" : {\n" +
|
||||||
|
" \"index\" : {\n" +
|
||||||
|
" \"number_of_shards\" : \"10\"\n" +
|
||||||
|
" }\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"mappings\" : {\n" +
|
||||||
|
" \"properties\" : {\n" +
|
||||||
|
" \"brokerId\" : {\n" +
|
||||||
|
" \"type\" : \"long\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"routingValue\" : {\n" +
|
||||||
|
" \"type\" : \"text\",\n" +
|
||||||
|
" \"fields\" : {\n" +
|
||||||
|
" \"keyword\" : {\n" +
|
||||||
|
" \"ignore_above\" : 256,\n" +
|
||||||
|
" \"type\" : \"keyword\"\n" +
|
||||||
|
" }\n" +
|
||||||
|
" }\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"clusterPhyId\" : {\n" +
|
||||||
|
" \"type\" : \"long\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"metrics\" : {\n" +
|
||||||
|
" \"properties\" : {\n" +
|
||||||
|
" \"NetworkProcessorAvgIdle\" : {\n" +
|
||||||
|
" \"type\" : \"float\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"UnderReplicatedPartitions\" : {\n" +
|
||||||
|
" \"type\" : \"float\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"BytesIn_min_15\" : {\n" +
|
||||||
|
" \"type\" : \"float\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"HealthCheckTotal\" : {\n" +
|
||||||
|
" \"type\" : \"float\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"RequestHandlerAvgIdle\" : {\n" +
|
||||||
|
" \"type\" : \"float\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"connectionsCount\" : {\n" +
|
||||||
|
" \"type\" : \"float\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"BytesIn_min_5\" : {\n" +
|
||||||
|
" \"type\" : \"float\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"HealthScore\" : {\n" +
|
||||||
|
" \"type\" : \"float\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"BytesOut\" : {\n" +
|
||||||
|
" \"type\" : \"float\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"BytesOut_min_15\" : {\n" +
|
||||||
|
" \"type\" : \"float\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"BytesIn\" : {\n" +
|
||||||
|
" \"type\" : \"float\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"BytesOut_min_5\" : {\n" +
|
||||||
|
" \"type\" : \"float\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"TotalRequestQueueSize\" : {\n" +
|
||||||
|
" \"type\" : \"float\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"MessagesIn\" : {\n" +
|
||||||
|
" \"type\" : \"float\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"TotalProduceRequests\" : {\n" +
|
||||||
|
" \"type\" : \"float\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"HealthCheckPassed\" : {\n" +
|
||||||
|
" \"type\" : \"float\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"TotalResponseQueueSize\" : {\n" +
|
||||||
|
" \"type\" : \"float\"\n" +
|
||||||
|
" }\n" +
|
||||||
|
" }\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"key\" : {\n" +
|
||||||
|
" \"type\" : \"text\",\n" +
|
||||||
|
" \"fields\" : {\n" +
|
||||||
|
" \"keyword\" : {\n" +
|
||||||
|
" \"ignore_above\" : 256,\n" +
|
||||||
|
" \"type\" : \"keyword\"\n" +
|
||||||
|
" }\n" +
|
||||||
|
" }\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"timestamp\" : {\n" +
|
||||||
|
" \"format\" : \"yyyy-MM-dd HH:mm:ss Z||yyyy-MM-dd HH:mm:ss||yyyy-MM-dd HH:mm:ss.SSS Z||yyyy-MM-dd HH:mm:ss.SSS||yyyy-MM-dd HH:mm:ss,SSS||yyyy/MM/dd HH:mm:ss||yyyy-MM-dd HH:mm:ss,SSS Z||yyyy/MM/dd HH:mm:ss,SSS Z||epoch_millis\",\n" +
|
||||||
|
" \"index\" : true,\n" +
|
||||||
|
" \"type\" : \"date\",\n" +
|
||||||
|
" \"doc_values\" : true\n" +
|
||||||
|
" }\n" +
|
||||||
|
" }\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"aliases\" : { }\n" +
|
||||||
|
" }";
|
||||||
|
|
||||||
|
public final static String PARTITION_INDEX = "ks_kafka_partition_metric";
|
||||||
|
public final static String PARTITION_TEMPLATE = "{\n" +
|
||||||
|
" \"order\" : 10,\n" +
|
||||||
|
" \"index_patterns\" : [\n" +
|
||||||
|
" \"ks_kafka_partition_metric*\"\n" +
|
||||||
|
" ],\n" +
|
||||||
|
" \"settings\" : {\n" +
|
||||||
|
" \"index\" : {\n" +
|
||||||
|
" \"number_of_shards\" : \"10\"\n" +
|
||||||
|
" }\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"mappings\" : {\n" +
|
||||||
|
" \"properties\" : {\n" +
|
||||||
|
" \"brokerId\" : {\n" +
|
||||||
|
" \"type\" : \"long\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"partitionId\" : {\n" +
|
||||||
|
" \"type\" : \"long\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"routingValue\" : {\n" +
|
||||||
|
" \"type\" : \"text\",\n" +
|
||||||
|
" \"fields\" : {\n" +
|
||||||
|
" \"keyword\" : {\n" +
|
||||||
|
" \"ignore_above\" : 256,\n" +
|
||||||
|
" \"type\" : \"keyword\"\n" +
|
||||||
|
" }\n" +
|
||||||
|
" }\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"clusterPhyId\" : {\n" +
|
||||||
|
" \"type\" : \"long\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"topic\" : {\n" +
|
||||||
|
" \"type\" : \"keyword\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"metrics\" : {\n" +
|
||||||
|
" \"properties\" : {\n" +
|
||||||
|
" \"LogStartOffset\" : {\n" +
|
||||||
|
" \"type\" : \"float\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"Messages\" : {\n" +
|
||||||
|
" \"type\" : \"float\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"LogEndOffset\" : {\n" +
|
||||||
|
" \"type\" : \"float\"\n" +
|
||||||
|
" }\n" +
|
||||||
|
" }\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"key\" : {\n" +
|
||||||
|
" \"type\" : \"text\",\n" +
|
||||||
|
" \"fields\" : {\n" +
|
||||||
|
" \"keyword\" : {\n" +
|
||||||
|
" \"ignore_above\" : 256,\n" +
|
||||||
|
" \"type\" : \"keyword\"\n" +
|
||||||
|
" }\n" +
|
||||||
|
" }\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"timestamp\" : {\n" +
|
||||||
|
" \"format\" : \"yyyy-MM-dd HH:mm:ss Z||yyyy-MM-dd HH:mm:ss||yyyy-MM-dd HH:mm:ss.SSS Z||yyyy-MM-dd HH:mm:ss.SSS||yyyy-MM-dd HH:mm:ss,SSS||yyyy/MM/dd HH:mm:ss||yyyy-MM-dd HH:mm:ss,SSS Z||yyyy/MM/dd HH:mm:ss,SSS Z||epoch_millis\",\n" +
|
||||||
|
" \"index\" : true,\n" +
|
||||||
|
" \"type\" : \"date\",\n" +
|
||||||
|
" \"doc_values\" : true\n" +
|
||||||
|
" }\n" +
|
||||||
|
" }\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"aliases\" : { }\n" +
|
||||||
|
" }";
|
||||||
|
|
||||||
|
public final static String GROUP_INDEX = "ks_kafka_group_metric";
|
||||||
|
public final static String GROUP_TEMPLATE = "{\n" +
|
||||||
|
" \"order\" : 10,\n" +
|
||||||
|
" \"index_patterns\" : [\n" +
|
||||||
|
" \"ks_kafka_group_metric*\"\n" +
|
||||||
|
" ],\n" +
|
||||||
|
" \"settings\" : {\n" +
|
||||||
|
" \"index\" : {\n" +
|
||||||
|
" \"number_of_shards\" : \"10\"\n" +
|
||||||
|
" }\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"mappings\" : {\n" +
|
||||||
|
" \"properties\" : {\n" +
|
||||||
|
" \"group\" : {\n" +
|
||||||
|
" \"type\" : \"keyword\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"partitionId\" : {\n" +
|
||||||
|
" \"type\" : \"long\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"routingValue\" : {\n" +
|
||||||
|
" \"type\" : \"text\",\n" +
|
||||||
|
" \"fields\" : {\n" +
|
||||||
|
" \"keyword\" : {\n" +
|
||||||
|
" \"ignore_above\" : 256,\n" +
|
||||||
|
" \"type\" : \"keyword\"\n" +
|
||||||
|
" }\n" +
|
||||||
|
" }\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"clusterPhyId\" : {\n" +
|
||||||
|
" \"type\" : \"long\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"topic\" : {\n" +
|
||||||
|
" \"type\" : \"keyword\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"metrics\" : {\n" +
|
||||||
|
" \"properties\" : {\n" +
|
||||||
|
" \"HealthScore\" : {\n" +
|
||||||
|
" \"type\" : \"float\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"Lag\" : {\n" +
|
||||||
|
" \"type\" : \"float\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"OffsetConsumed\" : {\n" +
|
||||||
|
" \"type\" : \"float\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"HealthCheckTotal\" : {\n" +
|
||||||
|
" \"type\" : \"float\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"HealthCheckPassed\" : {\n" +
|
||||||
|
" \"type\" : \"float\"\n" +
|
||||||
|
" }\n" +
|
||||||
|
" }\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"groupMetric\" : {\n" +
|
||||||
|
" \"type\" : \"keyword\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"key\" : {\n" +
|
||||||
|
" \"type\" : \"text\",\n" +
|
||||||
|
" \"fields\" : {\n" +
|
||||||
|
" \"keyword\" : {\n" +
|
||||||
|
" \"ignore_above\" : 256,\n" +
|
||||||
|
" \"type\" : \"keyword\"\n" +
|
||||||
|
" }\n" +
|
||||||
|
" }\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"timestamp\" : {\n" +
|
||||||
|
" \"format\" : \"yyyy-MM-dd HH:mm:ss Z||yyyy-MM-dd HH:mm:ss||yyyy-MM-dd HH:mm:ss.SSS Z||yyyy-MM-dd HH:mm:ss.SSS||yyyy-MM-dd HH:mm:ss,SSS||yyyy/MM/dd HH:mm:ss||yyyy-MM-dd HH:mm:ss,SSS Z||yyyy/MM/dd HH:mm:ss,SSS Z||epoch_millis\",\n" +
|
||||||
|
" \"index\" : true,\n" +
|
||||||
|
" \"type\" : \"date\",\n" +
|
||||||
|
" \"doc_values\" : true\n" +
|
||||||
|
" }\n" +
|
||||||
|
" }\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"aliases\" : { }\n" +
|
||||||
|
" }";
|
||||||
|
|
||||||
|
public final static String REPLICATION_INDEX = "ks_kafka_replication_metric";
|
||||||
|
public final static String REPLICATION_TEMPLATE = "{\n" +
|
||||||
|
" \"order\" : 10,\n" +
|
||||||
|
" \"index_patterns\" : [\n" +
|
||||||
|
" \"ks_kafka_partition_metric*\"\n" +
|
||||||
|
" ],\n" +
|
||||||
|
" \"settings\" : {\n" +
|
||||||
|
" \"index\" : {\n" +
|
||||||
|
" \"number_of_shards\" : \"10\"\n" +
|
||||||
|
" }\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"mappings\" : {\n" +
|
||||||
|
" \"properties\" : {\n" +
|
||||||
|
" \"brokerId\" : {\n" +
|
||||||
|
" \"type\" : \"long\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"partitionId\" : {\n" +
|
||||||
|
" \"type\" : \"long\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"routingValue\" : {\n" +
|
||||||
|
" \"type\" : \"text\",\n" +
|
||||||
|
" \"fields\" : {\n" +
|
||||||
|
" \"keyword\" : {\n" +
|
||||||
|
" \"ignore_above\" : 256,\n" +
|
||||||
|
" \"type\" : \"keyword\"\n" +
|
||||||
|
" }\n" +
|
||||||
|
" }\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"clusterPhyId\" : {\n" +
|
||||||
|
" \"type\" : \"long\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"topic\" : {\n" +
|
||||||
|
" \"type\" : \"keyword\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"metrics\" : {\n" +
|
||||||
|
" \"properties\" : {\n" +
|
||||||
|
" \"LogStartOffset\" : {\n" +
|
||||||
|
" \"type\" : \"float\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"Messages\" : {\n" +
|
||||||
|
" \"type\" : \"float\"\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"LogEndOffset\" : {\n" +
|
||||||
|
" \"type\" : \"float\"\n" +
|
||||||
|
" }\n" +
|
||||||
|
" }\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"key\" : {\n" +
|
||||||
|
" \"type\" : \"text\",\n" +
|
||||||
|
" \"fields\" : {\n" +
|
||||||
|
" \"keyword\" : {\n" +
|
||||||
|
" \"ignore_above\" : 256,\n" +
|
||||||
|
" \"type\" : \"keyword\"\n" +
|
||||||
|
" }\n" +
|
||||||
|
" }\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"timestamp\" : {\n" +
|
||||||
|
" \"format\" : \"yyyy-MM-dd HH:mm:ss Z||yyyy-MM-dd HH:mm:ss||yyyy-MM-dd HH:mm:ss.SSS Z||yyyy-MM-dd HH:mm:ss.SSS||yyyy-MM-dd HH:mm:ss,SSS||yyyy/MM/dd HH:mm:ss||yyyy-MM-dd HH:mm:ss,SSS Z||yyyy/MM/dd HH:mm:ss,SSS Z||epoch_millis\",\n" +
|
||||||
|
" \"index\" : true,\n" +
|
||||||
|
" \"type\" : \"date\",\n" +
|
||||||
|
" \"doc_values\" : true\n" +
|
||||||
|
" }\n" +
|
||||||
|
" }\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"aliases\" : { }\n" +
|
||||||
|
" }[root@10-255-0-23 template]# cat ks_kafka_replication_metric\n" +
|
||||||
|
"PUT _template/ks_kafka_replication_metric\n" +
|
||||||
|
"{\n" +
|
||||||
|
" \"order\" : 10,\n" +
|
||||||
|
" \"index_patterns\" : [\n" +
|
||||||
|
" \"ks_kafka_replication_metric*\"\n" +
|
||||||
|
" ],\n" +
|
||||||
|
" \"settings\" : {\n" +
|
||||||
|
" \"index\" : {\n" +
|
||||||
|
" \"number_of_shards\" : \"10\"\n" +
|
||||||
|
" }\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"mappings\" : {\n" +
|
||||||
|
" \"properties\" : {\n" +
|
||||||
|
" \"timestamp\" : {\n" +
|
||||||
|
" \"format\" : \"yyyy-MM-dd HH:mm:ss Z||yyyy-MM-dd HH:mm:ss||yyyy-MM-dd HH:mm:ss.SSS Z||yyyy-MM-dd HH:mm:ss.SSS||yyyy-MM-dd HH:mm:ss,SSS||yyyy/MM/dd HH:mm:ss||yyyy-MM-dd HH:mm:ss,SSS Z||yyyy/MM/dd HH:mm:ss,SSS Z||epoch_millis\",\n" +
|
||||||
|
" \"index\" : true,\n" +
|
||||||
|
" \"type\" : \"date\",\n" +
|
||||||
|
" \"doc_values\" : true\n" +
|
||||||
|
" }\n" +
|
||||||
|
" }\n" +
|
||||||
|
" },\n" +
|
||||||
|
" \"aliases\" : { }\n" +
|
||||||
|
" }";
|
||||||
|
|
||||||
|
}
|
||||||
@@ -33,7 +33,7 @@ public class KafkaConstant {
|
|||||||
|
|
||||||
public static final Integer DATA_VERSION_ONE = 1;
|
public static final Integer DATA_VERSION_ONE = 1;
|
||||||
|
|
||||||
public static final Integer ADMIN_CLIENT_REQUEST_TIME_OUT_UNIT_MS = 3000;
|
public static final Integer ADMIN_CLIENT_REQUEST_TIME_OUT_UNIT_MS = 5000;
|
||||||
|
|
||||||
public static final Integer KAFKA_SASL_SCRAM_ITERATIONS = 8192;
|
public static final Integer KAFKA_SASL_SCRAM_ITERATIONS = 8192;
|
||||||
|
|
||||||
@@ -41,6 +41,8 @@ public class KafkaConstant {
|
|||||||
|
|
||||||
public static final Long POLL_ONCE_TIMEOUT_UNIT_MS = 2000L;
|
public static final Long POLL_ONCE_TIMEOUT_UNIT_MS = 2000L;
|
||||||
|
|
||||||
|
public static final String CONTROLLER_ROLE = "controller";
|
||||||
|
|
||||||
public static final Map<String, ConfigDef.ConfigKey> KAFKA_ALL_CONFIG_DEF_MAP = new ConcurrentHashMap<>();
|
public static final Map<String, ConfigDef.ConfigKey> KAFKA_ALL_CONFIG_DEF_MAP = new ConcurrentHashMap<>();
|
||||||
|
|
||||||
static {
|
static {
|
||||||
|
|||||||
@@ -52,6 +52,10 @@ public class MsgConstant {
|
|||||||
|
|
||||||
/**************************************************** Partition ****************************************************/
|
/**************************************************** Partition ****************************************************/
|
||||||
|
|
||||||
|
public static String getPartitionNoLeader(Long clusterPhyId, String topicName) {
|
||||||
|
return String.format("集群ID:[%d] Topic名称:[%s] 所有分区NoLeader", clusterPhyId, topicName);
|
||||||
|
}
|
||||||
|
|
||||||
public static String getPartitionNotExist(Long clusterPhyId, String topicName) {
|
public static String getPartitionNotExist(Long clusterPhyId, String topicName) {
|
||||||
return String.format("集群ID:[%d] Topic名称:[%s] 存在非法的分区ID", clusterPhyId, topicName);
|
return String.format("集群ID:[%d] Topic名称:[%s] 存在非法的分区ID", clusterPhyId, topicName);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -170,6 +170,7 @@ public class ReassignConverter {
|
|||||||
detail.setOriginalBrokerIdList(CommonUtils.string2IntList(subJobPO.getOriginalBrokerIds()));
|
detail.setOriginalBrokerIdList(CommonUtils.string2IntList(subJobPO.getOriginalBrokerIds()));
|
||||||
detail.setReassignBrokerIdList(CommonUtils.string2IntList(subJobPO.getReassignBrokerIds()));
|
detail.setReassignBrokerIdList(CommonUtils.string2IntList(subJobPO.getReassignBrokerIds()));
|
||||||
detail.setStatus(subJobPO.getStatus());
|
detail.setStatus(subJobPO.getStatus());
|
||||||
|
detail.setOldReplicaNum(detail.getOriginalBrokerIdList().size());
|
||||||
|
|
||||||
ReassignSubJobExtendData extendData = ConvertUtil.str2ObjByJson(subJobPO.getExtendData(), ReassignSubJobExtendData.class);
|
ReassignSubJobExtendData extendData = ConvertUtil.str2ObjByJson(subJobPO.getExtendData(), ReassignSubJobExtendData.class);
|
||||||
if (extendData != null) {
|
if (extendData != null) {
|
||||||
@@ -217,6 +218,7 @@ public class ReassignConverter {
|
|||||||
|
|
||||||
topicDetail.setPresentReplicaNum(partitionDetailList.get(0).getPresentReplicaNum());
|
topicDetail.setPresentReplicaNum(partitionDetailList.get(0).getPresentReplicaNum());
|
||||||
topicDetail.setNewReplicaNum(partitionDetailList.get(0).getNewReplicaNum());
|
topicDetail.setNewReplicaNum(partitionDetailList.get(0).getNewReplicaNum());
|
||||||
|
topicDetail.setOldReplicaNum(partitionDetailList.get(0).getOldReplicaNum());
|
||||||
topicDetail.setOriginalRetentionTimeUnitMs(partitionDetailList.get(0).getOriginalRetentionTimeUnitMs());
|
topicDetail.setOriginalRetentionTimeUnitMs(partitionDetailList.get(0).getOriginalRetentionTimeUnitMs());
|
||||||
topicDetail.setReassignRetentionTimeUnitMs(partitionDetailList.get(0).getReassignRetentionTimeUnitMs());
|
topicDetail.setReassignRetentionTimeUnitMs(partitionDetailList.get(0).getReassignRetentionTimeUnitMs());
|
||||||
|
|
||||||
|
|||||||
@@ -3,19 +3,19 @@ package com.xiaojukeji.know.streaming.km.common.enums;
|
|||||||
import lombok.Getter;
|
import lombok.Getter;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* 重置offset
|
* offset类型
|
||||||
* @author zengqiao
|
* @author zengqiao
|
||||||
* @date 19/4/8
|
* @date 19/4/8
|
||||||
*/
|
*/
|
||||||
@Getter
|
@Getter
|
||||||
public enum GroupOffsetResetEnum {
|
public enum OffsetTypeEnum {
|
||||||
LATEST(0, "重置到最新"),
|
LATEST(0, "最新"),
|
||||||
|
|
||||||
EARLIEST(1, "重置到最旧"),
|
EARLIEST(1, "最旧"),
|
||||||
|
|
||||||
PRECISE_TIMESTAMP(2, "按时间进行重置"),
|
PRECISE_TIMESTAMP(2, "指定时间"),
|
||||||
|
|
||||||
PRECISE_OFFSET(3, "重置到指定位置"),
|
PRECISE_OFFSET(3, "指定位置"),
|
||||||
|
|
||||||
;
|
;
|
||||||
|
|
||||||
@@ -23,7 +23,7 @@ public enum GroupOffsetResetEnum {
|
|||||||
|
|
||||||
private final String message;
|
private final String message;
|
||||||
|
|
||||||
GroupOffsetResetEnum(int resetType, String message) {
|
OffsetTypeEnum(int resetType, String message) {
|
||||||
this.resetType = resetType;
|
this.resetType = resetType;
|
||||||
this.message = message;
|
this.message = message;
|
||||||
}
|
}
|
||||||
@@ -26,7 +26,7 @@ public enum HealthCheckNameEnum {
|
|||||||
HealthCheckDimensionEnum.CLUSTER,
|
HealthCheckDimensionEnum.CLUSTER,
|
||||||
"Controller",
|
"Controller",
|
||||||
Constant.HC_CONFIG_NAME_PREFIX + "CLUSTER_NO_CONTROLLER",
|
Constant.HC_CONFIG_NAME_PREFIX + "CLUSTER_NO_CONTROLLER",
|
||||||
"集群Controller数错误",
|
"集群Controller数正常",
|
||||||
HealthCompareValueConfig.class
|
HealthCompareValueConfig.class
|
||||||
),
|
),
|
||||||
|
|
||||||
@@ -34,7 +34,7 @@ public enum HealthCheckNameEnum {
|
|||||||
HealthCheckDimensionEnum.BROKER,
|
HealthCheckDimensionEnum.BROKER,
|
||||||
"RequestQueueSize",
|
"RequestQueueSize",
|
||||||
Constant.HC_CONFIG_NAME_PREFIX + "BROKER_REQUEST_QUEUE_FULL",
|
Constant.HC_CONFIG_NAME_PREFIX + "BROKER_REQUEST_QUEUE_FULL",
|
||||||
"Broker-RequestQueueSize被打满",
|
"Broker-RequestQueueSize指标",
|
||||||
HealthCompareValueConfig.class
|
HealthCompareValueConfig.class
|
||||||
),
|
),
|
||||||
|
|
||||||
@@ -42,7 +42,7 @@ public enum HealthCheckNameEnum {
|
|||||||
HealthCheckDimensionEnum.BROKER,
|
HealthCheckDimensionEnum.BROKER,
|
||||||
"NetworkProcessorAvgIdlePercent",
|
"NetworkProcessorAvgIdlePercent",
|
||||||
Constant.HC_CONFIG_NAME_PREFIX + "BROKER_NETWORK_PROCESSOR_AVG_IDLE_TOO_LOW",
|
Constant.HC_CONFIG_NAME_PREFIX + "BROKER_NETWORK_PROCESSOR_AVG_IDLE_TOO_LOW",
|
||||||
"Broker-NetworkProcessorAvgIdlePercent的Idle过低",
|
"Broker-NetworkProcessorAvgIdlePercent指标",
|
||||||
HealthCompareValueConfig.class
|
HealthCompareValueConfig.class
|
||||||
),
|
),
|
||||||
|
|
||||||
@@ -50,7 +50,7 @@ public enum HealthCheckNameEnum {
|
|||||||
HealthCheckDimensionEnum.GROUP,
|
HealthCheckDimensionEnum.GROUP,
|
||||||
"Group Re-Balance",
|
"Group Re-Balance",
|
||||||
Constant.HC_CONFIG_NAME_PREFIX + "GROUP_RE_BALANCE_TOO_FREQUENTLY",
|
Constant.HC_CONFIG_NAME_PREFIX + "GROUP_RE_BALANCE_TOO_FREQUENTLY",
|
||||||
"Group re-balance太频繁",
|
"Group re-balance频率",
|
||||||
HealthDetectedInLatestMinutesConfig.class
|
HealthDetectedInLatestMinutesConfig.class
|
||||||
),
|
),
|
||||||
|
|
||||||
@@ -66,7 +66,7 @@ public enum HealthCheckNameEnum {
|
|||||||
HealthCheckDimensionEnum.TOPIC,
|
HealthCheckDimensionEnum.TOPIC,
|
||||||
"UnderReplicaTooLong",
|
"UnderReplicaTooLong",
|
||||||
Constant.HC_CONFIG_NAME_PREFIX + "TOPIC_UNDER_REPLICA_TOO_LONG",
|
Constant.HC_CONFIG_NAME_PREFIX + "TOPIC_UNDER_REPLICA_TOO_LONG",
|
||||||
"Topic 长期处于未同步状态",
|
"Topic 未同步持续时间",
|
||||||
HealthDetectedInLatestMinutesConfig.class
|
HealthDetectedInLatestMinutesConfig.class
|
||||||
),
|
),
|
||||||
|
|
||||||
|
|||||||
@@ -1,54 +0,0 @@
|
|||||||
package com.xiaojukeji.know.streaming.km.common.enums.metric;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @author: D10865
|
|
||||||
* @description:
|
|
||||||
* @date: Create on 2019/3/11 下午2:19
|
|
||||||
* @modified By D10865
|
|
||||||
*
|
|
||||||
* 不同维度的es监控数据
|
|
||||||
*/
|
|
||||||
public enum KafkaMetricIndexEnum {
|
|
||||||
|
|
||||||
/**
|
|
||||||
* topic 维度
|
|
||||||
*/
|
|
||||||
TOPIC_INFO("ks_kafka_topic_metric"),
|
|
||||||
|
|
||||||
/**
|
|
||||||
* 集群 维度
|
|
||||||
*/
|
|
||||||
CLUSTER_INFO("ks_kafka_cluster_metric"),
|
|
||||||
|
|
||||||
/**
|
|
||||||
* broker 维度
|
|
||||||
*/
|
|
||||||
BROKER_INFO("ks_kafka_broker_metric"),
|
|
||||||
|
|
||||||
/**
|
|
||||||
* partition 维度
|
|
||||||
*/
|
|
||||||
PARTITION_INFO("ks_kafka_partition_metric"),
|
|
||||||
|
|
||||||
/**
|
|
||||||
* group 维度
|
|
||||||
*/
|
|
||||||
GROUP_INFO("ks_kafka_group_metric"),
|
|
||||||
|
|
||||||
/**
|
|
||||||
* replication 维度
|
|
||||||
*/
|
|
||||||
REPLICATION_INFO("ks_kafka_replication_metric"),
|
|
||||||
|
|
||||||
;
|
|
||||||
|
|
||||||
private String index;
|
|
||||||
|
|
||||||
KafkaMetricIndexEnum(String index) {
|
|
||||||
this.index = index;
|
|
||||||
}
|
|
||||||
|
|
||||||
public String getIndex() {
|
|
||||||
return index;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -31,9 +31,11 @@ public enum VersionItemTypeEnum {
|
|||||||
|
|
||||||
|
|
||||||
SERVICE_OP_PARTITION(320, "service_partition_operation"),
|
SERVICE_OP_PARTITION(320, "service_partition_operation"),
|
||||||
|
SERVICE_OP_PARTITION_LEADER(321, "service_partition-leader_operation"),
|
||||||
|
|
||||||
SERVICE_OP_REASSIGNMENT(330, "service_reassign_operation"),
|
SERVICE_OP_REASSIGNMENT(330, "service_reassign_operation"),
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* 前端操作
|
* 前端操作
|
||||||
*/
|
*/
|
||||||
|
|||||||
@@ -90,6 +90,8 @@ public class JmxConnectorWrap {
|
|||||||
}
|
}
|
||||||
try {
|
try {
|
||||||
jmxConnector.close();
|
jmxConnector.close();
|
||||||
|
|
||||||
|
jmxConnector = null;
|
||||||
} catch (IOException e) {
|
} catch (IOException e) {
|
||||||
LOGGER.warn("close JmxConnector exception, physicalClusterId:{} brokerId:{} host:{} port:{}.", physicalClusterId, brokerId, host, port, e);
|
LOGGER.warn("close JmxConnector exception, physicalClusterId:{} brokerId:{} host:{} port:{}.", physicalClusterId, brokerId, host, port, e);
|
||||||
}
|
}
|
||||||
@@ -105,6 +107,11 @@ public class JmxConnectorWrap {
|
|||||||
acquire();
|
acquire();
|
||||||
MBeanServerConnection mBeanServerConnection = jmxConnector.getMBeanServerConnection();
|
MBeanServerConnection mBeanServerConnection = jmxConnector.getMBeanServerConnection();
|
||||||
return mBeanServerConnection.getAttribute(name, attribute);
|
return mBeanServerConnection.getAttribute(name, attribute);
|
||||||
|
} catch (IOException ioe) {
|
||||||
|
// 如果是因为连接断开,则进行重新连接,并抛出异常
|
||||||
|
reInitDueIOException();
|
||||||
|
|
||||||
|
throw ioe;
|
||||||
} finally {
|
} finally {
|
||||||
atomicInteger.incrementAndGet();
|
atomicInteger.incrementAndGet();
|
||||||
}
|
}
|
||||||
@@ -120,6 +127,11 @@ public class JmxConnectorWrap {
|
|||||||
acquire();
|
acquire();
|
||||||
MBeanServerConnection mBeanServerConnection = jmxConnector.getMBeanServerConnection();
|
MBeanServerConnection mBeanServerConnection = jmxConnector.getMBeanServerConnection();
|
||||||
return mBeanServerConnection.getAttributes(name, attributes);
|
return mBeanServerConnection.getAttributes(name, attributes);
|
||||||
|
} catch (IOException ioe) {
|
||||||
|
// 如果是因为连接断开,则进行重新连接,并抛出异常
|
||||||
|
reInitDueIOException();
|
||||||
|
|
||||||
|
throw ioe;
|
||||||
} finally {
|
} finally {
|
||||||
atomicInteger.incrementAndGet();
|
atomicInteger.incrementAndGet();
|
||||||
}
|
}
|
||||||
@@ -131,6 +143,11 @@ public class JmxConnectorWrap {
|
|||||||
acquire();
|
acquire();
|
||||||
MBeanServerConnection mBeanServerConnection = jmxConnector.getMBeanServerConnection();
|
MBeanServerConnection mBeanServerConnection = jmxConnector.getMBeanServerConnection();
|
||||||
return mBeanServerConnection.queryNames(name, query);
|
return mBeanServerConnection.queryNames(name, query);
|
||||||
|
} catch (IOException ioe) {
|
||||||
|
// 如果是因为连接断开,则进行重新连接,并抛出异常
|
||||||
|
reInitDueIOException();
|
||||||
|
|
||||||
|
throw ioe;
|
||||||
} finally {
|
} finally {
|
||||||
atomicInteger.incrementAndGet();
|
atomicInteger.incrementAndGet();
|
||||||
}
|
}
|
||||||
@@ -186,4 +203,26 @@ public class JmxConnectorWrap {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private synchronized void reInitDueIOException() {
|
||||||
|
try {
|
||||||
|
if (jmxConnector == null) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// 检查是否正常
|
||||||
|
jmxConnector.getConnectionId();
|
||||||
|
|
||||||
|
// 如果正常则直接返回
|
||||||
|
return;
|
||||||
|
} catch (Exception e) {
|
||||||
|
// ignore
|
||||||
|
}
|
||||||
|
|
||||||
|
// 关闭旧的
|
||||||
|
this.close();
|
||||||
|
|
||||||
|
// 重新创建
|
||||||
|
this.checkJmxConnectionAndInitIfNeed();
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -241,4 +241,14 @@ public class CommonUtils {
|
|||||||
}
|
}
|
||||||
return intList;
|
return intList;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public static boolean isNumeric(String str){
|
||||||
|
for (int i = 0; i < str.length(); i++){
|
||||||
|
if (!Character.isDigit(str.charAt(i))){
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return true;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
1
km-console/.gitignore
vendored
1
km-console/.gitignore
vendored
@@ -9,6 +9,5 @@ build/
|
|||||||
coverage
|
coverage
|
||||||
versions/
|
versions/
|
||||||
debug.log
|
debug.log
|
||||||
package-lock.json
|
|
||||||
yarn.lock
|
yarn.lock
|
||||||
target
|
target
|
||||||
@@ -1,43 +1,65 @@
|
|||||||
## 安装项目依赖
|
## 前提
|
||||||
|
|
||||||
- 安装 lerna
|
通常情况下,您可以通过 [本地源码启动手册](https://github.com/didi/KnowStreaming/blob/master/docs/dev_guide/%E6%9C%AC%E5%9C%B0%E6%BA%90%E7%A0%81%E5%90%AF%E5%8A%A8%E6%89%8B%E5%86%8C.md) 来打包工程。如果您需要在本地独立启动或打包前端服务,请参考以下手册。
|
||||||
|
|
||||||
|
在进行以下的步骤之前,首先确保您已经安装了 `node`。如已安装,可以通过在终端执行 `node -v` 来获取到 node 版本,项目推荐使用 `node v12` 版本运行(例如 `node v12.22.12`)。
|
||||||
|
|
||||||
|
另外,`windows` 用户请在 `git bash` 下运行下面的命令。
|
||||||
|
|
||||||
|
## 一、进入 km-console 目录
|
||||||
|
|
||||||
|
在终端执行以下步骤时,需要先进入 `KnowStreaming/km-console` 目录。
|
||||||
|
|
||||||
|
## 二、安装项目依赖(必须)
|
||||||
|
|
||||||
|
1. 安装 lerna(可选,安装后可以直接通过 lerna 的全局指令管理项目,如果不了解 lerna 可以不安装)
|
||||||
|
|
||||||
```
|
```
|
||||||
npm install -g lerna
|
npm install -g lerna
|
||||||
```
|
```
|
||||||
|
|
||||||
- 安装项目依赖
|
2. 安装项目依赖
|
||||||
|
|
||||||
```
|
```
|
||||||
npm run i
|
npm run i
|
||||||
```
|
```
|
||||||
|
|
||||||
## 启动项目
|
我们默认保留了 `package-lock.json` 文件,以防止可能的依赖包自动升级导致的问题。依赖默认会通过 taobao 镜像 `https://registry.npmmirror.com/` 服务下载。
|
||||||
|
|
||||||
|
## 三、启动项目(可选,打包构建请直接看步骤三)
|
||||||
|
|
||||||
```
|
```
|
||||||
npm run start
|
npm run start
|
||||||
```
|
```
|
||||||
|
|
||||||
### 环境信息
|
该指令会启动 `packages` 目录下的所有应用,如果需要单独启动应用,其查看下方 QA。
|
||||||
|
|
||||||
http://localhost:port
|
多集群管理应用会启动在 http://localhost:8000,系统管理应用会占用 http://localhost:8001。
|
||||||
|
请确认 `8000` 和 `8001` 端口没有被其他应用占用。
|
||||||
|
|
||||||
## 构建项目
|
后端本地服务启动在 http://localhost:8080,请求通过 webpack dev server 代理访问 8080 端口,需要启动后端服务后才能正常请求接口。
|
||||||
|
|
||||||
|
如果启动失败,可以参见另外一种本地启动方式 [本地源码启动手册](https://github.com/didi/KnowStreaming/blob/master/docs/dev_guide/%E6%9C%AC%E5%9C%B0%E6%BA%90%E7%A0%81%E5%90%AF%E5%8A%A8%E6%89%8B%E5%86%8C.md)
|
||||||
|
|
||||||
|
## 四、构建项目
|
||||||
|
|
||||||
```
|
```
|
||||||
npm run build
|
npm run build
|
||||||
|
|
||||||
```
|
```
|
||||||
|
|
||||||
|
项目构建成功后,会存放到 km-rest/src/main/resources/tamplates 目录下。
|
||||||
|
|
||||||
## 目录结构
|
## 目录结构
|
||||||
|
|
||||||
- packages
|
- packages
|
||||||
- layout-clusters-fe: 基座应用 & 多集群管理
|
- layout-clusters-fe: 基座应用 & 多集群管理(其余应用启动需要首先启动该应用)
|
||||||
- config-manager-fe: 子应用 - 系统管理
|
- config-manager-fe: 子应用 - 系统管理
|
||||||
- tool: 启动 & 打包脚本
|
|
||||||
- ...
|
- ...
|
||||||
|
|
||||||
## 常见问题
|
## QA
|
||||||
|
|
||||||
|
Q: 在 `km-console` 目录下执行 `npm run start` 时看不到应用构建和热加载过程?如何启动单个应用?
|
||||||
|
|
||||||
Q: 执行 `npm run start` 时看不到应用构建和热加载过程?
|
|
||||||
A: 需要到具体的应用中执行 `npm run start`,例如 `cd packages/layout-clusters-fe` 后,执行 `npm run start`。
|
A: 需要到具体的应用中执行 `npm run start`,例如 `cd packages/layout-clusters-fe` 后,执行 `npm run start`。
|
||||||
|
|
||||||
|
如遇到其它问题,请见 [faq](https://github.com/didi/KnowStreaming/blob/master/docs/user_guide/faq.md)。
|
||||||
|
|||||||
8567
km-console/package-lock.json
generated
Normal file
8567
km-console/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
@@ -17,15 +17,15 @@
|
|||||||
"eslint-plugin-react": "7.22.0",
|
"eslint-plugin-react": "7.22.0",
|
||||||
"eslint-plugin-react-hooks": "^4.2.0",
|
"eslint-plugin-react-hooks": "^4.2.0",
|
||||||
"husky": "4.3.7",
|
"husky": "4.3.7",
|
||||||
"lerna": "^4.0.0",
|
"lerna": "^5.5.0",
|
||||||
"lint-staged": "10.5.3",
|
"lint-staged": "10.5.3",
|
||||||
"prettier": "2.3.2"
|
"prettier": "2.3.2"
|
||||||
},
|
},
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"i": "npm install && lerna bootstrap",
|
"i": "npm install && lerna bootstrap",
|
||||||
"clean": "rm -rf node_modules package-lock.json packages/*/node_modules packages/*/package-lock.json",
|
"clean": "rm -rf node_modules package-lock.json packages/*/node_modules packages/*/package-lock.json",
|
||||||
"start": "sh ./tool/start.sh",
|
"start": "lerna run start",
|
||||||
"build": "sh ./tool/build.sh",
|
"build": "lerna run build",
|
||||||
"changelog": "conventional-changelog -p angular -i CHANGELOG.md -s -r 0 && git add CHANGELOG.md",
|
"changelog": "conventional-changelog -p angular -i CHANGELOG.md -s -r 0 && git add CHANGELOG.md",
|
||||||
"cm": "git add . && cz"
|
"cm": "git add . && cz"
|
||||||
},
|
},
|
||||||
|
|||||||
@@ -9,5 +9,4 @@ build/
|
|||||||
coverage
|
coverage
|
||||||
versions/
|
versions/
|
||||||
debug.log
|
debug.log
|
||||||
package-lock.json
|
|
||||||
yarn.lock
|
yarn.lock
|
||||||
@@ -1,17 +1,21 @@
|
|||||||
## 使用说明
|
## 使用说明
|
||||||
|
|
||||||
### 依赖安装:
|
### 依赖安装(如在 km-console 目录下执行 npm run i 安装过依赖,这步可以省略):
|
||||||
|
|
||||||
```
|
```
|
||||||
npm install
|
npm install
|
||||||
```
|
```
|
||||||
|
|
||||||
|
注意,这种方式只会安装当前应用的依赖。如果您不了解,推荐在 km-console 目录下执行 npm run i 安装依赖。
|
||||||
|
|
||||||
### 启动:
|
### 启动:
|
||||||
|
|
||||||
```
|
```
|
||||||
npm run start
|
npm run start
|
||||||
```
|
```
|
||||||
|
|
||||||
|
该应用为子应用,启动后需要到基座应用中查看(需要启动基座应用,即 layout-clusters-fe),地址为 http://localhost:8000
|
||||||
|
|
||||||
### 构建:
|
### 构建:
|
||||||
|
|
||||||
```
|
```
|
||||||
|
|||||||
@@ -1,205 +0,0 @@
|
|||||||
/* eslint-disable */
|
|
||||||
const MiniCssExtractPlugin = require('mini-css-extract-plugin');
|
|
||||||
const ProgressBarPlugin = require('progress-bar-webpack-plugin');
|
|
||||||
const CaseSensitivePathsPlugin = require('case-sensitive-paths-webpack-plugin');
|
|
||||||
const StatsPlugin = require('stats-webpack-plugin');
|
|
||||||
const { CleanWebpackPlugin } = require('clean-webpack-plugin');
|
|
||||||
const TerserJSPlugin = require('terser-webpack-plugin');
|
|
||||||
const OptimizeCSSAssetsPlugin = require('optimize-css-assets-webpack-plugin');
|
|
||||||
const HappyPack = require('happypack');
|
|
||||||
const os = require('os');
|
|
||||||
const happyThreadPool = HappyPack.ThreadPool({ size: os.cpus().length });
|
|
||||||
const ReactRefreshWebpackPlugin = require('@pmmmwh/react-refresh-webpack-plugin');
|
|
||||||
const theme = require('./theme');
|
|
||||||
var cwd = process.cwd();
|
|
||||||
|
|
||||||
const path = require('path');
|
|
||||||
const isProd = process.env.NODE_ENV === 'production';
|
|
||||||
const babelOptions = {
|
|
||||||
cacheDirectory: true,
|
|
||||||
babelrc: false,
|
|
||||||
presets: [require.resolve('@babel/preset-env'), require.resolve('@babel/preset-typescript'), require.resolve('@babel/preset-react')],
|
|
||||||
plugins: [
|
|
||||||
[require.resolve('@babel/plugin-proposal-decorators'), { legacy: true }],
|
|
||||||
[require.resolve('@babel/plugin-proposal-class-properties'), { loose: true }],
|
|
||||||
[require.resolve('@babel/plugin-proposal-private-methods'), { loose: true }],
|
|
||||||
require.resolve('@babel/plugin-proposal-export-default-from'),
|
|
||||||
require.resolve('@babel/plugin-proposal-export-namespace-from'),
|
|
||||||
require.resolve('@babel/plugin-proposal-object-rest-spread'),
|
|
||||||
require.resolve('@babel/plugin-transform-runtime'),
|
|
||||||
require.resolve('@babel/plugin-proposal-optional-chaining'), //
|
|
||||||
require.resolve('@babel/plugin-proposal-nullish-coalescing-operator'), // 解决 ?? 无法转义问题
|
|
||||||
require.resolve('@babel/plugin-proposal-numeric-separator'), // 转义 1_000_000
|
|
||||||
!isProd && require.resolve('react-refresh/babel'),
|
|
||||||
]
|
|
||||||
.filter(Boolean)
|
|
||||||
.concat([
|
|
||||||
[
|
|
||||||
'babel-plugin-import',
|
|
||||||
{
|
|
||||||
libraryName: 'antd',
|
|
||||||
style: true,
|
|
||||||
},
|
|
||||||
],
|
|
||||||
'@babel/plugin-transform-object-assign',
|
|
||||||
]),
|
|
||||||
};
|
|
||||||
module.exports = () => {
|
|
||||||
const manifestName = `manifest.json`;
|
|
||||||
const cssFileName = isProd ? '[name]-[chunkhash].css' : '[name].css';
|
|
||||||
|
|
||||||
const plugins = [
|
|
||||||
new ProgressBarPlugin(),
|
|
||||||
new CaseSensitivePathsPlugin(),
|
|
||||||
new MiniCssExtractPlugin({
|
|
||||||
filename: cssFileName,
|
|
||||||
}),
|
|
||||||
new StatsPlugin(manifestName, {
|
|
||||||
chunkModules: false,
|
|
||||||
source: true,
|
|
||||||
chunks: false,
|
|
||||||
modules: false,
|
|
||||||
assets: true,
|
|
||||||
children: false,
|
|
||||||
exclude: [/node_modules/],
|
|
||||||
}),
|
|
||||||
new HappyPack({
|
|
||||||
id: 'babel',
|
|
||||||
loaders: [
|
|
||||||
'cache-loader',
|
|
||||||
{
|
|
||||||
loader: 'babel-loader',
|
|
||||||
options: babelOptions,
|
|
||||||
},
|
|
||||||
],
|
|
||||||
threadPool: happyThreadPool,
|
|
||||||
}),
|
|
||||||
!isProd &&
|
|
||||||
new ReactRefreshWebpackPlugin({
|
|
||||||
overlay: false,
|
|
||||||
}),
|
|
||||||
// new BundleAnalyzerPlugin({
|
|
||||||
// analyzerPort: 8889
|
|
||||||
// }),
|
|
||||||
].filter(Boolean);
|
|
||||||
if (isProd) {
|
|
||||||
plugins.push(new CleanWebpackPlugin());
|
|
||||||
}
|
|
||||||
return {
|
|
||||||
externals: isProd
|
|
||||||
? [
|
|
||||||
/^react$/,
|
|
||||||
/^react\/lib.*/,
|
|
||||||
/^react-dom$/,
|
|
||||||
/.*react-dom.*/,
|
|
||||||
/^single-spa$/,
|
|
||||||
/^single-spa-react$/,
|
|
||||||
/^moment$/,
|
|
||||||
/^antd$/,
|
|
||||||
/^lodash$/,
|
|
||||||
/^react-router$/,
|
|
||||||
/^react-router-dom$/,
|
|
||||||
]
|
|
||||||
: [],
|
|
||||||
resolve: {
|
|
||||||
symlinks: false,
|
|
||||||
extensions: ['.web.jsx', '.web.js', '.ts', '.tsx', '.js', '.jsx', '.json'],
|
|
||||||
alias: {
|
|
||||||
// '@pkgs': path.resolve(cwd, 'src/packages'),
|
|
||||||
'@pkgs': path.resolve(cwd, './node_modules/@didi/d1-packages'),
|
|
||||||
'@cpts': path.resolve(cwd, 'src/components'),
|
|
||||||
'@interface': path.resolve(cwd, 'src/interface'),
|
|
||||||
'@apis': path.resolve(cwd, 'src/api'),
|
|
||||||
react: path.resolve('./node_modules/react'),
|
|
||||||
actions: path.resolve(cwd, 'src/actions'),
|
|
||||||
lib: path.resolve(cwd, 'src/lib'),
|
|
||||||
constants: path.resolve(cwd, 'src/constants'),
|
|
||||||
components: path.resolve(cwd, 'src/components'),
|
|
||||||
container: path.resolve(cwd, 'src/container'),
|
|
||||||
api: path.resolve(cwd, 'src/api'),
|
|
||||||
assets: path.resolve(cwd, 'src/assets'),
|
|
||||||
mobxStore: path.resolve(cwd, 'src/mobxStore'),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
plugins,
|
|
||||||
module: {
|
|
||||||
rules: [
|
|
||||||
{
|
|
||||||
parser: { system: false },
|
|
||||||
},
|
|
||||||
{
|
|
||||||
test: /\.(js|jsx|ts|tsx)$/,
|
|
||||||
exclude: /node_modules\/(?!react-intl|@didi\/dcloud-design)/,
|
|
||||||
use: [
|
|
||||||
{
|
|
||||||
loader: 'happypack/loader?id=babel',
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
{
|
|
||||||
test: /\.(png|svg|jpeg|jpg|gif|ttf|woff|woff2|eot|pdf)$/,
|
|
||||||
use: [
|
|
||||||
{
|
|
||||||
loader: 'file-loader',
|
|
||||||
options: {
|
|
||||||
name: '[name].[ext]',
|
|
||||||
outputPath: './assets/image/',
|
|
||||||
esModule: false,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
{
|
|
||||||
test: /\.(css|less)$/,
|
|
||||||
use: [
|
|
||||||
{
|
|
||||||
loader: MiniCssExtractPlugin.loader,
|
|
||||||
},
|
|
||||||
'css-loader',
|
|
||||||
{
|
|
||||||
loader: 'less-loader',
|
|
||||||
options: {
|
|
||||||
javascriptEnabled: true,
|
|
||||||
modifyVars: theme,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
optimization: Object.assign(
|
|
||||||
{
|
|
||||||
splitChunks: {
|
|
||||||
cacheGroups: {
|
|
||||||
vendor: {
|
|
||||||
test: /[\\/]node_modules[\\/]/,
|
|
||||||
chunks: 'all',
|
|
||||||
name: 'vendor',
|
|
||||||
priority: 10,
|
|
||||||
enforce: true,
|
|
||||||
minChunks: 1,
|
|
||||||
maxSize: 3500000,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
isProd
|
|
||||||
? {
|
|
||||||
minimizer: [
|
|
||||||
new TerserJSPlugin({
|
|
||||||
cache: true,
|
|
||||||
sourceMap: true,
|
|
||||||
}),
|
|
||||||
new OptimizeCSSAssetsPlugin({}),
|
|
||||||
],
|
|
||||||
}
|
|
||||||
: {}
|
|
||||||
),
|
|
||||||
devtool: isProd ? 'cheap-module-source-map' : 'source-map',
|
|
||||||
node: {
|
|
||||||
fs: 'empty',
|
|
||||||
net: 'empty',
|
|
||||||
tls: 'empty',
|
|
||||||
},
|
|
||||||
};
|
|
||||||
};
|
|
||||||
132
km-console/packages/config-manager-fe/config/webpack.common.js
Normal file
132
km-console/packages/config-manager-fe/config/webpack.common.js
Normal file
@@ -0,0 +1,132 @@
|
|||||||
|
const path = require('path');
|
||||||
|
const webpack = require('webpack');
|
||||||
|
const HtmlWebpackPlugin = require('html-webpack-plugin');
|
||||||
|
const MiniCssExtractPlugin = require('mini-css-extract-plugin');
|
||||||
|
const ProgressBarPlugin = require('progress-bar-webpack-plugin');
|
||||||
|
const CaseSensitivePathsPlugin = require('case-sensitive-paths-webpack-plugin');
|
||||||
|
const StatsPlugin = require('stats-webpack-plugin');
|
||||||
|
const HappyPack = require('happypack');
|
||||||
|
const os = require('os');
|
||||||
|
const happyThreadPool = HappyPack.ThreadPool({ size: os.cpus().length });
|
||||||
|
const theme = require('./theme');
|
||||||
|
const pkgJson = require('../package');
|
||||||
|
|
||||||
|
const devMode = process.env.NODE_ENV === 'development';
|
||||||
|
const babelOptions = {
|
||||||
|
cacheDirectory: true,
|
||||||
|
babelrc: false,
|
||||||
|
presets: [require.resolve('@babel/preset-env'), require.resolve('@babel/preset-typescript'), require.resolve('@babel/preset-react')],
|
||||||
|
plugins: [
|
||||||
|
[require.resolve('@babel/plugin-proposal-decorators'), { legacy: true }],
|
||||||
|
[require.resolve('@babel/plugin-proposal-class-properties'), { loose: true }],
|
||||||
|
[require.resolve('@babel/plugin-proposal-private-methods'), { loose: true }],
|
||||||
|
[require.resolve('@babel/plugin-proposal-private-property-in-object'), { loose: true }],
|
||||||
|
require.resolve('@babel/plugin-proposal-export-default-from'),
|
||||||
|
require.resolve('@babel/plugin-proposal-export-namespace-from'),
|
||||||
|
require.resolve('@babel/plugin-proposal-object-rest-spread'),
|
||||||
|
require.resolve('@babel/plugin-transform-runtime'),
|
||||||
|
require.resolve('@babel/plugin-proposal-optional-chaining'), //
|
||||||
|
require.resolve('@babel/plugin-proposal-nullish-coalescing-operator'), // 解决 ?? 无法转义问题
|
||||||
|
require.resolve('@babel/plugin-proposal-numeric-separator'), // 转义 1_000_000
|
||||||
|
devMode && require.resolve('react-refresh/babel'),
|
||||||
|
].filter(Boolean),
|
||||||
|
};
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
entry: {
|
||||||
|
[pkgJson.ident]: ['./src/index.tsx'],
|
||||||
|
},
|
||||||
|
resolve: {
|
||||||
|
symlinks: false,
|
||||||
|
extensions: ['.web.jsx', '.web.js', '.ts', '.tsx', '.js', '.jsx', '.json'],
|
||||||
|
alias: {
|
||||||
|
'@src': path.resolve(process.cwd(), 'src'),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
plugins: [
|
||||||
|
new ProgressBarPlugin(),
|
||||||
|
new CaseSensitivePathsPlugin(),
|
||||||
|
new StatsPlugin('manifest.json', {
|
||||||
|
chunkModules: false,
|
||||||
|
source: true,
|
||||||
|
chunks: false,
|
||||||
|
modules: false,
|
||||||
|
assets: true,
|
||||||
|
children: false,
|
||||||
|
exclude: [/node_modules/],
|
||||||
|
}),
|
||||||
|
new HappyPack({
|
||||||
|
id: 'babel',
|
||||||
|
loaders: [
|
||||||
|
'cache-loader',
|
||||||
|
{
|
||||||
|
loader: 'babel-loader',
|
||||||
|
options: babelOptions,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
threadPool: happyThreadPool,
|
||||||
|
}),
|
||||||
|
new webpack.DefinePlugin({
|
||||||
|
'process.env': {
|
||||||
|
NODE_ENV: JSON.stringify(process.env.NODE_ENV),
|
||||||
|
RUN_ENV: JSON.stringify(process.env.RUN_ENV),
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
new HtmlWebpackPlugin({
|
||||||
|
meta: {
|
||||||
|
manifest: 'manifest.json',
|
||||||
|
},
|
||||||
|
template: './src/index.html',
|
||||||
|
inject: 'body',
|
||||||
|
}),
|
||||||
|
].filter(Boolean),
|
||||||
|
module: {
|
||||||
|
rules: [
|
||||||
|
{
|
||||||
|
parser: { system: false },
|
||||||
|
},
|
||||||
|
{
|
||||||
|
test: /\.(js|jsx|ts|tsx)$/,
|
||||||
|
exclude: /node_modules\/(?!react-intl|@didi\/dcloud-design)/,
|
||||||
|
use: [
|
||||||
|
{
|
||||||
|
loader: 'happypack/loader?id=babel',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
test: /\.(png|svg|jpeg|jpg|gif|ttf|woff|woff2|eot|pdf)$/,
|
||||||
|
use: [
|
||||||
|
{
|
||||||
|
loader: 'file-loader',
|
||||||
|
options: {
|
||||||
|
name: '[name].[ext]',
|
||||||
|
outputPath: './assets/image/',
|
||||||
|
esModule: false,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
test: /\.(css|less)$/,
|
||||||
|
use: [
|
||||||
|
MiniCssExtractPlugin.loader,
|
||||||
|
'css-loader',
|
||||||
|
{
|
||||||
|
loader: 'less-loader',
|
||||||
|
options: {
|
||||||
|
javascriptEnabled: true,
|
||||||
|
modifyVars: theme,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
node: {
|
||||||
|
fs: 'empty',
|
||||||
|
net: 'empty',
|
||||||
|
tls: 'empty',
|
||||||
|
},
|
||||||
|
stats: 'errors-warnings',
|
||||||
|
};
|
||||||
35
km-console/packages/config-manager-fe/config/webpack.dev.js
Normal file
35
km-console/packages/config-manager-fe/config/webpack.dev.js
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
const MiniCssExtractPlugin = require('mini-css-extract-plugin');
|
||||||
|
const ReactRefreshWebpackPlugin = require('@pmmmwh/react-refresh-webpack-plugin');
|
||||||
|
const pkgJson = require('../package');
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
mode: 'development',
|
||||||
|
plugins: [
|
||||||
|
new MiniCssExtractPlugin(),
|
||||||
|
new ReactRefreshWebpackPlugin({
|
||||||
|
overlay: false,
|
||||||
|
}),
|
||||||
|
],
|
||||||
|
devServer: {
|
||||||
|
host: '127.0.0.1',
|
||||||
|
port: pkgJson.port,
|
||||||
|
hot: true,
|
||||||
|
open: false,
|
||||||
|
publicPath: `http://localhost:${pkgJson.port}/${pkgJson.ident}/`,
|
||||||
|
inline: true,
|
||||||
|
disableHostCheck: true,
|
||||||
|
historyApiFallback: true,
|
||||||
|
headers: {
|
||||||
|
'Access-Control-Allow-Origin': '*',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
output: {
|
||||||
|
path: '/',
|
||||||
|
publicPath: `http://localhost:${pkgJson.port}/${pkgJson.ident}/`,
|
||||||
|
library: pkgJson.ident,
|
||||||
|
libraryTarget: 'amd',
|
||||||
|
filename: '[name].js',
|
||||||
|
chunkFilename: '[name].js',
|
||||||
|
},
|
||||||
|
devtool: 'cheap-module-eval-source-map',
|
||||||
|
};
|
||||||
59
km-console/packages/config-manager-fe/config/webpack.prod.js
Normal file
59
km-console/packages/config-manager-fe/config/webpack.prod.js
Normal file
@@ -0,0 +1,59 @@
|
|||||||
|
const path = require('path');
|
||||||
|
const MiniCssExtractPlugin = require('mini-css-extract-plugin');
|
||||||
|
const { CleanWebpackPlugin } = require('clean-webpack-plugin');
|
||||||
|
const TerserJSPlugin = require('terser-webpack-plugin');
|
||||||
|
const OptimizeCSSAssetsPlugin = require('optimize-css-assets-webpack-plugin');
|
||||||
|
const pkgJson = require('../package');
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
mode: 'production',
|
||||||
|
externals: [
|
||||||
|
/^react$/,
|
||||||
|
/^react\/lib.*/,
|
||||||
|
/^react-dom$/,
|
||||||
|
/.*react-dom.*/,
|
||||||
|
/^single-spa$/,
|
||||||
|
/^single-spa-react$/,
|
||||||
|
/^moment$/,
|
||||||
|
/^lodash$/,
|
||||||
|
/^react-router$/,
|
||||||
|
/^react-router-dom$/,
|
||||||
|
],
|
||||||
|
plugins: [
|
||||||
|
new CleanWebpackPlugin(),
|
||||||
|
new MiniCssExtractPlugin({
|
||||||
|
filename: '[name]-[chunkhash].css',
|
||||||
|
}),
|
||||||
|
],
|
||||||
|
output: {
|
||||||
|
path: path.resolve(process.cwd(), `../../../km-rest/src/main/resources/templates/${pkgJson.ident}`),
|
||||||
|
publicPath: `${process.env.PUBLIC_PATH}/${pkgJson.ident}/`,
|
||||||
|
library: pkgJson.ident,
|
||||||
|
libraryTarget: 'amd',
|
||||||
|
filename: '[name]-[chunkhash].js',
|
||||||
|
chunkFilename: '[name]-[chunkhash].js',
|
||||||
|
},
|
||||||
|
optimization: {
|
||||||
|
splitChunks: {
|
||||||
|
cacheGroups: {
|
||||||
|
vendor: {
|
||||||
|
test: /[\\/]node_modules[\\/]/,
|
||||||
|
chunks: 'all',
|
||||||
|
name: 'vendor',
|
||||||
|
priority: 10,
|
||||||
|
enforce: true,
|
||||||
|
minChunks: 1,
|
||||||
|
maxSize: 3500000,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
minimizer: [
|
||||||
|
new TerserJSPlugin({
|
||||||
|
cache: true,
|
||||||
|
sourceMap: true,
|
||||||
|
}),
|
||||||
|
new OptimizeCSSAssetsPlugin({}),
|
||||||
|
],
|
||||||
|
},
|
||||||
|
devtool: 'none',
|
||||||
|
};
|
||||||
13847
km-console/packages/config-manager-fe/package-lock.json
generated
Normal file
13847
km-console/packages/config-manager-fe/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
@@ -18,12 +18,14 @@
|
|||||||
"scripts": {
|
"scripts": {
|
||||||
"test": "echo \"Error: run tests from root\" && exit 1",
|
"test": "echo \"Error: run tests from root\" && exit 1",
|
||||||
"start": "cross-env NODE_ENV=development webpack-dev-server",
|
"start": "cross-env NODE_ENV=development webpack-dev-server",
|
||||||
"build": "rm -rf ../../pub/layout & cross-env NODE_ENV=production webpack --max_old_space_size=8000"
|
"build": "cross-env NODE_ENV=production webpack --max_old_space_size=8000"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
|
"@knowdesign/icons": "^1.0.0",
|
||||||
"babel-preset-react-app": "^10.0.0",
|
"babel-preset-react-app": "^10.0.0",
|
||||||
"classnames": "^2.2.6",
|
"classnames": "^2.2.6",
|
||||||
"dotenv": "^16.0.1",
|
"dotenv": "^16.0.1",
|
||||||
|
"knowdesign": "1.3.7",
|
||||||
"less": "^3.9.0",
|
"less": "^3.9.0",
|
||||||
"lodash": "^4.17.11",
|
"lodash": "^4.17.11",
|
||||||
"mobx": "4.15.7",
|
"mobx": "4.15.7",
|
||||||
@@ -36,8 +38,7 @@
|
|||||||
"react-intl": "^3.2.1",
|
"react-intl": "^3.2.1",
|
||||||
"react-router-cache-route": "^1.11.1",
|
"react-router-cache-route": "^1.11.1",
|
||||||
"single-spa": "^5.8.0",
|
"single-spa": "^5.8.0",
|
||||||
"single-spa-react": "^2.14.0",
|
"single-spa-react": "^2.14.0"
|
||||||
"knowdesign": "1.3.7"
|
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@ant-design/icons": "^4.6.2",
|
"@ant-design/icons": "^4.6.2",
|
||||||
@@ -58,6 +59,7 @@
|
|||||||
"@pmmmwh/react-refresh-webpack-plugin": "^0.5.1",
|
"@pmmmwh/react-refresh-webpack-plugin": "^0.5.1",
|
||||||
"@types/lodash": "^4.14.138",
|
"@types/lodash": "^4.14.138",
|
||||||
"@types/react-dom": "^17.0.5",
|
"@types/react-dom": "^17.0.5",
|
||||||
|
"@types/react-router": "5.1.18",
|
||||||
"@types/react-router-dom": "^5.3.3",
|
"@types/react-router-dom": "^5.3.3",
|
||||||
"@types/single-spa-react": "^2.12.0",
|
"@types/single-spa-react": "^2.12.0",
|
||||||
"@typescript-eslint/eslint-plugin": "4.13.0",
|
"@typescript-eslint/eslint-plugin": "4.13.0",
|
||||||
|
|||||||
@@ -2,8 +2,8 @@ import React from 'react';
|
|||||||
import { BrowserRouter as Router, Redirect, Switch } from 'react-router-dom';
|
import { BrowserRouter as Router, Redirect, Switch } from 'react-router-dom';
|
||||||
import _ from 'lodash';
|
import _ from 'lodash';
|
||||||
import './constants/axiosConfig';
|
import './constants/axiosConfig';
|
||||||
import dantdZhCN from 'knowdesign/lib/locale/zh_CN';
|
import dantdZhCN from 'knowdesign/es/locale/zh_CN';
|
||||||
import dantdEnUS from 'knowdesign/lib/locale/en_US';
|
import dantdEnUS from 'knowdesign/es/locale/en_US';
|
||||||
import intlZhCN from './locales/zh';
|
import intlZhCN from './locales/zh';
|
||||||
import intlEnUS from './locales/en';
|
import intlEnUS from './locales/en';
|
||||||
import { AppContainer, RouteGuard, DProLayout } from 'knowdesign';
|
import { AppContainer, RouteGuard, DProLayout } from 'knowdesign';
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
import { DownOutlined } from '@ant-design/icons';
|
import { DownOutlined } from '@ant-design/icons';
|
||||||
import { Popover } from 'knowdesign';
|
import { Popover } from 'knowdesign';
|
||||||
import { TooltipPlacement } from 'knowdesign/lib/basic/tooltip';
|
import { TooltipPlacement } from 'knowdesign/es/basic/tooltip';
|
||||||
import React, { useState, useRef, useEffect } from 'react';
|
import React, { useState, useRef, useEffect } from 'react';
|
||||||
import './index.less';
|
import './index.less';
|
||||||
|
|
||||||
@@ -90,8 +90,9 @@ export default (props: PropsType) => {
|
|||||||
return (
|
return (
|
||||||
<div
|
<div
|
||||||
key={i}
|
key={i}
|
||||||
className={`container-item ${curState.calculated ? (curState.isHideExpandNode ? 'show' : i >= curState.endI ? 'hide' : 'show') : ''
|
className={`container-item ${
|
||||||
}`}
|
curState.calculated ? (curState.isHideExpandNode ? 'show' : i >= curState.endI ? 'hide' : 'show') : ''
|
||||||
|
}`}
|
||||||
>
|
>
|
||||||
{item}
|
{item}
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
@@ -22,6 +22,20 @@
|
|||||||
display: flex;
|
display: flex;
|
||||||
justify-content: space-between;
|
justify-content: space-between;
|
||||||
margin-bottom: 12px;
|
margin-bottom: 12px;
|
||||||
|
.left,
|
||||||
|
.right {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
}
|
||||||
|
.left .refresh-icon {
|
||||||
|
font-size: 20px;
|
||||||
|
color: #74788d;
|
||||||
|
cursor: pointer;
|
||||||
|
}
|
||||||
|
.right .search-input {
|
||||||
|
width: 248px;
|
||||||
|
margin-right: 8px;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -35,11 +35,20 @@ serviceInstance.interceptors.request.use(
|
|||||||
// 响应拦截
|
// 响应拦截
|
||||||
serviceInstance.interceptors.response.use(
|
serviceInstance.interceptors.response.use(
|
||||||
(config: any) => {
|
(config: any) => {
|
||||||
return config.data;
|
const res: { code: number; message: string; data: any } = config.data;
|
||||||
|
if (res.code !== 0 && res.code !== 200) {
|
||||||
|
const desc = res.message;
|
||||||
|
notification.error({
|
||||||
|
message: desc,
|
||||||
|
duration: 3,
|
||||||
|
});
|
||||||
|
throw res;
|
||||||
|
}
|
||||||
|
return res;
|
||||||
},
|
},
|
||||||
(err: any) => {
|
(err: any) => {
|
||||||
const config = err.config;
|
const config = err?.config;
|
||||||
if (!config || !config.retryTimes) return dealResponse(err, config.customNotification);
|
if (!config || !config.retryTimes) return dealResponse(err);
|
||||||
const { __retryCount = 0, retryDelay = 300, retryTimes } = config;
|
const { __retryCount = 0, retryDelay = 300, retryTimes } = config;
|
||||||
config.__retryCount = __retryCount;
|
config.__retryCount = __retryCount;
|
||||||
if (__retryCount >= retryTimes) {
|
if (__retryCount >= retryTimes) {
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
import React, { useLayoutEffect } from 'react';
|
import React, { useLayoutEffect } from 'react';
|
||||||
import { Utils, AppContainer } from 'knowdesign';
|
import { Utils, AppContainer } from 'knowdesign';
|
||||||
import { goLogin } from 'constants/axiosConfig';
|
import { goLogin } from '@src/constants/axiosConfig';
|
||||||
|
|
||||||
// 权限对应表
|
// 权限对应表
|
||||||
export enum ConfigPermissionMap {
|
export enum ConfigPermissionMap {
|
||||||
|
|||||||
@@ -15,6 +15,7 @@ import {
|
|||||||
AppContainer,
|
AppContainer,
|
||||||
Utils,
|
Utils,
|
||||||
} from 'knowdesign';
|
} from 'knowdesign';
|
||||||
|
import { IconFont } from '@knowdesign/icons';
|
||||||
import { PlusOutlined } from '@ant-design/icons';
|
import { PlusOutlined } from '@ant-design/icons';
|
||||||
import moment from 'moment';
|
import moment from 'moment';
|
||||||
// 引入代码编辑器
|
// 引入代码编辑器
|
||||||
@@ -26,8 +27,8 @@ import 'codemirror/addon/selection/active-line';
|
|||||||
import 'codemirror/addon/edit/closebrackets';
|
import 'codemirror/addon/edit/closebrackets';
|
||||||
require('codemirror/mode/xml/xml');
|
require('codemirror/mode/xml/xml');
|
||||||
require('codemirror/mode/javascript/javascript');
|
require('codemirror/mode/javascript/javascript');
|
||||||
import api from 'api';
|
import api from '@src/api';
|
||||||
import { defaultPagination } from 'constants/common';
|
import { defaultPagination } from '@src/constants/common';
|
||||||
import TypicalListCard from '../../components/TypicalListCard';
|
import TypicalListCard from '../../components/TypicalListCard';
|
||||||
import { ConfigPermissionMap } from '../CommonConfig';
|
import { ConfigPermissionMap } from '../CommonConfig';
|
||||||
import { ConfigOperate, ConfigProps } from './config';
|
import { ConfigOperate, ConfigProps } from './config';
|
||||||
@@ -384,7 +385,7 @@ export default () => {
|
|||||||
const onDelete = (record: ConfigProps) => {
|
const onDelete = (record: ConfigProps) => {
|
||||||
confirm({
|
confirm({
|
||||||
title: '确定删除配置吗?',
|
title: '确定删除配置吗?',
|
||||||
content: `配置⌈${record.valueName}⌋${record.status === 1 ? '为启用状态,无法删除' : ''}`,
|
content: `配置 [${record.valueName}] ${record.status === 1 ? '为启用状态,无法删除' : ''}`,
|
||||||
centered: true,
|
centered: true,
|
||||||
okText: '删除',
|
okText: '删除',
|
||||||
okType: 'primary',
|
okType: 'primary',
|
||||||
@@ -398,9 +399,11 @@ export default () => {
|
|||||||
},
|
},
|
||||||
maskClosable: true,
|
maskClosable: true,
|
||||||
onOk() {
|
onOk() {
|
||||||
return request(api.editConfig, {
|
return request(api.delConfig, {
|
||||||
method: 'POST',
|
method: 'DELETE',
|
||||||
data: record.id,
|
params: {
|
||||||
|
id: record.id,
|
||||||
|
},
|
||||||
}).then((_) => {
|
}).then((_) => {
|
||||||
message.success('删除成功');
|
message.success('删除成功');
|
||||||
getConfigList();
|
getConfigList();
|
||||||
@@ -431,22 +434,28 @@ export default () => {
|
|||||||
<TypicalListCard title="配置管理">
|
<TypicalListCard title="配置管理">
|
||||||
<div className="config-manage-page">
|
<div className="config-manage-page">
|
||||||
<div className="operate-bar">
|
<div className="operate-bar">
|
||||||
<Form form={form} layout="inline" onFinish={() => getConfigList({ page: 1 })}>
|
<div className="left">
|
||||||
<Form.Item name="valueGroup">
|
<div className="refresh-icon" onClick={() => getConfigList()}>
|
||||||
<Select style={{ width: 180 }} placeholder="请选择模块" options={configGroupList} />
|
<IconFont className="icon" type="icon-shuaxin1" />
|
||||||
</Form.Item>
|
</div>
|
||||||
<Form.Item name="valueName">
|
<Divider type="vertical" style={{ height: 20, top: 0 }} />
|
||||||
<Input style={{ width: 180 }} placeholder="请输入配置键" />
|
<Form form={form} layout="inline" onFinish={() => getConfigList({ page: 1 })}>
|
||||||
</Form.Item>
|
<Form.Item name="valueGroup">
|
||||||
<Form.Item name="memo">
|
<Select style={{ width: 180 }} placeholder="请选择模块" options={configGroupList} />
|
||||||
<Input style={{ width: 180 }} placeholder="请输入描述" />
|
</Form.Item>
|
||||||
</Form.Item>
|
<Form.Item name="valueName">
|
||||||
<Form.Item>
|
<Input style={{ width: 180 }} placeholder="请输入配置键" />
|
||||||
<Button type="primary" ghost htmlType="submit">
|
</Form.Item>
|
||||||
查询
|
<Form.Item name="memo">
|
||||||
</Button>
|
<Input style={{ width: 180 }} placeholder="请输入描述" />
|
||||||
</Form.Item>
|
</Form.Item>
|
||||||
</Form>
|
<Form.Item>
|
||||||
|
<Button type="primary" ghost htmlType="submit">
|
||||||
|
查询
|
||||||
|
</Button>
|
||||||
|
</Form.Item>
|
||||||
|
</Form>
|
||||||
|
</div>
|
||||||
{global.hasPermission && global.hasPermission(ConfigPermissionMap.CONFIG_ADD) ? (
|
{global.hasPermission && global.hasPermission(ConfigPermissionMap.CONFIG_ADD) ? (
|
||||||
<Button
|
<Button
|
||||||
type="primary"
|
type="primary"
|
||||||
|
|||||||
@@ -1,7 +1,8 @@
|
|||||||
import React, { useEffect, useState } from 'react';
|
import React, { useEffect, useState } from 'react';
|
||||||
import { Button, Form, Input, Select, ProTable, DatePicker, Utils, Tooltip } from 'knowdesign';
|
import { Button, Form, Input, Select, ProTable, DatePicker, Utils, Tooltip, Divider } from 'knowdesign';
|
||||||
import api from 'api';
|
import { IconFont } from '@knowdesign/icons';
|
||||||
import { defaultPagination } from 'constants/common';
|
import api from '@src/api';
|
||||||
|
import { defaultPagination } from '@src/constants/common';
|
||||||
import TypicalListCard from '../../components/TypicalListCard';
|
import TypicalListCard from '../../components/TypicalListCard';
|
||||||
import './index.less';
|
import './index.less';
|
||||||
import moment from 'moment';
|
import moment from 'moment';
|
||||||
@@ -119,25 +120,32 @@ export default () => {
|
|||||||
<>
|
<>
|
||||||
<TypicalListCard title="操作记录">
|
<TypicalListCard title="操作记录">
|
||||||
<div className="operate-bar">
|
<div className="operate-bar">
|
||||||
<Form form={form} layout="inline" onFinish={() => getData({ page: 1 })}>
|
<div className="left">
|
||||||
<Form.Item name="targetType">
|
<div className="refresh-icon" onClick={() => getData()}>
|
||||||
<Select placeholder="请选择模块" options={configGroupList} style={{ width: 160 }} />
|
<IconFont className="icon" type="icon-shuaxin1" />
|
||||||
</Form.Item>
|
</div>
|
||||||
<Form.Item name="target">
|
<Divider type="vertical" style={{ height: 20, top: 0 }} />
|
||||||
<Input placeholder="请输入操作对象" />
|
|
||||||
</Form.Item>
|
<Form form={form} layout="inline" onFinish={() => getData({ page: 1 })}>
|
||||||
<Form.Item name="detail">
|
<Form.Item name="targetType">
|
||||||
<Input placeholder="请输入操作内容" />
|
<Select placeholder="请选择模块" options={configGroupList} style={{ width: 160 }} />
|
||||||
</Form.Item>
|
</Form.Item>
|
||||||
<Form.Item name="time">
|
<Form.Item name="target">
|
||||||
<RangePicker showTime />
|
<Input placeholder="请输入操作对象" />
|
||||||
</Form.Item>
|
</Form.Item>
|
||||||
<Form.Item>
|
<Form.Item name="detail">
|
||||||
<Button type="primary" ghost htmlType="submit">
|
<Input placeholder="请输入操作内容" />
|
||||||
查询
|
</Form.Item>
|
||||||
</Button>
|
<Form.Item name="time">
|
||||||
</Form.Item>
|
<RangePicker showTime />
|
||||||
</Form>
|
</Form.Item>
|
||||||
|
<Form.Item>
|
||||||
|
<Button type="primary" ghost htmlType="submit">
|
||||||
|
查询
|
||||||
|
</Button>
|
||||||
|
</Form.Item>
|
||||||
|
</Form>
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<ProTable
|
<ProTable
|
||||||
|
|||||||
@@ -73,12 +73,12 @@ const CheckboxGroupContainer = (props: CheckboxGroupType) => {
|
|||||||
</Checkbox>
|
</Checkbox>
|
||||||
</div>
|
</div>
|
||||||
<Checkbox.Group disabled={disabled} style={{ width: '100%' }} value={checkedList} onChange={onCheckedChange}>
|
<Checkbox.Group disabled={disabled} style={{ width: '100%' }} value={checkedList} onChange={onCheckedChange}>
|
||||||
<Row gutter={[34, 10]}>
|
<Row gutter={[10, 10]}>
|
||||||
{options.map((option) => {
|
{options.map((option) => {
|
||||||
return (
|
return (
|
||||||
<Col span={8} key={option.value}>
|
<Col span={8} key={option.value}>
|
||||||
<Checkbox value={option.value} className="checkbox-content-ellipsis">
|
<Checkbox value={option.value} className="checkbox-content-ellipsis">
|
||||||
{option.label}
|
{option.label.replace('Cluster-Load', '')}
|
||||||
</Checkbox>
|
</Checkbox>
|
||||||
</Col>
|
</Col>
|
||||||
);
|
);
|
||||||
|
|||||||
@@ -20,10 +20,10 @@ import {
|
|||||||
IconFont,
|
IconFont,
|
||||||
} from 'knowdesign';
|
} from 'knowdesign';
|
||||||
import moment from 'moment';
|
import moment from 'moment';
|
||||||
import { CloseOutlined, LoadingOutlined, PlusOutlined } from '@ant-design/icons';
|
import { LoadingOutlined, PlusOutlined } from '@ant-design/icons';
|
||||||
import { defaultPagination } from 'constants/common';
|
import { defaultPagination } from '@src/constants/common';
|
||||||
import { RoleProps, PermissionNode, AssignUser, RoleOperate, FormItemPermission } from './config';
|
import { RoleProps, PermissionNode, AssignUser, RoleOperate, FormItemPermission } from './config';
|
||||||
import api from 'api';
|
import api from '@src/api';
|
||||||
import CheckboxGroupContainer from './CheckboxGroupContainer';
|
import CheckboxGroupContainer from './CheckboxGroupContainer';
|
||||||
import { ConfigPermissionMap } from '../CommonConfig';
|
import { ConfigPermissionMap } from '../CommonConfig';
|
||||||
|
|
||||||
@@ -50,11 +50,21 @@ const RoleDetailAndUpdate = forwardRef((props, ref): JSX.Element => {
|
|||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
const globalPermissions = global.permissions;
|
const globalPermissions = global.permissions;
|
||||||
if (globalPermissions && globalPermissions.length) {
|
if (globalPermissions && globalPermissions.length) {
|
||||||
const sysPermissions = globalPermissions.map((sys: PermissionNode) => ({
|
const sysPermissions = globalPermissions.map((sys: PermissionNode) => {
|
||||||
id: sys.id,
|
const result = {
|
||||||
name: sys.permissionName,
|
id: sys.id,
|
||||||
options: sys.childList.map((node) => ({ label: node.permissionName, value: node.id })),
|
name: sys.permissionName,
|
||||||
}));
|
essentialPermission: undefined,
|
||||||
|
options: [],
|
||||||
|
};
|
||||||
|
result.options = sys.childList.map((node) => {
|
||||||
|
if (node.permissionName === '多集群管理查看' || node.permissionName === '系统管理查看') {
|
||||||
|
result.essentialPermission = { label: node.permissionName, value: node.id };
|
||||||
|
}
|
||||||
|
return { label: node.permissionName, value: node.id };
|
||||||
|
});
|
||||||
|
return result;
|
||||||
|
});
|
||||||
setPermissions(sysPermissions);
|
setPermissions(sysPermissions);
|
||||||
}
|
}
|
||||||
}, [global]);
|
}, [global]);
|
||||||
@@ -79,7 +89,10 @@ const RoleDetailAndUpdate = forwardRef((props, ref): JSX.Element => {
|
|||||||
form.validateFields().then((formData) => {
|
form.validateFields().then((formData) => {
|
||||||
formData.permissionIdList.forEach((arr, i) => {
|
formData.permissionIdList.forEach((arr, i) => {
|
||||||
// 如果分配的系统下的子权限,自动赋予该系统的权限
|
// 如果分配的系统下的子权限,自动赋予该系统的权限
|
||||||
if (arr !== null && arr.length) {
|
if (!Array.isArray(arr)) {
|
||||||
|
arr = [];
|
||||||
|
}
|
||||||
|
if (arr?.length) {
|
||||||
arr.push(permissions[i].id);
|
arr.push(permissions[i].id);
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
@@ -209,10 +222,20 @@ const RoleDetailAndUpdate = forwardRef((props, ref): JSX.Element => {
|
|||||||
<Form.Item
|
<Form.Item
|
||||||
label="分配权限"
|
label="分配权限"
|
||||||
name="permissionIdList"
|
name="permissionIdList"
|
||||||
|
required
|
||||||
rules={[
|
rules={[
|
||||||
() => ({
|
() => ({
|
||||||
validator(_, value) {
|
validator(_, value) {
|
||||||
if (Array.isArray(value) && value.some((item) => !!item.length)) {
|
if (Array.isArray(value) && value.some((item) => !!item?.length)) {
|
||||||
|
const errs = [];
|
||||||
|
value.forEach((arr, i) => {
|
||||||
|
if (arr?.length && !arr.includes(permissions[i].essentialPermission.value)) {
|
||||||
|
errs.push(`[${permissions[i].essentialPermission.label}]`);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
if (errs.length) {
|
||||||
|
return Promise.reject(`您必须分配 ${errs.join(' 和 ')} 权限`);
|
||||||
|
}
|
||||||
return Promise.resolve();
|
return Promise.resolve();
|
||||||
}
|
}
|
||||||
return Promise.reject(new Error('请为角色至少分配一项权限'));
|
return Promise.reject(new Error('请为角色至少分配一项权限'));
|
||||||
@@ -588,38 +611,45 @@ export default (props: { curTabKey: string }): JSX.Element => {
|
|||||||
|
|
||||||
return (
|
return (
|
||||||
<>
|
<>
|
||||||
<div className="operate-bar-right">
|
<div className="operate-bar">
|
||||||
<Input
|
<div className="left">
|
||||||
className="search-input"
|
<div className="refresh-icon" onClick={() => getRoleList()}>
|
||||||
suffix={
|
<IconFont className="icon" type="icon-shuaxin1" />
|
||||||
<IconFont
|
</div>
|
||||||
type="icon-fangdajing"
|
</div>
|
||||||
onClick={(_) => {
|
<div className="right">
|
||||||
setSearchKeywords(searchKeywordsInput);
|
<Input
|
||||||
}}
|
className="search-input"
|
||||||
style={{ fontSize: '16px' }}
|
suffix={
|
||||||
/>
|
<IconFont
|
||||||
}
|
type="icon-fangdajing"
|
||||||
placeholder="请输入角色名称"
|
onClick={(_) => {
|
||||||
value={searchKeywordsInput}
|
setSearchKeywords(searchKeywordsInput);
|
||||||
onPressEnter={(_) => {
|
}}
|
||||||
setSearchKeywords(searchKeywordsInput);
|
style={{ fontSize: '16px' }}
|
||||||
}}
|
/>
|
||||||
onChange={(e) => {
|
}
|
||||||
setSearchKeywordsInput(e.target.value);
|
placeholder="请输入角色名称"
|
||||||
}}
|
value={searchKeywordsInput}
|
||||||
/>
|
onPressEnter={(_) => {
|
||||||
{global.hasPermission && global.hasPermission(ConfigPermissionMap.ROLE_ADD) ? (
|
setSearchKeywords(searchKeywordsInput);
|
||||||
<Button
|
}}
|
||||||
type="primary"
|
onChange={(e) => {
|
||||||
icon={<PlusOutlined />}
|
setSearchKeywordsInput(e.target.value);
|
||||||
onClick={() => detailRef.current.onOpen(true, RoleOperate.Add, getRoleList, undefined)}
|
}}
|
||||||
>
|
/>
|
||||||
新增角色
|
{global.hasPermission && global.hasPermission(ConfigPermissionMap.ROLE_ADD) ? (
|
||||||
</Button>
|
<Button
|
||||||
) : (
|
type="primary"
|
||||||
<></>
|
icon={<PlusOutlined />}
|
||||||
)}
|
onClick={() => detailRef.current.onOpen(true, RoleOperate.Add, getRoleList, undefined)}
|
||||||
|
>
|
||||||
|
新增角色
|
||||||
|
</Button>
|
||||||
|
) : (
|
||||||
|
<></>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<ProTable
|
<ProTable
|
||||||
|
|||||||
@@ -1,12 +1,13 @@
|
|||||||
import React, { forwardRef, useCallback, useEffect, useImperativeHandle, useRef, useState } from 'react';
|
import React, { forwardRef, useCallback, useEffect, useImperativeHandle, useRef, useState } from 'react';
|
||||||
import { Form, ProTable, Select, Button, Input, Modal, message, Drawer, Space, Divider, AppContainer, Utils } from 'knowdesign';
|
import { Form, ProTable, Select, Button, Input, Modal, message, Drawer, Space, Divider, AppContainer, Utils } from 'knowdesign';
|
||||||
|
import { IconFont } from '@knowdesign/icons';
|
||||||
import { PlusOutlined, QuestionCircleOutlined } from '@ant-design/icons';
|
import { PlusOutlined, QuestionCircleOutlined } from '@ant-design/icons';
|
||||||
import moment from 'moment';
|
import moment from 'moment';
|
||||||
import { defaultPagination } from 'constants/common';
|
import { defaultPagination } from '@src/constants/common';
|
||||||
import { UserProps, UserOperate } from './config';
|
import { UserProps, UserOperate } from './config';
|
||||||
import CheckboxGroupContainer from './CheckboxGroupContainer';
|
import CheckboxGroupContainer from './CheckboxGroupContainer';
|
||||||
import TagsWithHide from '../../components/TagsWithHide/index';
|
import TagsWithHide from '../../components/TagsWithHide/index';
|
||||||
import api from 'api';
|
import api from '@src/api';
|
||||||
import { ConfigPermissionMap } from '../CommonConfig';
|
import { ConfigPermissionMap } from '../CommonConfig';
|
||||||
|
|
||||||
const { confirm } = Modal;
|
const { confirm } = Modal;
|
||||||
@@ -341,22 +342,29 @@ export default (props: { curTabKey: string }) => {
|
|||||||
return (
|
return (
|
||||||
<>
|
<>
|
||||||
<div className="operate-bar">
|
<div className="operate-bar">
|
||||||
<Form form={form} layout="inline" onFinish={() => getUserList({ page: 1 })}>
|
<div className="left">
|
||||||
<Form.Item name="userName">
|
<div className="refresh-icon" onClick={() => getUserList()}>
|
||||||
<Input placeholder="请输入用户账号" />
|
<IconFont className="icon" type="icon-shuaxin1" />
|
||||||
</Form.Item>
|
</div>
|
||||||
<Form.Item name="realName">
|
<Divider type="vertical" style={{ height: 20, top: 0 }} />
|
||||||
<Input placeholder="请输入用户实名" />
|
|
||||||
</Form.Item>
|
<Form form={form} layout="inline" onFinish={() => getUserList({ page: 1 })}>
|
||||||
<Form.Item name="roleId">
|
<Form.Item name="userName">
|
||||||
<Select style={{ width: 190 }} placeholder="选择平台已创建的角色名" options={simpleRoleList} />
|
<Input placeholder="请输入用户账号" />
|
||||||
</Form.Item>
|
</Form.Item>
|
||||||
<Form.Item>
|
<Form.Item name="realName">
|
||||||
<Button type="primary" ghost htmlType="submit">
|
<Input placeholder="请输入用户实名" />
|
||||||
查询
|
</Form.Item>
|
||||||
</Button>
|
<Form.Item name="roleId">
|
||||||
</Form.Item>
|
<Select style={{ width: 190 }} placeholder="选择平台已创建的角色名" options={simpleRoleList} />
|
||||||
</Form>
|
</Form.Item>
|
||||||
|
<Form.Item>
|
||||||
|
<Button type="primary" ghost htmlType="submit">
|
||||||
|
查询
|
||||||
|
</Button>
|
||||||
|
</Form.Item>
|
||||||
|
</Form>
|
||||||
|
</div>
|
||||||
{global.hasPermission && global.hasPermission(ConfigPermissionMap.USER_ADD) ? (
|
{global.hasPermission && global.hasPermission(ConfigPermissionMap.USER_ADD) ? (
|
||||||
<Button
|
<Button
|
||||||
type="primary"
|
type="primary"
|
||||||
|
|||||||
@@ -59,5 +59,6 @@ export enum RoleOperate {
|
|||||||
export interface FormItemPermission {
|
export interface FormItemPermission {
|
||||||
id: number;
|
id: number;
|
||||||
name: string;
|
name: string;
|
||||||
|
essentialPermission: { label: string; value: number };
|
||||||
options: { label: string; value: number }[];
|
options: { label: string; value: number }[];
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -44,13 +44,3 @@
|
|||||||
.role-tab-assign-user .desc-row {
|
.role-tab-assign-user .desc-row {
|
||||||
margin-bottom: 24px;
|
margin-bottom: 24px;
|
||||||
}
|
}
|
||||||
|
|
||||||
.operate-bar-right {
|
|
||||||
display: flex;
|
|
||||||
justify-content: right;
|
|
||||||
margin-bottom: 12px;
|
|
||||||
.search-input {
|
|
||||||
width: 248px;
|
|
||||||
margin-right: 8px;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,56 +1,9 @@
|
|||||||
/* eslint-disable */
|
|
||||||
const path = require('path');
|
const path = require('path');
|
||||||
require('dotenv').config({ path: path.resolve(process.cwd(), '../../.env') });
|
require('dotenv').config({ path: path.resolve(process.cwd(), '../../.env') });
|
||||||
const isProd = process.env.NODE_ENV === 'production';
|
|
||||||
const HtmlWebpackPlugin = require('html-webpack-plugin');
|
|
||||||
const webpack = require('webpack');
|
|
||||||
const merge = require('webpack-merge');
|
const merge = require('webpack-merge');
|
||||||
const pkgJson = require('./package');
|
const devMode = process.env.NODE_ENV === 'development';
|
||||||
const getWebpackCommonConfig = require('./config/d1-webpack.base');
|
const commonConfig = require('./config/webpack.common');
|
||||||
const outPath = path.resolve(__dirname, `../../../km-rest/src/main/resources/templates/${pkgJson.ident}`);
|
const devConfig = require('./config/webpack.dev');
|
||||||
const jsFileName = isProd ? '[name]-[chunkhash].js' : '[name].js';
|
const prodConfig = require('./config/webpack.prod');
|
||||||
|
|
||||||
module.exports = merge(getWebpackCommonConfig(), {
|
module.exports = merge(commonConfig, devMode ? devConfig : prodConfig);
|
||||||
mode: isProd ? 'production' : 'development',
|
|
||||||
entry: {
|
|
||||||
[pkgJson.ident]: ['./src/index.tsx'],
|
|
||||||
},
|
|
||||||
plugins: [
|
|
||||||
new webpack.DefinePlugin({
|
|
||||||
'process.env': {
|
|
||||||
NODE_ENV: JSON.stringify(process.env.NODE_ENV),
|
|
||||||
RUN_ENV: JSON.stringify(process.env.RUN_ENV),
|
|
||||||
},
|
|
||||||
}),
|
|
||||||
new HtmlWebpackPlugin({
|
|
||||||
meta: {
|
|
||||||
manifest: 'manifest.json',
|
|
||||||
},
|
|
||||||
template: './src/index.html',
|
|
||||||
inject: 'body',
|
|
||||||
}),
|
|
||||||
],
|
|
||||||
output: {
|
|
||||||
path: outPath,
|
|
||||||
publicPath: isProd ? `${process.env.PUBLIC_PATH}/${pkgJson.ident}/` : `http://localhost:${pkgJson.port}/${pkgJson.ident}/`,
|
|
||||||
library: pkgJson.ident,
|
|
||||||
libraryTarget: 'amd',
|
|
||||||
filename: jsFileName,
|
|
||||||
chunkFilename: jsFileName,
|
|
||||||
},
|
|
||||||
devtool: isProd ? 'none' : 'cheap-module-eval-source-map',
|
|
||||||
devServer: {
|
|
||||||
host: '127.0.0.1',
|
|
||||||
port: pkgJson.port,
|
|
||||||
hot: true,
|
|
||||||
open: false,
|
|
||||||
publicPath: `http://localhost:${pkgJson.port}/${pkgJson.ident}/`,
|
|
||||||
inline: true,
|
|
||||||
disableHostCheck: true,
|
|
||||||
historyApiFallback: true,
|
|
||||||
headers: {
|
|
||||||
'Access-Control-Allow-Origin': '*',
|
|
||||||
},
|
|
||||||
proxy: {},
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|||||||
@@ -9,6 +9,5 @@ build/
|
|||||||
coverage
|
coverage
|
||||||
versions/
|
versions/
|
||||||
debug.log
|
debug.log
|
||||||
package-lock.json
|
|
||||||
yarn.lock
|
yarn.lock
|
||||||
.d1-workspace.json
|
.d1-workspace.json
|
||||||
|
|||||||
@@ -1,17 +1,21 @@
|
|||||||
## 使用说明
|
## 使用说明
|
||||||
|
|
||||||
### 依赖安装:
|
### 依赖安装(如在 km-console 目录下执行 npm run i 安装过依赖,这步可以省略):
|
||||||
|
|
||||||
```
|
```
|
||||||
npm install
|
npm install
|
||||||
```
|
```
|
||||||
|
|
||||||
|
注意,这种方式只会安装当前应用的依赖。如果您不了解,推荐在 km-console 目录下执行 npm run i 安装依赖。
|
||||||
|
|
||||||
### 启动:
|
### 启动:
|
||||||
|
|
||||||
```
|
```
|
||||||
npm run start
|
npm run start
|
||||||
```
|
```
|
||||||
|
|
||||||
|
启动后访问地址为 http://localhost:8000
|
||||||
|
|
||||||
### 构建:
|
### 构建:
|
||||||
|
|
||||||
```
|
```
|
||||||
|
|||||||
@@ -86,12 +86,12 @@ class CoverHtmlWebpackPlugin {
|
|||||||
|
|
||||||
assetJson.reverse().forEach((item) => {
|
assetJson.reverse().forEach((item) => {
|
||||||
if (/\.js$/.test(item)) {
|
if (/\.js$/.test(item)) {
|
||||||
// if (item.includes('vendor~')) {
|
if (item.includes('vendor~')) {
|
||||||
// vendors += `<script async src="${item}"></script>`;
|
vendors += `<script async src="${item}"></script>`;
|
||||||
// } else {
|
} else {
|
||||||
// TODO: entry 只有一个
|
// TODO: entry 只有一个
|
||||||
portalMap['@portal/layout'] = item;
|
portalMap['@portal/layout'] = item;
|
||||||
// }
|
}
|
||||||
} else if (/\.css$/.test(item)) {
|
} else if (/\.css$/.test(item)) {
|
||||||
links += `<link href="${item}" rel="stylesheet">`;
|
links += `<link href="${item}" rel="stylesheet">`;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,187 +0,0 @@
|
|||||||
/* eslint-disable */
|
|
||||||
const MiniCssExtractPlugin = require('mini-css-extract-plugin');
|
|
||||||
const ProgressBarPlugin = require('progress-bar-webpack-plugin');
|
|
||||||
const CaseSensitivePathsPlugin = require('case-sensitive-paths-webpack-plugin');
|
|
||||||
const { CleanWebpackPlugin } = require('clean-webpack-plugin');
|
|
||||||
const CoverHtmlWebpackPlugin = require('./CoverHtmlWebpackPlugin.js');
|
|
||||||
var webpackConfigResolveAlias = require('./webpackConfigResolveAlias');
|
|
||||||
const TerserJSPlugin = require('terser-webpack-plugin');
|
|
||||||
const OptimizeCSSAssetsPlugin = require('optimize-css-assets-webpack-plugin');
|
|
||||||
const theme = require('./theme');
|
|
||||||
const ReactRefreshWebpackPlugin = require('@pmmmwh/react-refresh-webpack-plugin');
|
|
||||||
const HardSourceWebpackPlugin = require('hard-source-webpack-plugin');
|
|
||||||
|
|
||||||
const isProd = process.env.NODE_ENV === 'production';
|
|
||||||
const babelOptions = {
|
|
||||||
cacheDirectory: true,
|
|
||||||
babelrc: false,
|
|
||||||
presets: [require.resolve('@babel/preset-env'), require.resolve('@babel/preset-typescript'), require.resolve('@babel/preset-react')],
|
|
||||||
plugins: [
|
|
||||||
[require.resolve('@babel/plugin-proposal-decorators'), { legacy: true }],
|
|
||||||
[require.resolve('@babel/plugin-proposal-class-properties'), { loose: true }],
|
|
||||||
[require.resolve('@babel/plugin-proposal-private-property-in-object'), { loose: true }],
|
|
||||||
[require.resolve('@babel/plugin-proposal-private-methods'), { loose: true }],
|
|
||||||
require.resolve('@babel/plugin-proposal-export-default-from'),
|
|
||||||
require.resolve('@babel/plugin-proposal-export-namespace-from'),
|
|
||||||
require.resolve('@babel/plugin-proposal-object-rest-spread'),
|
|
||||||
require.resolve('@babel/plugin-transform-runtime'),
|
|
||||||
!isProd && require.resolve('react-refresh/babel'),
|
|
||||||
]
|
|
||||||
.filter(Boolean)
|
|
||||||
.concat([
|
|
||||||
[
|
|
||||||
'babel-plugin-import',
|
|
||||||
{
|
|
||||||
libraryName: 'antd',
|
|
||||||
style: true,
|
|
||||||
},
|
|
||||||
],
|
|
||||||
'@babel/plugin-transform-object-assign',
|
|
||||||
]),
|
|
||||||
};
|
|
||||||
|
|
||||||
module.exports = () => {
|
|
||||||
const cssFileName = isProd ? '[name]-[chunkhash].css' : '[name].css';
|
|
||||||
const plugins = [
|
|
||||||
// !isProd && new HardSourceWebpackPlugin(),
|
|
||||||
new CoverHtmlWebpackPlugin(),
|
|
||||||
new ProgressBarPlugin(),
|
|
||||||
new CaseSensitivePathsPlugin(),
|
|
||||||
new MiniCssExtractPlugin({
|
|
||||||
filename: cssFileName,
|
|
||||||
}),
|
|
||||||
!isProd &&
|
|
||||||
new ReactRefreshWebpackPlugin({
|
|
||||||
overlay: false,
|
|
||||||
}),
|
|
||||||
].filter(Boolean);
|
|
||||||
const resolve = {
|
|
||||||
symlinks: false,
|
|
||||||
extensions: ['.web.jsx', '.web.js', '.ts', '.tsx', '.js', '.jsx', '.json'],
|
|
||||||
alias: webpackConfigResolveAlias,
|
|
||||||
};
|
|
||||||
|
|
||||||
if (isProd) {
|
|
||||||
plugins.push(new CleanWebpackPlugin());
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!isProd) {
|
|
||||||
resolve.mainFields = ['module', 'browser', 'main'];
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
|
||||||
externals: isProd
|
|
||||||
? [
|
|
||||||
/^react$/,
|
|
||||||
/^react\/lib.*/,
|
|
||||||
/^react-dom$/,
|
|
||||||
/.*react-dom.*/,
|
|
||||||
/^single-spa$/,
|
|
||||||
/^single-spa-react$/,
|
|
||||||
/^moment$/,
|
|
||||||
/^antd$/,
|
|
||||||
/^lodash$/,
|
|
||||||
/^echarts$/,
|
|
||||||
/^react-router$/,
|
|
||||||
/^react-router-dom$/,
|
|
||||||
]
|
|
||||||
: [],
|
|
||||||
resolve,
|
|
||||||
plugins,
|
|
||||||
module: {
|
|
||||||
rules: [
|
|
||||||
{
|
|
||||||
parser: { system: false },
|
|
||||||
},
|
|
||||||
{
|
|
||||||
test: /\.(js|jsx)$/,
|
|
||||||
exclude: /node_modules/,
|
|
||||||
use: [
|
|
||||||
{
|
|
||||||
loader: 'babel-loader',
|
|
||||||
options: babelOptions,
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
{
|
|
||||||
test: /\.(ts|tsx)$/,
|
|
||||||
use: [
|
|
||||||
{
|
|
||||||
loader: 'babel-loader',
|
|
||||||
options: babelOptions,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
loader: 'ts-loader',
|
|
||||||
options: {
|
|
||||||
allowTsInNodeModules: true,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
{
|
|
||||||
test: /\.(png|svg|jpeg|jpg|gif|ttf|woff|woff2|eot|pdf|otf)$/,
|
|
||||||
use: [
|
|
||||||
{
|
|
||||||
loader: 'file-loader',
|
|
||||||
options: {
|
|
||||||
name: '[name].[ext]',
|
|
||||||
outputPath: './assets/image/',
|
|
||||||
esModule: false,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
{
|
|
||||||
test: /\.(css|less)$/,
|
|
||||||
use: [
|
|
||||||
{
|
|
||||||
loader: MiniCssExtractPlugin.loader,
|
|
||||||
},
|
|
||||||
'css-loader',
|
|
||||||
{
|
|
||||||
loader: 'less-loader',
|
|
||||||
options: {
|
|
||||||
javascriptEnabled: true,
|
|
||||||
modifyVars: theme,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
optimization: Object.assign(
|
|
||||||
// {
|
|
||||||
// splitChunks: {
|
|
||||||
// cacheGroups: {
|
|
||||||
// vendor: {
|
|
||||||
// test: /[\\/]node_modules[\\/]/,
|
|
||||||
// chunks: 'all',
|
|
||||||
// name: 'vendor',
|
|
||||||
// priority: 10,
|
|
||||||
// enforce: true,
|
|
||||||
// minChunks: 1,
|
|
||||||
// maxSize: 3500000,
|
|
||||||
// },
|
|
||||||
// },
|
|
||||||
// },
|
|
||||||
// },
|
|
||||||
isProd
|
|
||||||
? {
|
|
||||||
minimizer: [
|
|
||||||
new TerserJSPlugin({
|
|
||||||
cache: true,
|
|
||||||
sourceMap: true,
|
|
||||||
}),
|
|
||||||
new OptimizeCSSAssetsPlugin({}),
|
|
||||||
],
|
|
||||||
}
|
|
||||||
: {}
|
|
||||||
),
|
|
||||||
devtool: isProd ? 'cheap-module-source-map' : '',
|
|
||||||
node: {
|
|
||||||
fs: 'empty',
|
|
||||||
net: 'empty',
|
|
||||||
tls: 'empty',
|
|
||||||
},
|
|
||||||
};
|
|
||||||
};
|
|
||||||
123
km-console/packages/layout-clusters-fe/config/webpack.common.js
Normal file
123
km-console/packages/layout-clusters-fe/config/webpack.common.js
Normal file
@@ -0,0 +1,123 @@
|
|||||||
|
const path = require('path');
|
||||||
|
const theme = require('./theme');
|
||||||
|
const webpack = require('webpack');
|
||||||
|
const HtmlWebpackPlugin = require('html-webpack-plugin');
|
||||||
|
const MiniCssExtractPlugin = require('mini-css-extract-plugin');
|
||||||
|
const ProgressBarPlugin = require('progress-bar-webpack-plugin');
|
||||||
|
const CoverHtmlWebpackPlugin = require('./CoverHtmlWebpackPlugin.js');
|
||||||
|
const CaseSensitivePathsPlugin = require('case-sensitive-paths-webpack-plugin');
|
||||||
|
|
||||||
|
const devMode = process.env.NODE_ENV === 'development';
|
||||||
|
const babelOptions = {
|
||||||
|
cacheDirectory: true,
|
||||||
|
babelrc: false,
|
||||||
|
presets: [require.resolve('@babel/preset-env'), require.resolve('@babel/preset-typescript'), require.resolve('@babel/preset-react')],
|
||||||
|
plugins: [
|
||||||
|
[require.resolve('@babel/plugin-proposal-decorators'), { legacy: true }],
|
||||||
|
[require.resolve('@babel/plugin-proposal-class-properties'), { loose: true }],
|
||||||
|
[require.resolve('@babel/plugin-proposal-private-property-in-object'), { loose: true }],
|
||||||
|
[require.resolve('@babel/plugin-proposal-private-methods'), { loose: true }],
|
||||||
|
require.resolve('@babel/plugin-proposal-export-default-from'),
|
||||||
|
require.resolve('@babel/plugin-proposal-export-namespace-from'),
|
||||||
|
require.resolve('@babel/plugin-proposal-object-rest-spread'),
|
||||||
|
require.resolve('@babel/plugin-transform-runtime'),
|
||||||
|
devMode && require.resolve('react-refresh/babel'),
|
||||||
|
devMode && [
|
||||||
|
'babel-plugin-import',
|
||||||
|
{
|
||||||
|
libraryName: 'antd',
|
||||||
|
style: true,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
].filter(Boolean),
|
||||||
|
};
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
entry: {
|
||||||
|
layout: ['./src/index.tsx'],
|
||||||
|
},
|
||||||
|
resolve: {
|
||||||
|
symlinks: false,
|
||||||
|
extensions: ['.web.jsx', '.web.js', '.ts', '.tsx', '.js', '.jsx', '.json'],
|
||||||
|
alias: {
|
||||||
|
'@src': path.resolve('src'),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
plugins: [
|
||||||
|
new CoverHtmlWebpackPlugin(),
|
||||||
|
new ProgressBarPlugin(),
|
||||||
|
new CaseSensitivePathsPlugin(),
|
||||||
|
new webpack.DefinePlugin({
|
||||||
|
'process.env': {
|
||||||
|
NODE_ENV: JSON.stringify(process.env.NODE_ENV),
|
||||||
|
RUN_ENV: JSON.stringify(process.env.RUN_ENV),
|
||||||
|
BUSINESS_VERSION: process.env.BUSINESS_VERSION === 'true',
|
||||||
|
PUBLIC_PATH: JSON.stringify(process.env.PUBLIC_PATH),
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
new HtmlWebpackPlugin({
|
||||||
|
meta: {
|
||||||
|
manifest: 'manifest.json',
|
||||||
|
},
|
||||||
|
template: './src/index.html',
|
||||||
|
favicon: path.resolve('favicon.ico'),
|
||||||
|
inject: 'body',
|
||||||
|
}),
|
||||||
|
],
|
||||||
|
module: {
|
||||||
|
rules: [
|
||||||
|
{
|
||||||
|
parser: { system: false },
|
||||||
|
},
|
||||||
|
{
|
||||||
|
test: /\.(js|jsx|ts|tsx)$/,
|
||||||
|
exclude: /node_modules/,
|
||||||
|
use: [
|
||||||
|
{
|
||||||
|
loader: 'babel-loader',
|
||||||
|
options: babelOptions,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
loader: 'ts-loader',
|
||||||
|
options: {
|
||||||
|
allowTsInNodeModules: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
test: /\.(png|svg|jpeg|jpg|gif|ttf|woff|woff2|eot|pdf|otf)$/,
|
||||||
|
use: [
|
||||||
|
{
|
||||||
|
loader: 'file-loader',
|
||||||
|
options: {
|
||||||
|
name: '[name].[ext]',
|
||||||
|
outputPath: './assets/image/',
|
||||||
|
esModule: false,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
test: /\.(css|less)$/,
|
||||||
|
use: [
|
||||||
|
MiniCssExtractPlugin.loader,
|
||||||
|
'css-loader',
|
||||||
|
{
|
||||||
|
loader: 'less-loader',
|
||||||
|
options: {
|
||||||
|
javascriptEnabled: true,
|
||||||
|
modifyVars: theme,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
node: {
|
||||||
|
fs: 'empty',
|
||||||
|
net: 'empty',
|
||||||
|
tls: 'empty',
|
||||||
|
},
|
||||||
|
stats: 'errors-warnings',
|
||||||
|
};
|
||||||
45
km-console/packages/layout-clusters-fe/config/webpack.dev.js
Normal file
45
km-console/packages/layout-clusters-fe/config/webpack.dev.js
Normal file
@@ -0,0 +1,45 @@
|
|||||||
|
const MiniCssExtractPlugin = require('mini-css-extract-plugin');
|
||||||
|
const ReactRefreshWebpackPlugin = require('@pmmmwh/react-refresh-webpack-plugin');
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
mode: 'development',
|
||||||
|
plugins: [
|
||||||
|
new MiniCssExtractPlugin(),
|
||||||
|
new ReactRefreshWebpackPlugin({
|
||||||
|
overlay: false,
|
||||||
|
}),
|
||||||
|
],
|
||||||
|
output: {
|
||||||
|
path: '/',
|
||||||
|
publicPath: '/',
|
||||||
|
filename: '[name].js',
|
||||||
|
chunkFilename: '[name].js',
|
||||||
|
library: 'layout',
|
||||||
|
libraryTarget: 'amd',
|
||||||
|
},
|
||||||
|
devServer: {
|
||||||
|
host: 'localhost',
|
||||||
|
port: 8000,
|
||||||
|
hot: true,
|
||||||
|
open: true,
|
||||||
|
openPage: 'http://localhost:8000/',
|
||||||
|
inline: true,
|
||||||
|
historyApiFallback: true,
|
||||||
|
publicPath: `http://localhost:8000/`,
|
||||||
|
headers: {
|
||||||
|
'cache-control': 'no-cache',
|
||||||
|
pragma: 'no-cache',
|
||||||
|
'Access-Control-Allow-Origin': '*',
|
||||||
|
},
|
||||||
|
proxy: {
|
||||||
|
'/ks-km/api/v3': {
|
||||||
|
changeOrigin: true,
|
||||||
|
target: 'http://localhost:8080/',
|
||||||
|
},
|
||||||
|
'/logi-security/api/v1': {
|
||||||
|
changeOrigin: true,
|
||||||
|
target: 'http://localhost:8080/',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
};
|
||||||
@@ -0,0 +1,79 @@
|
|||||||
|
const path = require('path');
|
||||||
|
const { CleanWebpackPlugin } = require('clean-webpack-plugin');
|
||||||
|
const CountPlugin = require('./CountComponentWebpackPlugin');
|
||||||
|
const MiniCssExtractPlugin = require('mini-css-extract-plugin');
|
||||||
|
const CopyWebpackPlugin = require('copy-webpack-plugin');
|
||||||
|
const TerserJSPlugin = require('terser-webpack-plugin');
|
||||||
|
const OptimizeCSSAssetsPlugin = require('optimize-css-assets-webpack-plugin');
|
||||||
|
|
||||||
|
const outputPath = path.resolve(process.cwd(), `../../../km-rest/src/main/resources/templates/layout`);
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
mode: 'production',
|
||||||
|
plugins: [
|
||||||
|
new CleanWebpackPlugin(),
|
||||||
|
new CountPlugin({
|
||||||
|
pathname: 'knowdesign',
|
||||||
|
startCount: true,
|
||||||
|
isExportExcel: false,
|
||||||
|
}),
|
||||||
|
new MiniCssExtractPlugin({
|
||||||
|
filename: '[name]-[chunkhash].css',
|
||||||
|
}),
|
||||||
|
new CopyWebpackPlugin([
|
||||||
|
{
|
||||||
|
from: path.resolve(process.cwd(), 'static'),
|
||||||
|
to: path.resolve(outputPath, '../static'),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
from: path.resolve(process.cwd(), 'favicon.ico'),
|
||||||
|
to: path.resolve(outputPath, '../favicon.ico'),
|
||||||
|
},
|
||||||
|
]),
|
||||||
|
],
|
||||||
|
externals: [
|
||||||
|
/^react$/,
|
||||||
|
/^react\/lib.*/,
|
||||||
|
/^react-dom$/,
|
||||||
|
/.*react-dom.*/,
|
||||||
|
/^single-spa$/,
|
||||||
|
/^single-spa-react$/,
|
||||||
|
/^moment$/,
|
||||||
|
/^antd$/,
|
||||||
|
/^lodash$/,
|
||||||
|
/^echarts$/,
|
||||||
|
/^react-router$/,
|
||||||
|
/^react-router-dom$/,
|
||||||
|
],
|
||||||
|
output: {
|
||||||
|
path: outputPath,
|
||||||
|
publicPath: process.env.PUBLIC_PATH + '/layout/',
|
||||||
|
filename: '[name]-[chunkhash].js',
|
||||||
|
chunkFilename: '[name]-[chunkhash].js',
|
||||||
|
library: 'layout',
|
||||||
|
libraryTarget: 'amd',
|
||||||
|
},
|
||||||
|
optimization: {
|
||||||
|
splitChunks: {
|
||||||
|
cacheGroups: {
|
||||||
|
vendor: {
|
||||||
|
test: /[\\/]node_modules[\\/]/,
|
||||||
|
chunks: 'all',
|
||||||
|
name: 'vendor',
|
||||||
|
priority: 10,
|
||||||
|
enforce: true,
|
||||||
|
minChunks: 1,
|
||||||
|
maxSize: 3000000,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
minimizer: [
|
||||||
|
new TerserJSPlugin({
|
||||||
|
cache: true,
|
||||||
|
sourceMap: true,
|
||||||
|
}),
|
||||||
|
new OptimizeCSSAssetsPlugin({}),
|
||||||
|
],
|
||||||
|
},
|
||||||
|
devtool: 'none',
|
||||||
|
};
|
||||||
@@ -1,5 +0,0 @@
|
|||||||
var path = require('path');
|
|
||||||
|
|
||||||
module.exports = {
|
|
||||||
react: path.resolve('./node_modules/react'),
|
|
||||||
};
|
|
||||||
14847
km-console/packages/layout-clusters-fe/package-lock.json
generated
Normal file
14847
km-console/packages/layout-clusters-fe/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
@@ -17,7 +17,7 @@
|
|||||||
"scripts": {
|
"scripts": {
|
||||||
"test": "echo \"Error: run tests from root\" && exit 1",
|
"test": "echo \"Error: run tests from root\" && exit 1",
|
||||||
"start": "cross-env NODE_ENV=development webpack-dev-server",
|
"start": "cross-env NODE_ENV=development webpack-dev-server",
|
||||||
"build": "rm -rf ../../pub/layout & cross-env NODE_ENV=production webpack --max_old_space_size=8000"
|
"build": "cross-env NODE_ENV=production webpack --max_old_space_size=8000"
|
||||||
},
|
},
|
||||||
"browserslist": {
|
"browserslist": {
|
||||||
"production": [
|
"production": [
|
||||||
@@ -39,6 +39,7 @@
|
|||||||
"@types/react-copy-to-clipboard": "^5.0.2",
|
"@types/react-copy-to-clipboard": "^5.0.2",
|
||||||
"@types/react-dom": "^17.0.11",
|
"@types/react-dom": "^17.0.11",
|
||||||
"@types/react-highlight-words": "^0.16.0",
|
"@types/react-highlight-words": "^0.16.0",
|
||||||
|
"@types/react-router": "5.1.18",
|
||||||
"@types/react-router-dom": "^5.3.3",
|
"@types/react-router-dom": "^5.3.3",
|
||||||
"@types/react-transition-group": "^4.2.2",
|
"@types/react-transition-group": "^4.2.2",
|
||||||
"@types/react-virtualized": "^9.21.13",
|
"@types/react-virtualized": "^9.21.13",
|
||||||
@@ -48,6 +49,7 @@
|
|||||||
"crypto-js": "^4.1.1",
|
"crypto-js": "^4.1.1",
|
||||||
"dotenv": "^16.0.1",
|
"dotenv": "^16.0.1",
|
||||||
"html-webpack-plugin": "^4.0.0",
|
"html-webpack-plugin": "^4.0.0",
|
||||||
|
"knowdesign": "^1.3.7",
|
||||||
"lodash": "^4.17.21",
|
"lodash": "^4.17.21",
|
||||||
"moment": "^2.24.0",
|
"moment": "^2.24.0",
|
||||||
"react": "16.12.0",
|
"react": "16.12.0",
|
||||||
@@ -58,8 +60,8 @@
|
|||||||
"react-joyride": "^2.5.0",
|
"react-joyride": "^2.5.0",
|
||||||
"single-spa": "5.9.3",
|
"single-spa": "5.9.3",
|
||||||
"single-spa-react": "2.14.0",
|
"single-spa-react": "2.14.0",
|
||||||
"webpack-bundle-analyzer": "^4.5.0",
|
"tree-changes": "0.9.1",
|
||||||
"knowdesign": "1.3.7"
|
"webpack-bundle-analyzer": "^4.5.0"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@babel/core": "^7.5.5",
|
"@babel/core": "^7.5.5",
|
||||||
|
|||||||
@@ -4,8 +4,8 @@ import React, { useState, useEffect, useLayoutEffect } from 'react';
|
|||||||
import { BrowserRouter, Switch, Route, useLocation, useHistory } from 'react-router-dom';
|
import { BrowserRouter, Switch, Route, useLocation, useHistory } from 'react-router-dom';
|
||||||
import { get as lodashGet } from 'lodash';
|
import { get as lodashGet } from 'lodash';
|
||||||
import { DProLayout, AppContainer, IconFont, Menu, Utils, Page403, Page404, Page500, Modal } from 'knowdesign';
|
import { DProLayout, AppContainer, IconFont, Menu, Utils, Page403, Page404, Page500, Modal } from 'knowdesign';
|
||||||
import dantdZhCN from 'knowdesign/lib/locale/zh_CN';
|
import dantdZhCN from 'knowdesign/es/locale/zh_CN';
|
||||||
import dantdEnUS from 'knowdesign/lib/locale/en_US';
|
import dantdEnUS from 'knowdesign/es/locale/en_US';
|
||||||
import { DotChartOutlined } from '@ant-design/icons';
|
import { DotChartOutlined } from '@ant-design/icons';
|
||||||
import { licenseEventBus } from './constants/axiosConfig';
|
import { licenseEventBus } from './constants/axiosConfig';
|
||||||
import intlZhCN from './locales/zh';
|
import intlZhCN from './locales/zh';
|
||||||
@@ -59,6 +59,7 @@ const logout = () => {
|
|||||||
}).then((res) => {
|
}).then((res) => {
|
||||||
window.location.href = '/login';
|
window.location.href = '/login';
|
||||||
});
|
});
|
||||||
|
localStorage.removeItem('userInfo');
|
||||||
};
|
};
|
||||||
|
|
||||||
const LicenseLimitModal = () => {
|
const LicenseLimitModal = () => {
|
||||||
@@ -117,7 +118,7 @@ const AppContent = (props: { setlanguage: (language: string) => void }) => {
|
|||||||
<DProLayout.Container
|
<DProLayout.Container
|
||||||
headerProps={{
|
headerProps={{
|
||||||
title: (
|
title: (
|
||||||
<div>
|
<div style={{ cursor: 'pointer' }}>
|
||||||
<img className="header-logo" src={ksLogo} />
|
<img className="header-logo" src={ksLogo} />
|
||||||
</div>
|
</div>
|
||||||
),
|
),
|
||||||
|
|||||||
@@ -90,7 +90,7 @@ export default () => {
|
|||||||
return (
|
return (
|
||||||
<div>
|
<div>
|
||||||
<span style={{ display: 'inline-block', marginRight: '8px' }}>Similar Config</span>
|
<span style={{ display: 'inline-block', marginRight: '8px' }}>Similar Config</span>
|
||||||
<Tooltip overlayClassName="rebalance-tooltip" title="所有broker配置是否一致">
|
<Tooltip overlayClassName="rebalance-tooltip" title="所有Broker配置是否一致">
|
||||||
<QuestionCircleOutlined />
|
<QuestionCircleOutlined />
|
||||||
</Tooltip>
|
</Tooltip>
|
||||||
</div>
|
</div>
|
||||||
@@ -111,7 +111,7 @@ export default () => {
|
|||||||
];
|
];
|
||||||
setCardData(cordRightMap);
|
setCardData(cordRightMap);
|
||||||
});
|
});
|
||||||
Promise.all([brokerMetric, brokersState]).then((res) => {
|
Promise.all([brokerMetric, brokersState]).finally(() => {
|
||||||
setLoading(false);
|
setLoading(false);
|
||||||
});
|
});
|
||||||
}, [routeParams.clusterId]);
|
}, [routeParams.clusterId]);
|
||||||
|
|||||||
@@ -1,12 +1,13 @@
|
|||||||
import React, { useState, useEffect } from 'react';
|
import React, { useState, useEffect } from 'react';
|
||||||
import { useParams } from 'react-router-dom';
|
import { useParams } from 'react-router-dom';
|
||||||
import CardBar from './index';
|
import CardBar from './index';
|
||||||
import { IconFont, Tag, Utils, Tooltip, Popover } from 'knowdesign';
|
import { IconFont, Tag, Utils, Tooltip, Popover, AppContainer } from 'knowdesign';
|
||||||
import api from '@src/api';
|
import api from '@src/api';
|
||||||
import StateChart from './StateChart';
|
import StateChart from './StateChart';
|
||||||
import ClusterNorms from '@src/pages/LoadRebalance/ClusterNorms';
|
import ClusterNorms from '@src/pages/LoadRebalance/ClusterNorms';
|
||||||
import { QuestionCircleOutlined } from '@ant-design/icons';
|
import { QuestionCircleOutlined } from '@ant-design/icons';
|
||||||
import moment from 'moment';
|
import moment from 'moment';
|
||||||
|
import { ClustersPermissionMap } from '@src/pages/CommonConfig';
|
||||||
|
|
||||||
const transUnitTimePro = (ms: number, num = 0) => {
|
const transUnitTimePro = (ms: number, num = 0) => {
|
||||||
if (!ms) return '';
|
if (!ms) return '';
|
||||||
@@ -23,6 +24,7 @@ const transUnitTimePro = (ms: number, num = 0) => {
|
|||||||
};
|
};
|
||||||
|
|
||||||
const LoadRebalanceCardBar = (props: any) => {
|
const LoadRebalanceCardBar = (props: any) => {
|
||||||
|
const [global] = AppContainer.useGlobalValue();
|
||||||
const { clusterId } = useParams<{
|
const { clusterId } = useParams<{
|
||||||
clusterId: string;
|
clusterId: string;
|
||||||
}>();
|
}>();
|
||||||
@@ -53,12 +55,14 @@ const LoadRebalanceCardBar = (props: any) => {
|
|||||||
return (
|
return (
|
||||||
<div style={{ height: '20px' }}>
|
<div style={{ height: '20px' }}>
|
||||||
<span style={{ display: 'inline-block', marginRight: '8px' }}>State</span>
|
<span style={{ display: 'inline-block', marginRight: '8px' }}>State</span>
|
||||||
<IconFont
|
{global.hasPermission(ClustersPermissionMap.REBALANCE_SETTING) && (
|
||||||
className="cutomIcon-config"
|
<IconFont
|
||||||
style={{ fontSize: '15px' }}
|
className="cutomIcon-config"
|
||||||
onClick={() => setNormsVisible(true)}
|
style={{ fontSize: '15px' }}
|
||||||
type="icon-shezhi"
|
onClick={() => setNormsVisible(true)}
|
||||||
></IconFont>
|
type="icon-shezhi"
|
||||||
|
></IconFont>
|
||||||
|
)}
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
},
|
},
|
||||||
|
|||||||
@@ -1,18 +1,20 @@
|
|||||||
import React, { forwardRef, useEffect, useImperativeHandle, useRef, useState } from 'react';
|
import React, { forwardRef, useEffect, useImperativeHandle, useMemo, useRef, useState } from 'react';
|
||||||
import { AppContainer, Button, Drawer, IconFont, message, Spin, Table, SingleChart, Utils, Tooltip } from 'knowdesign';
|
import { AppContainer, Drawer, Spin, Table, SingleChart, Utils, Tooltip } from 'knowdesign';
|
||||||
import moment from 'moment';
|
import moment from 'moment';
|
||||||
import api, { MetricType } from '@src/api';
|
import api, { MetricType } from '@src/api';
|
||||||
import { useParams } from 'react-router-dom';
|
import { useParams } from 'react-router-dom';
|
||||||
import { debounce } from 'lodash';
|
import { debounce } from 'lodash';
|
||||||
import { MetricDefaultChartDataType, MetricChartDataType, formatChartData, getDetailChartConfig } from './config';
|
import { MetricDefaultChartDataType, MetricChartDataType, formatChartData, getDetailChartConfig } from './config';
|
||||||
import { UNIT_MAP } from '@src/constants/chartConfig';
|
import { UNIT_MAP } from '@src/constants/chartConfig';
|
||||||
import { CloseOutlined } from '@ant-design/icons';
|
import RenderEmpty from '../RenderEmpty';
|
||||||
|
|
||||||
interface ChartDetailProps {
|
interface ChartDetailProps {
|
||||||
metricType: MetricType;
|
metricType: MetricType;
|
||||||
metricName: string;
|
metricName: string;
|
||||||
queryLines: string[];
|
queryLines: string[];
|
||||||
onClose: () => void;
|
setSliderRange: (range: string) => void;
|
||||||
|
// eslint-disable-next-line @typescript-eslint/ban-types
|
||||||
|
setDisposeChartInstance: Function;
|
||||||
}
|
}
|
||||||
|
|
||||||
interface MetricTableInfo {
|
interface MetricTableInfo {
|
||||||
@@ -24,6 +26,18 @@ interface MetricTableInfo {
|
|||||||
color: string;
|
color: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
interface ChartInfo {
|
||||||
|
chartInstance?: echarts.ECharts;
|
||||||
|
isLoadingAdditionData?: boolean;
|
||||||
|
isLoadedFullData?: boolean;
|
||||||
|
fullTimeRange?: readonly [number, number];
|
||||||
|
curTimeRange?: readonly [number, number];
|
||||||
|
sliderPos?: readonly [number, number];
|
||||||
|
transformUnit?: [string, number];
|
||||||
|
fullMetricData?: MetricChartDataType;
|
||||||
|
oldDataZoomOption?: any;
|
||||||
|
}
|
||||||
|
|
||||||
interface DataZoomEventProps {
|
interface DataZoomEventProps {
|
||||||
type: 'datazoom';
|
type: 'datazoom';
|
||||||
// 缩放的开始位置的百分比,0 - 100
|
// 缩放的开始位置的百分比,0 - 100
|
||||||
@@ -34,8 +48,6 @@ interface DataZoomEventProps {
|
|||||||
|
|
||||||
// 缩放区默认选中范围比例(0.01~1)
|
// 缩放区默认选中范围比例(0.01~1)
|
||||||
const DATA_ZOOM_DEFAULT_SCALE = 0.25;
|
const DATA_ZOOM_DEFAULT_SCALE = 0.25;
|
||||||
// 选中范围最少展示的时间长度(默认 10 分钟),单位: ms
|
|
||||||
const LEAST_SELECTED_TIME_RANGE = 1 * 60 * 1000;
|
|
||||||
// 单次向服务器请求数据的范围(默认 6 小时,超过后采集频率间隔会变长),单位: ms
|
// 单次向服务器请求数据的范围(默认 6 小时,超过后采集频率间隔会变长),单位: ms
|
||||||
const DEFAULT_REQUEST_TIME_RANGE = 6 * 60 * 60 * 1000;
|
const DEFAULT_REQUEST_TIME_RANGE = 6 * 60 * 60 * 1000;
|
||||||
// 采样间隔,影响前端补点逻辑,单位: ms
|
// 采样间隔,影响前端补点逻辑,单位: ms
|
||||||
@@ -47,70 +59,15 @@ const DEFAULT_ENTER_TIME_RANGE = 2 * 60 * 60 * 1000;
|
|||||||
// 预缓存数据阈值,图表展示数据的开始时间处于前端缓存数据的时间范围的前 40% 时,向服务器请求数据
|
// 预缓存数据阈值,图表展示数据的开始时间处于前端缓存数据的时间范围的前 40% 时,向服务器请求数据
|
||||||
const PRECACHE_THRESHOLD = 0.4;
|
const PRECACHE_THRESHOLD = 0.4;
|
||||||
|
|
||||||
// 表格列
|
|
||||||
const colunms = [
|
|
||||||
{
|
|
||||||
title: 'Host',
|
|
||||||
dataIndex: 'name',
|
|
||||||
width: 200,
|
|
||||||
render(name: string, record: any) {
|
|
||||||
return (
|
|
||||||
<div style={{ display: 'flex', alignItems: 'center' }}>
|
|
||||||
<div style={{ width: 8, height: 2, marginRight: 4, background: record.color }}></div>
|
|
||||||
<span>{name}</span>
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
title: 'Avg',
|
|
||||||
dataIndex: 'avg',
|
|
||||||
width: 120,
|
|
||||||
render(num: number) {
|
|
||||||
return num.toFixed(2);
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
title: 'Max',
|
|
||||||
dataIndex: 'max',
|
|
||||||
width: 120,
|
|
||||||
render(num: number, record: any) {
|
|
||||||
return (
|
|
||||||
<div>
|
|
||||||
<span>{num.toFixed(2)}</span>
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
title: 'Min',
|
|
||||||
dataIndex: 'min',
|
|
||||||
width: 120,
|
|
||||||
render(num: number, record: any) {
|
|
||||||
return (
|
|
||||||
<div>
|
|
||||||
<span>{num.toFixed(2)}</span>
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
title: 'Latest',
|
|
||||||
dataIndex: 'latest',
|
|
||||||
width: 120,
|
|
||||||
render(latest: number[]) {
|
|
||||||
return `${latest[1].toFixed(2)}`;
|
|
||||||
},
|
|
||||||
},
|
|
||||||
];
|
|
||||||
|
|
||||||
const ChartDetail = (props: ChartDetailProps) => {
|
const ChartDetail = (props: ChartDetailProps) => {
|
||||||
const [global] = AppContainer.useGlobalValue();
|
const [global] = AppContainer.useGlobalValue();
|
||||||
const { clusterId } = useParams<{
|
const { clusterId } = useParams<{
|
||||||
clusterId: string;
|
clusterId: string;
|
||||||
}>();
|
}>();
|
||||||
const { metricType, metricName, queryLines, onClose } = props;
|
const { metricType, metricName, queryLines, setSliderRange, setDisposeChartInstance } = props;
|
||||||
|
|
||||||
|
// 初始化拖拽防抖函数
|
||||||
|
const debouncedZoomDrag = useRef(null);
|
||||||
// 存储图表相关的不需要触发渲染的数据,用于计算图表展示状态并进行操作
|
// 存储图表相关的不需要触发渲染的数据,用于计算图表展示状态并进行操作
|
||||||
const chartInfo = useRef(
|
const chartInfo = useRef(
|
||||||
(() => {
|
(() => {
|
||||||
@@ -119,16 +76,16 @@ const ChartDetail = (props: ChartDetailProps) => {
|
|||||||
const curTimeRange = [curTime - DEFAULT_ENTER_TIME_RANGE, curTime] as const;
|
const curTimeRange = [curTime - DEFAULT_ENTER_TIME_RANGE, curTime] as const;
|
||||||
|
|
||||||
return {
|
return {
|
||||||
chartInstance: undefined as echarts.ECharts,
|
chartInstance: undefined,
|
||||||
|
isLoadingAdditionData: false,
|
||||||
isLoadedFullData: false,
|
isLoadedFullData: false,
|
||||||
fullTimeRange: curTimeRange,
|
fullTimeRange: curTimeRange,
|
||||||
fullMetricData: {} as MetricChartDataType,
|
fullMetricData: {} as MetricChartDataType,
|
||||||
curTimeRange,
|
curTimeRange,
|
||||||
oldDataZoomOption: {} as any,
|
oldDataZoomOption: {},
|
||||||
sliderPos: [0, 0] as readonly [number, number],
|
sliderPos: [0, 0],
|
||||||
sliderRange: '',
|
transformUnit: undefined,
|
||||||
transformUnit: undefined as [string, number],
|
} as ChartInfo;
|
||||||
};
|
|
||||||
})()
|
})()
|
||||||
);
|
);
|
||||||
|
|
||||||
@@ -137,8 +94,76 @@ const ChartDetail = (props: ChartDetailProps) => {
|
|||||||
const [curMetricData, setCurMetricData] = useState<MetricChartDataType>();
|
const [curMetricData, setCurMetricData] = useState<MetricChartDataType>();
|
||||||
// 图表数据的各项计算指标
|
// 图表数据的各项计算指标
|
||||||
const [tableInfo, setTableInfo] = useState<MetricTableInfo[]>([]);
|
const [tableInfo, setTableInfo] = useState<MetricTableInfo[]>([]);
|
||||||
// 选中展示的图表
|
const [linesStatus, setLinesStatus] = useState<{
|
||||||
const [selectedLines, setSelectedLines] = useState<string[]>([]);
|
[lineName: string]: boolean;
|
||||||
|
}>({});
|
||||||
|
|
||||||
|
// 表格列
|
||||||
|
const colunms = useMemo(
|
||||||
|
() => [
|
||||||
|
{
|
||||||
|
title: metricType === MetricType.Broker ? 'Host' : 'Topic',
|
||||||
|
dataIndex: 'name',
|
||||||
|
width: 200,
|
||||||
|
render(name: string, record: any) {
|
||||||
|
return (
|
||||||
|
<div style={{ display: 'flex', alignItems: 'center' }}>
|
||||||
|
<div style={{ width: 8, height: 2, marginRight: 4, background: record.color }}></div>
|
||||||
|
<span>{name}</span>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
title: 'Avg',
|
||||||
|
dataIndex: 'avg',
|
||||||
|
width: 120,
|
||||||
|
render(num: number) {
|
||||||
|
return num.toFixed(2);
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
title: 'Max',
|
||||||
|
dataIndex: 'max',
|
||||||
|
width: 120,
|
||||||
|
render(num: number, record: any) {
|
||||||
|
return (
|
||||||
|
<div>
|
||||||
|
<span>{num.toFixed(2)}</span>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
title: 'Min',
|
||||||
|
dataIndex: 'min',
|
||||||
|
width: 120,
|
||||||
|
render(num: number, record: any) {
|
||||||
|
return (
|
||||||
|
<div>
|
||||||
|
<span>{num.toFixed(2)}</span>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
title: 'Latest',
|
||||||
|
dataIndex: 'latest',
|
||||||
|
width: 120,
|
||||||
|
render(latest: number[]) {
|
||||||
|
return `${latest[1].toFixed(2)}`;
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
[metricType]
|
||||||
|
);
|
||||||
|
|
||||||
|
const updateChartInfo = (changedInfo: ChartInfo) => {
|
||||||
|
chartInfo.current = {
|
||||||
|
...chartInfo.current,
|
||||||
|
...changedInfo,
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
// 请求图表数据
|
// 请求图表数据
|
||||||
const getMetricChartData = ([startTime, endTime]: readonly [number, number]) => {
|
const getMetricChartData = ([startTime, endTime]: readonly [number, number]) => {
|
||||||
@@ -175,11 +200,10 @@ const ChartDetail = (props: ChartDetailProps) => {
|
|||||||
|
|
||||||
// 如果滑块整体拖动,则只更新拖动后滑块的位(保留小数点后三位是防止低位值的干扰)
|
// 如果滑块整体拖动,则只更新拖动后滑块的位(保留小数点后三位是防止低位值的干扰)
|
||||||
if (oldScale.toFixed(3) === newScale.toFixed(3)) {
|
if (oldScale.toFixed(3) === newScale.toFixed(3)) {
|
||||||
chartInfo.current = {
|
updateChartInfo({
|
||||||
...chartInfo.current,
|
|
||||||
sliderPos: [newStartSliderPos, newEndSliderPos],
|
sliderPos: [newStartSliderPos, newEndSliderPos],
|
||||||
oldDataZoomOption: newDataZoomOption,
|
oldDataZoomOption: newDataZoomOption,
|
||||||
};
|
});
|
||||||
renderTableInfo();
|
renderTableInfo();
|
||||||
|
|
||||||
return false;
|
return false;
|
||||||
@@ -217,23 +241,14 @@ const ChartDetail = (props: ChartDetailProps) => {
|
|||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
// 3. 滑块拖动后缩放比例变小
|
// 3. 滑块拖动后缩放比例变小
|
||||||
// 判断拖动后选择的时间范围并提示
|
|
||||||
if (newEndSliderPos - newStartSliderPos < LEAST_SELECTED_TIME_RANGE) {
|
|
||||||
// TODO: 补充逻辑
|
|
||||||
updateChartData([oldStartTimestamp, oldEndTimestamp], [oldStartSliderPos, oldEndSliderPos]);
|
|
||||||
message.warning(`当前选择范围小于 ${LEAST_SELECTED_TIME_RANGE / 60 / 1000} 分钟,图表可能无数据`);
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
const isOldLarger = oldScale - DATA_ZOOM_DEFAULT_SCALE > 0.01;
|
const isOldLarger = oldScale - DATA_ZOOM_DEFAULT_SCALE > 0.01;
|
||||||
const isNewLarger = newScale - DATA_ZOOM_DEFAULT_SCALE > 0.01;
|
const isNewLarger = newScale - DATA_ZOOM_DEFAULT_SCALE > 0.01;
|
||||||
if (isOldLarger && isNewLarger) {
|
if (isOldLarger && isNewLarger) {
|
||||||
// 如果拖拽前后比例均高于默认比例,则不对图表展示范围进行操作
|
// 如果拖拽前后比例均高于默认比例,则不对图表展示范围进行操作
|
||||||
chartInfo.current = {
|
updateChartInfo({
|
||||||
...chartInfo.current,
|
|
||||||
sliderPos: [newStartSliderPos, newEndSliderPos],
|
sliderPos: [newStartSliderPos, newEndSliderPos],
|
||||||
oldDataZoomOption: newDataZoomOption,
|
oldDataZoomOption: newDataZoomOption,
|
||||||
};
|
});
|
||||||
renderTableInfo();
|
renderTableInfo();
|
||||||
return true;
|
return true;
|
||||||
} else {
|
} else {
|
||||||
@@ -259,79 +274,98 @@ const ChartDetail = (props: ChartDetailProps) => {
|
|||||||
const updateChartData = (timeRange: [number, number], sliderPos: [number, number]) => {
|
const updateChartData = (timeRange: [number, number], sliderPos: [number, number]) => {
|
||||||
const {
|
const {
|
||||||
fullTimeRange: [fullStartTimestamp, fullEndTimestamp],
|
fullTimeRange: [fullStartTimestamp, fullEndTimestamp],
|
||||||
fullMetricData,
|
|
||||||
isLoadedFullData,
|
isLoadedFullData,
|
||||||
} = chartInfo.current;
|
} = chartInfo.current;
|
||||||
let leftBoundaryTimestamp = Math.floor(timeRange[0]);
|
const leftBoundaryTimestamp = Math.floor(timeRange[0]);
|
||||||
const isNeedCacheExtraData = leftBoundaryTimestamp < fullStartTimestamp + (fullEndTimestamp - fullStartTimestamp) * PRECACHE_THRESHOLD;
|
const isNeedCacheExtraData = leftBoundaryTimestamp < fullStartTimestamp + (fullEndTimestamp - fullStartTimestamp) * PRECACHE_THRESHOLD;
|
||||||
|
|
||||||
let isRendered = false;
|
let isRendered = false;
|
||||||
// 如果本地存储的数据足够展示或者已经获取到所有数据,则展示数据
|
// 如果本地存储的数据足够展示或者已经获取到所有数据,则展示数据
|
||||||
if (leftBoundaryTimestamp > fullStartTimestamp || isLoadedFullData) {
|
if (leftBoundaryTimestamp > fullStartTimestamp || isLoadedFullData) {
|
||||||
chartInfo.current = {
|
updateChartInfo({
|
||||||
...chartInfo.current,
|
|
||||||
curTimeRange: [leftBoundaryTimestamp > fullStartTimestamp ? leftBoundaryTimestamp : fullStartTimestamp, timeRange[1]],
|
curTimeRange: [leftBoundaryTimestamp > fullStartTimestamp ? leftBoundaryTimestamp : fullStartTimestamp, timeRange[1]],
|
||||||
sliderPos,
|
sliderPos,
|
||||||
};
|
});
|
||||||
renderNewMetricData();
|
renderNewMetricData();
|
||||||
isRendered = true;
|
isRendered = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!isLoadedFullData && isNeedCacheExtraData) {
|
if (!isLoadedFullData && isNeedCacheExtraData) {
|
||||||
// 向服务器请求新的数据缓存
|
getAdditionChartData(!isRendered, leftBoundaryTimestamp, timeRange[1], sliderPos);
|
||||||
let reqEndTime = fullStartTimestamp;
|
}
|
||||||
const requestArr: any[] = [];
|
};
|
||||||
const requestTimeRanges: [number, number][] = [];
|
|
||||||
for (let i = 0; i < DEFAULT_REQUEST_COUNT; i++) {
|
|
||||||
setTimeout(() => {
|
|
||||||
const nextReqEndTime = reqEndTime - DEFAULT_REQUEST_TIME_RANGE;
|
|
||||||
requestArr.unshift(getMetricChartData([nextReqEndTime, reqEndTime]));
|
|
||||||
requestTimeRanges.unshift([nextReqEndTime, reqEndTime]);
|
|
||||||
reqEndTime = nextReqEndTime;
|
|
||||||
|
|
||||||
// 当最后一次请求发送后,处理返回
|
// 缓存增量的图表数据
|
||||||
if (i === DEFAULT_REQUEST_COUNT - 1) {
|
const getAdditionChartData = (
|
||||||
Promise.all(requestArr).then((resList) => {
|
needRender: boolean,
|
||||||
let isSettle = -1;
|
leftBoundaryTimestamp: number,
|
||||||
// 填充增量的图表数据
|
rightBoundaryTimestamp: number,
|
||||||
resList.forEach((res: MetricDefaultChartDataType[], i) => {
|
sliderPos?: [number, number]
|
||||||
// 图表没有返回数据的情况
|
) => {
|
||||||
if (!res?.length) {
|
const {
|
||||||
if (isSettle === -1) {
|
fullTimeRange: [fullStartTimestamp, fullEndTimestamp],
|
||||||
chartInfo.current = {
|
fullMetricData,
|
||||||
...chartInfo.current,
|
isLoadingAdditionData,
|
||||||
// 标记数据已经全部加载完毕
|
} = chartInfo.current;
|
||||||
isLoadedFullData: true,
|
|
||||||
};
|
|
||||||
isSettle = i;
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
resolveAdditionChartData(res, requestTimeRanges[i]);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
// 更新左侧边界为当前已获取到数据的最小边界
|
|
||||||
const curLocalStartTimestamp = Number(fullMetricData.metricLines.map((line) => line.data[0][0]).sort()[0]);
|
|
||||||
if (leftBoundaryTimestamp < curLocalStartTimestamp) {
|
|
||||||
leftBoundaryTimestamp = curLocalStartTimestamp;
|
|
||||||
}
|
|
||||||
|
|
||||||
chartInfo.current = {
|
// 当前有缓存数据的任务时,直接退出
|
||||||
...chartInfo.current,
|
if (isLoadingAdditionData) {
|
||||||
fullTimeRange: [reqEndTime - DEFAULT_REQUEST_TIME_RANGE, fullEndTimestamp],
|
return false;
|
||||||
sliderPos,
|
}
|
||||||
};
|
updateChartInfo({
|
||||||
if (!isRendered) {
|
isLoadingAdditionData: true,
|
||||||
chartInfo.current = {
|
});
|
||||||
...chartInfo.current,
|
|
||||||
curTimeRange: [leftBoundaryTimestamp, timeRange[1]],
|
let reqEndTime = fullStartTimestamp;
|
||||||
};
|
const requestArr: any[] = [];
|
||||||
renderNewMetricData();
|
const requestTimeRanges: [number, number][] = [];
|
||||||
|
for (let i = 0; i < DEFAULT_REQUEST_COUNT; i++) {
|
||||||
|
setTimeout(() => {
|
||||||
|
const nextReqEndTime = reqEndTime - DEFAULT_REQUEST_TIME_RANGE;
|
||||||
|
requestArr.push(getMetricChartData([nextReqEndTime, reqEndTime]));
|
||||||
|
requestTimeRanges.push([nextReqEndTime, reqEndTime]);
|
||||||
|
reqEndTime = nextReqEndTime;
|
||||||
|
|
||||||
|
// 当最后一次请求发送后,处理返回
|
||||||
|
if (i === DEFAULT_REQUEST_COUNT - 1) {
|
||||||
|
Promise.all(requestArr).then((resList) => {
|
||||||
|
// 填充增量的图表数据
|
||||||
|
resList.forEach((res: MetricDefaultChartDataType[], i) => {
|
||||||
|
// 最后一个请求返回数据为空时,认为已获取到全部图表数据
|
||||||
|
if (!res?.length) {
|
||||||
|
// 标记数据已经全部加载完毕
|
||||||
|
i === resList.length - 1 &&
|
||||||
|
updateChartInfo({
|
||||||
|
isLoadedFullData: true,
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
// TODO: res 可能为 [],需要处理兼容
|
||||||
|
resolveAdditionChartData(res, requestTimeRanges[i]);
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
}
|
|
||||||
}, i * 10);
|
// 更新左侧边界为当前已获取到数据的最小边界
|
||||||
}
|
const curLocalStartTimestamp = Number(fullMetricData.metricLines.map((line) => line?.data?.[0]?.[0]).sort()[0]);
|
||||||
|
if (leftBoundaryTimestamp < curLocalStartTimestamp) {
|
||||||
|
leftBoundaryTimestamp = curLocalStartTimestamp;
|
||||||
|
}
|
||||||
|
|
||||||
|
updateChartInfo({
|
||||||
|
fullTimeRange: [reqEndTime - DEFAULT_REQUEST_TIME_RANGE, fullEndTimestamp],
|
||||||
|
...(sliderPos ? { sliderPos } : {}),
|
||||||
|
isLoadingAdditionData: false,
|
||||||
|
});
|
||||||
|
if (needRender) {
|
||||||
|
updateChartInfo({
|
||||||
|
curTimeRange: [leftBoundaryTimestamp, rightBoundaryTimestamp],
|
||||||
|
});
|
||||||
|
renderNewMetricData();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}, i * 10);
|
||||||
}
|
}
|
||||||
|
return true;
|
||||||
};
|
};
|
||||||
|
|
||||||
// 处理增量图表数据
|
// 处理增量图表数据
|
||||||
@@ -362,7 +396,7 @@ const ChartDetail = (props: ChartDetailProps) => {
|
|||||||
});
|
});
|
||||||
};
|
};
|
||||||
|
|
||||||
// 根据需要展示的时间范围过滤出对应的数据展示
|
// 根据需要展示的时间范围过滤出对应的数据
|
||||||
const renderNewMetricData = () => {
|
const renderNewMetricData = () => {
|
||||||
const { fullMetricData, curTimeRange } = chartInfo.current;
|
const { fullMetricData, curTimeRange } = chartInfo.current;
|
||||||
const newMetricData = { ...fullMetricData };
|
const newMetricData = { ...fullMetricData };
|
||||||
@@ -378,12 +412,25 @@ const ChartDetail = (props: ChartDetailProps) => {
|
|||||||
});
|
});
|
||||||
newMetricData.metricLines[i] = line;
|
newMetricData.metricLines[i] = line;
|
||||||
});
|
});
|
||||||
|
|
||||||
// 只过滤出当前时间段有数据点的线条,确保 Table 统一展示
|
// 只过滤出当前时间段有数据点的线条,确保 Table 统一展示
|
||||||
newMetricData.metricLines = newMetricData.metricLines.filter((line) => line.data.length);
|
newMetricData.metricLines = newMetricData.metricLines.filter((line) => line.data.length);
|
||||||
setCurMetricData(newMetricData);
|
setCurMetricData(newMetricData);
|
||||||
|
|
||||||
|
setLinesStatus((curStatus) => {
|
||||||
|
// 过滤维持线条选中状态
|
||||||
|
const newLinesStatus = { ...curStatus };
|
||||||
|
const newLineNames = newMetricData.metricLines.map((line) => line.name);
|
||||||
|
newLineNames.forEach((name) => {
|
||||||
|
if (newLinesStatus[name] === undefined) {
|
||||||
|
newLinesStatus[name] = false;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
return newLinesStatus;
|
||||||
|
});
|
||||||
};
|
};
|
||||||
|
|
||||||
// 计算当前选中范围
|
// 计算展示当前拖拽轴选中的时间范围
|
||||||
const calculateSliderRange = () => {
|
const calculateSliderRange = () => {
|
||||||
const { sliderPos } = chartInfo.current;
|
const { sliderPos } = chartInfo.current;
|
||||||
let minutes = Number(((sliderPos[1] - sliderPos[0]) / 60 / 1000).toFixed(2));
|
let minutes = Number(((sliderPos[1] - sliderPos[0]) / 60 / 1000).toFixed(2));
|
||||||
@@ -398,13 +445,11 @@ const ChartDetail = (props: ChartDetailProps) => {
|
|||||||
hours = Number((hours % 24).toFixed(2));
|
hours = Number((hours % 24).toFixed(2));
|
||||||
}
|
}
|
||||||
|
|
||||||
chartInfo.current = {
|
const sliderRange = ` 当前选中范围: ${days > 0 ? `${days} 天 ` : ''}${hours > 0 ? `${hours} 小时 ` : ''}${minutes} 分钟`;
|
||||||
...chartInfo.current,
|
setSliderRange(sliderRange);
|
||||||
sliderRange: ` 当前选中范围: ${days > 0 ? `${days} 天 ` : ''}${hours > 0 ? `${hours} 小时 ` : ''}${minutes} 分钟`,
|
|
||||||
};
|
|
||||||
};
|
};
|
||||||
|
|
||||||
// 遍历图表,获取需要的指标数据,展示到 Table
|
// 遍历图表,计算得到指标聚合数据展示到表格
|
||||||
const renderTableInfo = () => {
|
const renderTableInfo = () => {
|
||||||
const tableData: MetricTableInfo[] = [];
|
const tableData: MetricTableInfo[] = [];
|
||||||
const { sliderPos, chartInstance } = chartInfo.current;
|
const { sliderPos, chartInstance } = chartInfo.current;
|
||||||
@@ -447,140 +492,131 @@ const ChartDetail = (props: ChartDetailProps) => {
|
|||||||
|
|
||||||
calculateSliderRange();
|
calculateSliderRange();
|
||||||
setTableInfo(tableData);
|
setTableInfo(tableData);
|
||||||
setSelectedLines(tableData.map((line) => line.name));
|
|
||||||
};
|
};
|
||||||
|
|
||||||
const tableLineChange = (keys: string[]) => {
|
const tableLineChange = (keys: string[]) => {
|
||||||
const updatedLines: { [name: string]: boolean } = {};
|
const newLinesStatus = { ...linesStatus };
|
||||||
selectedLines.forEach((name) => !keys.includes(name) && (updatedLines[name] = false));
|
|
||||||
keys.forEach((name) => !selectedLines.includes(name) && (updatedLines[name] = true));
|
|
||||||
|
|
||||||
// 更新
|
Object.entries(newLinesStatus).forEach(([name, status]) => {
|
||||||
Object.keys(updatedLines).forEach((name) => {
|
if (keys.includes(name)) {
|
||||||
chartInfo.current.chartInstance.dispatchAction({
|
!status && (newLinesStatus[name] = true);
|
||||||
type: 'legendToggleSelect',
|
} else {
|
||||||
// 图例名称
|
status && (newLinesStatus[name] = false);
|
||||||
name: name,
|
}
|
||||||
});
|
|
||||||
});
|
});
|
||||||
|
|
||||||
setSelectedLines(keys);
|
setLinesStatus(newLinesStatus);
|
||||||
};
|
};
|
||||||
|
|
||||||
|
// 图表数据更新渲染后,更新图表拖拽轴信息并重新计算列表值
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
if (curMetricData) {
|
if (curMetricData) {
|
||||||
setTimeout(() => {
|
setTimeout(() => {
|
||||||
// 新的图表数据渲染后,更新图表拖拽轴信息
|
|
||||||
chartInfo.current.oldDataZoomOption = (chartInfo.current.chartInstance.getOption() as any).dataZoom[0];
|
chartInfo.current.oldDataZoomOption = (chartInfo.current.chartInstance.getOption() as any).dataZoom[0];
|
||||||
});
|
});
|
||||||
renderTableInfo();
|
renderTableInfo();
|
||||||
}
|
}
|
||||||
}, [curMetricData]);
|
}, [curMetricData]);
|
||||||
|
|
||||||
|
// 更新图例选中状态
|
||||||
|
useEffect(() => {
|
||||||
|
Object.entries(linesStatus).map(([name, status]) => {
|
||||||
|
const type = status ? 'legendSelect' : 'legendUnSelect';
|
||||||
|
chartInfo.current.chartInstance.dispatchAction({
|
||||||
|
type,
|
||||||
|
name,
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}, [linesStatus]);
|
||||||
|
|
||||||
// 进入详情时,首次获取数据
|
// 进入详情时,首次获取数据
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
if (metricType && metricName) {
|
if (metricType && metricName) {
|
||||||
setLoading(true);
|
setLoading(true);
|
||||||
const { curTimeRange } = chartInfo.current;
|
const { curTimeRange } = chartInfo.current;
|
||||||
getMetricChartData(curTimeRange).then((res: any[] | null) => {
|
getMetricChartData(curTimeRange).then(
|
||||||
// 如果图表返回数据
|
(res: any[] | null) => {
|
||||||
if (res?.length) {
|
// 如果图表返回数据
|
||||||
// 格式化图表需要的数据
|
if (res?.length) {
|
||||||
const formattedMetricData = (
|
// 格式化图表需要的数据
|
||||||
formatChartData(
|
const formattedMetricData = (
|
||||||
res,
|
formatChartData(
|
||||||
global.getMetricDefine || {},
|
res,
|
||||||
metricType,
|
global.getMetricDefine || {},
|
||||||
curTimeRange,
|
metricType,
|
||||||
DEFAULT_POINT_INTERVAL,
|
curTimeRange,
|
||||||
false
|
DEFAULT_POINT_INTERVAL,
|
||||||
) as MetricChartDataType[]
|
false
|
||||||
)[0];
|
) as MetricChartDataType[]
|
||||||
// 填充图表数据
|
)[0];
|
||||||
let initFullTimeRange = curTimeRange;
|
// 填充图表数据
|
||||||
const pointsOfFirstLine = formattedMetricData.metricLines.find((line) => line.data.length).data;
|
let initFullTimeRange = curTimeRange;
|
||||||
if (pointsOfFirstLine) {
|
const pointsOfFirstLine = formattedMetricData.metricLines.find((line) => line.data.length).data;
|
||||||
initFullTimeRange = [pointsOfFirstLine[0][0] as number, pointsOfFirstLine[pointsOfFirstLine.length - 1][0] as number] as const;
|
if (pointsOfFirstLine) {
|
||||||
}
|
initFullTimeRange = [
|
||||||
|
pointsOfFirstLine[0][0] as number,
|
||||||
// 获取单位保存起来
|
pointsOfFirstLine[pointsOfFirstLine.length - 1][0] as number,
|
||||||
let transformUnit = undefined;
|
] as const;
|
||||||
Object.entries(UNIT_MAP).forEach((unit) => {
|
|
||||||
if (formattedMetricData.metricUnit.includes(unit[0])) {
|
|
||||||
transformUnit = unit;
|
|
||||||
}
|
}
|
||||||
});
|
|
||||||
|
|
||||||
chartInfo.current = {
|
// 获取单位保存起来
|
||||||
...chartInfo.current,
|
let transformUnit = undefined;
|
||||||
fullMetricData: formattedMetricData,
|
Object.entries(UNIT_MAP).forEach((unit) => {
|
||||||
fullTimeRange: [...initFullTimeRange],
|
if (formattedMetricData.metricUnit.includes(unit[0])) {
|
||||||
curTimeRange: [...initFullTimeRange],
|
transformUnit = unit;
|
||||||
sliderPos: [
|
}
|
||||||
initFullTimeRange[1] - (initFullTimeRange[1] - initFullTimeRange[0]) * DATA_ZOOM_DEFAULT_SCALE,
|
});
|
||||||
initFullTimeRange[1],
|
|
||||||
],
|
updateChartInfo({
|
||||||
transformUnit,
|
fullMetricData: formattedMetricData,
|
||||||
};
|
fullTimeRange: [...initFullTimeRange],
|
||||||
setCurMetricData(formattedMetricData);
|
curTimeRange: [...initFullTimeRange],
|
||||||
setLoading(false);
|
sliderPos: [
|
||||||
}
|
initFullTimeRange[1] - (initFullTimeRange[1] - initFullTimeRange[0]) * DATA_ZOOM_DEFAULT_SCALE,
|
||||||
});
|
initFullTimeRange[1],
|
||||||
|
],
|
||||||
|
transformUnit,
|
||||||
|
});
|
||||||
|
setCurMetricData(formattedMetricData);
|
||||||
|
const newLinesStatus: { [lineName: string]: boolean } = {};
|
||||||
|
formattedMetricData.metricLines.forEach((line) => {
|
||||||
|
newLinesStatus[line.name] = true;
|
||||||
|
});
|
||||||
|
setLinesStatus(newLinesStatus);
|
||||||
|
setLoading(false);
|
||||||
|
getAdditionChartData(false, initFullTimeRange[0], initFullTimeRange[1]);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
() => setLoading(false)
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}, []);
|
}, []);
|
||||||
|
|
||||||
const debounced = debounce(onDataZoomDrag, 300);
|
debouncedZoomDrag.current = debounce(onDataZoomDrag, 300);
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<Spin spinning={loading}>
|
<Spin spinning={loading}>
|
||||||
<div className="chart-detail-modal-container">
|
<div className="chart-detail-modal-container">
|
||||||
{curMetricData && (
|
{curMetricData ? (
|
||||||
<>
|
<>
|
||||||
<div className="detail-title">
|
|
||||||
<div className="left">
|
|
||||||
<div className="title">
|
|
||||||
<Tooltip
|
|
||||||
placement="bottomLeft"
|
|
||||||
title={() => {
|
|
||||||
let content = '';
|
|
||||||
const metricDefine = global.getMetricDefine(metricType, curMetricData.metricName);
|
|
||||||
if (metricDefine) {
|
|
||||||
content = metricDefine.desc;
|
|
||||||
}
|
|
||||||
return content;
|
|
||||||
}}
|
|
||||||
>
|
|
||||||
<span style={{ cursor: 'pointer' }}>
|
|
||||||
<span>{curMetricData.metricName}</span> <span className="unit">({curMetricData.metricUnit}) </span>
|
|
||||||
</span>
|
|
||||||
</Tooltip>
|
|
||||||
</div>
|
|
||||||
<div className="info">{chartInfo.current.sliderRange}</div>
|
|
||||||
</div>
|
|
||||||
<div className="right">
|
|
||||||
<Button type="text" size="small" onClick={onClose}>
|
|
||||||
<CloseOutlined />
|
|
||||||
</Button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<SingleChart
|
<SingleChart
|
||||||
chartTypeProp="line"
|
chartTypeProp="line"
|
||||||
wrapStyle={{
|
wrapStyle={{
|
||||||
width: 'auto',
|
width: 'auto',
|
||||||
height: 462,
|
height: 462,
|
||||||
}}
|
}}
|
||||||
|
// events 事件只注册一次,所以这里使用 ref 来执行防抖函数
|
||||||
onEvents={{
|
onEvents={{
|
||||||
dataZoom: (record: any) => {
|
dataZoom: (record: any) => debouncedZoomDrag?.current(record),
|
||||||
debounced(record);
|
|
||||||
},
|
|
||||||
}}
|
}}
|
||||||
|
showHeader={false}
|
||||||
propChartData={curMetricData.metricLines}
|
propChartData={curMetricData.metricLines}
|
||||||
optionMergeProps={{ notMerge: true }}
|
optionMergeProps={{ notMerge: true }}
|
||||||
getChartInstance={(chartInstance) => {
|
getChartInstance={(chartInstance) => {
|
||||||
chartInfo.current = {
|
setDisposeChartInstance(() => () => chartInstance.dispose());
|
||||||
...chartInfo.current,
|
updateChartInfo({
|
||||||
chartInstance,
|
chartInstance,
|
||||||
};
|
});
|
||||||
}}
|
}}
|
||||||
{...getDetailChartConfig(`${curMetricData.metricName}{unit|(${curMetricData.metricUnit})}`, chartInfo.current.sliderPos)}
|
{...getDetailChartConfig(`${curMetricData.metricName}{unit|(${curMetricData.metricUnit})}`, chartInfo.current.sliderPos)}
|
||||||
/>
|
/>
|
||||||
@@ -588,16 +624,10 @@ const ChartDetail = (props: ChartDetailProps) => {
|
|||||||
className="detail-table"
|
className="detail-table"
|
||||||
rowKey="name"
|
rowKey="name"
|
||||||
rowSelection={{
|
rowSelection={{
|
||||||
// hideSelectAll: true,
|
|
||||||
preserveSelectedRowKeys: true,
|
preserveSelectedRowKeys: true,
|
||||||
selectedRowKeys: selectedLines,
|
selectedRowKeys: Object.entries(linesStatus)
|
||||||
// getCheckboxProps: (record) => {
|
.filter(([, status]) => status)
|
||||||
// return selectedLines.length <= 1 && selectedLines.includes(record.name)
|
.map(([name]) => name),
|
||||||
// ? {
|
|
||||||
// disabled: true,
|
|
||||||
// }
|
|
||||||
// : {};
|
|
||||||
// },
|
|
||||||
selections: [Table.SELECTION_INVERT, Table.SELECTION_NONE],
|
selections: [Table.SELECTION_INVERT, Table.SELECTION_NONE],
|
||||||
onChange: (keys: string[]) => tableLineChange(keys),
|
onChange: (keys: string[]) => tableLineChange(keys),
|
||||||
}}
|
}}
|
||||||
@@ -610,6 +640,8 @@ const ChartDetail = (props: ChartDetailProps) => {
|
|||||||
pagination={false}
|
pagination={false}
|
||||||
/>
|
/>
|
||||||
</>
|
</>
|
||||||
|
) : (
|
||||||
|
!loading && <RenderEmpty message="详情加载失败,请重试" height={400} />
|
||||||
)}
|
)}
|
||||||
</div>
|
</div>
|
||||||
</Spin>
|
</Spin>
|
||||||
@@ -618,22 +650,46 @@ const ChartDetail = (props: ChartDetailProps) => {
|
|||||||
|
|
||||||
// eslint-disable-next-line react/display-name
|
// eslint-disable-next-line react/display-name
|
||||||
const ChartDrawer = forwardRef((_, ref) => {
|
const ChartDrawer = forwardRef((_, ref) => {
|
||||||
|
const [global] = AppContainer.useGlobalValue();
|
||||||
const [visible, setVisible] = useState(false);
|
const [visible, setVisible] = useState(false);
|
||||||
const [dashboardType, setDashboardType] = useState<MetricType>();
|
|
||||||
const [metricName, setMetricName] = useState<string>();
|
|
||||||
const [queryLines, setQueryLines] = useState<string[]>([]);
|
const [queryLines, setQueryLines] = useState<string[]>([]);
|
||||||
|
const [sliderRange, setSliderRange] = useState<string>('');
|
||||||
|
const [disposeChartInstance, setDisposeChartInstance] = useState<() => void>(() => 0);
|
||||||
|
const [metricInfo, setMetricInfo] = useState<{
|
||||||
|
type: MetricType | undefined;
|
||||||
|
name: string;
|
||||||
|
unit: string;
|
||||||
|
desc: string;
|
||||||
|
}>({
|
||||||
|
type: undefined,
|
||||||
|
name: '',
|
||||||
|
unit: '',
|
||||||
|
desc: '',
|
||||||
|
});
|
||||||
|
|
||||||
const onOpen = (dashboardType: MetricType, metricName: string, queryLines: string[]) => {
|
const onOpen = (dashboardType: MetricType, metricName: string, queryLines: string[]) => {
|
||||||
setDashboardType(dashboardType);
|
const metricDefine = global.getMetricDefine(dashboardType, metricName);
|
||||||
setMetricName(metricName);
|
setMetricInfo({
|
||||||
|
type: dashboardType,
|
||||||
|
name: metricName,
|
||||||
|
unit: metricDefine?.unit || '',
|
||||||
|
desc: metricDefine?.desc || '',
|
||||||
|
});
|
||||||
setQueryLines(queryLines);
|
setQueryLines(queryLines);
|
||||||
setVisible(true);
|
setVisible(true);
|
||||||
};
|
};
|
||||||
|
|
||||||
const onClose = () => {
|
const onClose = () => {
|
||||||
setVisible(false);
|
setVisible(false);
|
||||||
setDashboardType(undefined);
|
setSliderRange('');
|
||||||
setMetricName(undefined);
|
disposeChartInstance();
|
||||||
|
setDisposeChartInstance(() => () => 0);
|
||||||
|
setMetricInfo({
|
||||||
|
type: undefined,
|
||||||
|
name: '',
|
||||||
|
unit: '',
|
||||||
|
desc: '',
|
||||||
|
});
|
||||||
};
|
};
|
||||||
|
|
||||||
useImperativeHandle(ref, () => ({
|
useImperativeHandle(ref, () => ({
|
||||||
@@ -641,9 +697,36 @@ const ChartDrawer = forwardRef((_, ref) => {
|
|||||||
}));
|
}));
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<Drawer width={1080} visible={visible} footer={null} closable={false} maskClosable={false} destroyOnClose={true} onClose={onClose}>
|
<Drawer
|
||||||
{dashboardType && metricName && (
|
className="overview-chart-detail-drawer"
|
||||||
<ChartDetail metricType={dashboardType} metricName={metricName} queryLines={queryLines} onClose={onClose} />
|
width={1080}
|
||||||
|
visible={visible}
|
||||||
|
title={
|
||||||
|
<div className="detail-header">
|
||||||
|
<div className="title">
|
||||||
|
<Tooltip placement="bottomLeft" title={metricInfo.desc}>
|
||||||
|
<span style={{ cursor: 'pointer' }}>
|
||||||
|
<span>{metricInfo.name}</span> <span className="unit">({metricInfo.unit}) </span>
|
||||||
|
</span>
|
||||||
|
</Tooltip>
|
||||||
|
</div>
|
||||||
|
<div className="slider-info">{sliderRange}</div>
|
||||||
|
</div>
|
||||||
|
}
|
||||||
|
footer={null}
|
||||||
|
closable={true}
|
||||||
|
maskClosable={false}
|
||||||
|
destroyOnClose={true}
|
||||||
|
onClose={onClose}
|
||||||
|
>
|
||||||
|
{metricInfo.type && metricInfo.name && (
|
||||||
|
<ChartDetail
|
||||||
|
metricType={metricInfo.type}
|
||||||
|
metricName={metricInfo.name}
|
||||||
|
queryLines={queryLines}
|
||||||
|
setSliderRange={setSliderRange}
|
||||||
|
setDisposeChartInstance={setDisposeChartInstance}
|
||||||
|
/>
|
||||||
)}
|
)}
|
||||||
</Drawer>
|
</Drawer>
|
||||||
);
|
);
|
||||||
|
|||||||
@@ -46,30 +46,42 @@ export const supplementaryPoints = (
|
|||||||
extraCallback?: (point: [number, 0]) => any[]
|
extraCallback?: (point: [number, 0]) => any[]
|
||||||
) => {
|
) => {
|
||||||
lines.forEach(({ data }) => {
|
lines.forEach(({ data }) => {
|
||||||
|
// 获取未补点前线条的点的个数
|
||||||
let len = data.length;
|
let len = data.length;
|
||||||
for (let i = 0; i < len; i++) {
|
// 记录当前处理到的点的下标值
|
||||||
const timestamp = data[i][0] as number;
|
let i = 0;
|
||||||
// 数组第一个点和最后一个点单独处理
|
|
||||||
|
for (; i < len; i++) {
|
||||||
if (i === 0) {
|
if (i === 0) {
|
||||||
let firstPointTimestamp = data[0][0] as number;
|
let firstPointTimestamp = data[0][0] as number;
|
||||||
while (firstPointTimestamp - interval > timeRange[0]) {
|
while (firstPointTimestamp - interval > timeRange[0]) {
|
||||||
const prePointTimestamp = firstPointTimestamp - interval;
|
const prevPointTimestamp = firstPointTimestamp - interval;
|
||||||
data.unshift(extraCallback ? extraCallback([prePointTimestamp, 0]) : [prePointTimestamp, 0]);
|
data.unshift(extraCallback ? extraCallback([prevPointTimestamp, 0]) : [prevPointTimestamp, 0]);
|
||||||
|
firstPointTimestamp = prevPointTimestamp;
|
||||||
len++;
|
len++;
|
||||||
i++;
|
i++;
|
||||||
firstPointTimestamp = prePointTimestamp;
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (i === len - 1) {
|
if (i === len - 1) {
|
||||||
let lastPointTimestamp = data[len - 1][0] as number;
|
let lastPointTimestamp = data[i][0] as number;
|
||||||
while (lastPointTimestamp + interval < timeRange[1]) {
|
while (lastPointTimestamp + interval < timeRange[1]) {
|
||||||
const next = lastPointTimestamp + interval;
|
const nextPointTimestamp = lastPointTimestamp + interval;
|
||||||
data.push(extraCallback ? extraCallback([next, 0]) : [next, 0]);
|
data.push(extraCallback ? extraCallback([nextPointTimestamp, 0]) : [nextPointTimestamp, 0]);
|
||||||
lastPointTimestamp = next;
|
lastPointTimestamp = nextPointTimestamp;
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
let timestamp = data[i][0] as number;
|
||||||
|
while (timestamp + interval < data[i + 1][0]) {
|
||||||
|
const nextPointTimestamp = timestamp + interval;
|
||||||
|
data.splice(i + 1, 0, extraCallback ? extraCallback([nextPointTimestamp, 0]) : [nextPointTimestamp, 0]);
|
||||||
|
timestamp = nextPointTimestamp;
|
||||||
|
len++;
|
||||||
|
i++;
|
||||||
}
|
}
|
||||||
} else if (timestamp + interval < data[i + 1][0]) {
|
|
||||||
data.splice(i + 1, 0, extraCallback ? extraCallback([timestamp + interval, 0]) : [timestamp + interval, 0]);
|
|
||||||
len++;
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
@@ -135,18 +147,37 @@ export const formatChartData = (
|
|||||||
};
|
};
|
||||||
|
|
||||||
const seriesCallback = (lines: { name: string; data: [number, string | number][] }[]) => {
|
const seriesCallback = (lines: { name: string; data: [number, string | number][] }[]) => {
|
||||||
|
const len = CHART_COLOR_LIST.length;
|
||||||
// series 配置
|
// series 配置
|
||||||
return lines.map((line) => {
|
return lines.map((line, i) => {
|
||||||
return {
|
return {
|
||||||
...line,
|
...line,
|
||||||
lineStyle: {
|
lineStyle: {
|
||||||
width: 1.5,
|
width: 1.5,
|
||||||
},
|
},
|
||||||
|
connectNulls: false,
|
||||||
symbol: 'emptyCircle',
|
symbol: 'emptyCircle',
|
||||||
symbolSize: 4,
|
symbolSize: 4,
|
||||||
smooth: 0.25,
|
smooth: 0.25,
|
||||||
areaStyle: {
|
areaStyle: {
|
||||||
opacity: 0.02,
|
color: {
|
||||||
|
type: 'linear',
|
||||||
|
x: 0,
|
||||||
|
y: 0,
|
||||||
|
x2: 0,
|
||||||
|
y2: 1,
|
||||||
|
colorStops: [
|
||||||
|
{
|
||||||
|
offset: 0,
|
||||||
|
color: CHART_COLOR_LIST[i % len] + '10',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
offset: 1,
|
||||||
|
color: 'rgba(255,255,255,0)', // 100% 处的颜色
|
||||||
|
},
|
||||||
|
],
|
||||||
|
global: false, // 缺省为 false
|
||||||
|
},
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
});
|
});
|
||||||
@@ -189,6 +220,7 @@ export const getDetailChartConfig = (title: string, sliderPos: readonly [number,
|
|||||||
startValue: sliderPos[0],
|
startValue: sliderPos[0],
|
||||||
endValue: sliderPos[1],
|
endValue: sliderPos[1],
|
||||||
zoomOnMouseWheel: false,
|
zoomOnMouseWheel: false,
|
||||||
|
minValueSpan: 10 * 60 * 1000,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
start: 0,
|
start: 0,
|
||||||
|
|||||||
@@ -63,56 +63,63 @@
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
.overview-chart-detail-drawer {
|
||||||
.chart-detail-modal-container {
|
.dcloud-spin-nested-loading > div > .dcloud-spin.dcloud-spin-spinning {
|
||||||
position: relative;
|
height: 300px;
|
||||||
.expand-icon-box {
|
}
|
||||||
position: absolute;
|
&.dcloud-drawer .dcloud-drawer-body {
|
||||||
z-index: 1000;
|
padding: 0 20px;
|
||||||
top: 14px;
|
}
|
||||||
right: 44px;
|
.detail-header {
|
||||||
width: 24px;
|
display: flex;
|
||||||
height: 24px;
|
align-items: flex-end;
|
||||||
cursor: pointer;
|
font-weight: normal;
|
||||||
font-size: 16px;
|
.title {
|
||||||
text-align: center;
|
font-family: @font-family-bold;
|
||||||
border-radius: 50%;
|
font-size: 18px;
|
||||||
transition: background-color 0.3s ease;
|
color: #495057;
|
||||||
.expand-icon {
|
letter-spacing: 0;
|
||||||
color: #adb5bc;
|
.unit {
|
||||||
line-height: 24px;
|
font-family: @font-family-bold;
|
||||||
}
|
font-size: 14px;
|
||||||
&:hover {
|
letter-spacing: 0.5px;
|
||||||
background: rgba(33, 37, 41, 0.04);
|
|
||||||
.expand-icon {
|
|
||||||
color: #74788d;
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
.slider-info {
|
||||||
|
margin-left: 10px;
|
||||||
|
font-size: 12px;
|
||||||
|
font-family: @font-family;
|
||||||
|
color: #303a51;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
.detail-title {
|
.chart-detail-modal-container {
|
||||||
display: flex;
|
position: relative;
|
||||||
justify-content: space-between;
|
overflow: hidden;
|
||||||
align-items: center;
|
.expand-icon-box {
|
||||||
.left {
|
position: absolute;
|
||||||
display: flex;
|
z-index: 1000;
|
||||||
align-items: flex-end;
|
top: 14px;
|
||||||
.title {
|
right: 44px;
|
||||||
font-family: @font-family-bold;
|
width: 24px;
|
||||||
font-size: 18px;
|
height: 24px;
|
||||||
color: #495057;
|
cursor: pointer;
|
||||||
letter-spacing: 0;
|
font-size: 16px;
|
||||||
.unit {
|
text-align: center;
|
||||||
font-family: @font-family-bold;
|
border-radius: 50%;
|
||||||
font-size: 14px;
|
transition: background-color 0.3s ease;
|
||||||
letter-spacing: 0.5px;
|
.expand-icon {
|
||||||
|
color: #adb5bc;
|
||||||
|
line-height: 24px;
|
||||||
|
}
|
||||||
|
&:hover {
|
||||||
|
background: rgba(33, 37, 41, 0.04);
|
||||||
|
.expand-icon {
|
||||||
|
color: #74788d;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
.info {
|
}
|
||||||
margin-left: 10px;
|
.detail-table {
|
||||||
}
|
margin-top: 16px;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
.detail-table {
|
|
||||||
margin-top: 16px;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -216,8 +216,8 @@ const DashboardDragChart = (props: PropsType): JSX.Element => {
|
|||||||
onChange={ksHeaderChange}
|
onChange={ksHeaderChange}
|
||||||
nodeScopeModule={{
|
nodeScopeModule={{
|
||||||
customScopeList: scopeList,
|
customScopeList: scopeList,
|
||||||
scopeName: `自定义 ${dashboardType === MetricType.Broker ? 'Broker' : 'Topic'} 范围`,
|
scopeName: dashboardType === MetricType.Broker ? 'Broker' : 'Topic',
|
||||||
showSearch: dashboardType === MetricType.Topic,
|
scopeLabel: `自定义 ${dashboardType === MetricType.Broker ? 'Broker' : 'Topic'} 范围`,
|
||||||
}}
|
}}
|
||||||
indicatorSelectModule={{
|
indicatorSelectModule={{
|
||||||
hide: false,
|
hide: false,
|
||||||
|
|||||||
@@ -0,0 +1,15 @@
|
|||||||
|
import React from 'react';
|
||||||
|
|
||||||
|
const RenderEmpty = (props: { height?: string | number; message: string }) => {
|
||||||
|
const { height = 200, message } = props;
|
||||||
|
return (
|
||||||
|
<>
|
||||||
|
<div className="empty-panel" style={{ height }}>
|
||||||
|
<div className="img" />
|
||||||
|
<div className="text">{message}</div>
|
||||||
|
</div>
|
||||||
|
</>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
export default RenderEmpty;
|
||||||
@@ -26,8 +26,8 @@ const OptionsDefault = [
|
|||||||
const NodeScope = ({ nodeScopeModule, change }: propsType) => {
|
const NodeScope = ({ nodeScopeModule, change }: propsType) => {
|
||||||
const {
|
const {
|
||||||
customScopeList: customList,
|
customScopeList: customList,
|
||||||
scopeName = '自定义节点范围',
|
scopeName = '',
|
||||||
showSearch = false,
|
scopeLabel = '自定义范围',
|
||||||
searchPlaceholder = '输入内容进行搜索',
|
searchPlaceholder = '输入内容进行搜索',
|
||||||
} = nodeScopeModule;
|
} = nodeScopeModule;
|
||||||
const [topNum, setTopNum] = useState<number>(5);
|
const [topNum, setTopNum] = useState<number>(5);
|
||||||
@@ -70,7 +70,7 @@ const NodeScope = ({ nodeScopeModule, change }: propsType) => {
|
|||||||
change(checkedListTemp, false);
|
change(checkedListTemp, false);
|
||||||
setIsTop(false);
|
setIsTop(false);
|
||||||
setTopNum(null);
|
setTopNum(null);
|
||||||
setInputValue(`已选${checkedListTemp?.length}项`);
|
setInputValue(`${checkedListTemp?.length}项`);
|
||||||
setPopVisible(false);
|
setPopVisible(false);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
@@ -109,7 +109,7 @@ const NodeScope = ({ nodeScopeModule, change }: propsType) => {
|
|||||||
{/* <span>时间:</span> */}
|
{/* <span>时间:</span> */}
|
||||||
<div className="flx_con">
|
<div className="flx_con">
|
||||||
<div className="flx_l">
|
<div className="flx_l">
|
||||||
<h6 className="time_title">选择top范围</h6>
|
<h6 className="time_title">选择 top 范围</h6>
|
||||||
<Radio.Group
|
<Radio.Group
|
||||||
optionType="button"
|
optionType="button"
|
||||||
buttonStyle="solid"
|
buttonStyle="solid"
|
||||||
@@ -128,7 +128,7 @@ const NodeScope = ({ nodeScopeModule, change }: propsType) => {
|
|||||||
</Radio.Group>
|
</Radio.Group>
|
||||||
</div>
|
</div>
|
||||||
<div className="flx_r">
|
<div className="flx_r">
|
||||||
<h6 className="time_title">{scopeName}</h6>
|
<h6 className="time_title">{scopeLabel}</h6>
|
||||||
<div className="custom-scope">
|
<div className="custom-scope">
|
||||||
<div className="check-row">
|
<div className="check-row">
|
||||||
<Checkbox className="check-all" indeterminate={indeterminate} onChange={onCheckAllChange} checked={checkAll}>
|
<Checkbox className="check-all" indeterminate={indeterminate} onChange={onCheckAllChange} checked={checkAll}>
|
||||||
@@ -136,9 +136,7 @@ const NodeScope = ({ nodeScopeModule, change }: propsType) => {
|
|||||||
</Checkbox>
|
</Checkbox>
|
||||||
<Input
|
<Input
|
||||||
className="search-input"
|
className="search-input"
|
||||||
suffix={
|
suffix={<IconFont type="icon-fangdajing" style={{ fontSize: '16px' }} />}
|
||||||
<IconFont type="icon-fangdajing" style={{ fontSize: '16px' }} />
|
|
||||||
}
|
|
||||||
size="small"
|
size="small"
|
||||||
placeholder={searchPlaceholder}
|
placeholder={searchPlaceholder}
|
||||||
onChange={(e) => setScopeSearchValue(e.target.value)}
|
onChange={(e) => setScopeSearchValue(e.target.value)}
|
||||||
@@ -148,7 +146,7 @@ const NodeScope = ({ nodeScopeModule, change }: propsType) => {
|
|||||||
<Checkbox.Group style={{ width: '100%' }} onChange={checkChange} value={checkedListTemp}>
|
<Checkbox.Group style={{ width: '100%' }} onChange={checkChange} value={checkedListTemp}>
|
||||||
<Row gutter={[10, 12]}>
|
<Row gutter={[10, 12]}>
|
||||||
{customList
|
{customList
|
||||||
.filter((item) => !showSearch || item.label.includes(scopeSearchValue))
|
.filter((item) => item.label.includes(scopeSearchValue))
|
||||||
.map((item) => (
|
.map((item) => (
|
||||||
<Col span={12} key={item.value}>
|
<Col span={12} key={item.value}>
|
||||||
<Checkbox value={item.value}>{item.label}</Checkbox>
|
<Checkbox value={item.value}>{item.label}</Checkbox>
|
||||||
@@ -180,6 +178,7 @@ const NodeScope = ({ nodeScopeModule, change }: propsType) => {
|
|||||||
return (
|
return (
|
||||||
<>
|
<>
|
||||||
<div id="d-node-scope">
|
<div id="d-node-scope">
|
||||||
|
<div className="scope-title">{scopeName}筛选:</div>
|
||||||
<Popover
|
<Popover
|
||||||
trigger={['click']}
|
trigger={['click']}
|
||||||
visible={popVisible}
|
visible={popVisible}
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
import React, { useEffect, useState } from 'react';
|
import React, { useEffect, useState } from 'react';
|
||||||
import { Tooltip, Select, IconFont, Utils, Divider } from 'knowdesign';
|
import { Tooltip, Select, IconFont, Utils, Divider, Button } from 'knowdesign';
|
||||||
import moment from 'moment';
|
import moment from 'moment';
|
||||||
import { DRangeTime } from 'knowdesign';
|
import { DRangeTime } from 'knowdesign';
|
||||||
import IndicatorDrawer from './IndicatorDrawer';
|
import IndicatorDrawer from './IndicatorDrawer';
|
||||||
@@ -48,7 +48,7 @@ export interface IcustomScope {
|
|||||||
export interface InodeScopeModule {
|
export interface InodeScopeModule {
|
||||||
customScopeList: IcustomScope[];
|
customScopeList: IcustomScope[];
|
||||||
scopeName?: string;
|
scopeName?: string;
|
||||||
showSearch?: boolean;
|
scopeLabel?: string;
|
||||||
searchPlaceholder?: string;
|
searchPlaceholder?: string;
|
||||||
change?: () => void;
|
change?: () => void;
|
||||||
}
|
}
|
||||||
@@ -138,9 +138,13 @@ const SingleChartHeader = ({
|
|||||||
};
|
};
|
||||||
|
|
||||||
const reloadRangeTime = () => {
|
const reloadRangeTime = () => {
|
||||||
const timeLen = rangeTime[1] - rangeTime[0] || 0;
|
if (isRelativeRangeTime) {
|
||||||
const curTimeStamp = moment().valueOf();
|
const timeLen = rangeTime[1] - rangeTime[0] || 0;
|
||||||
setRangeTime([curTimeStamp - timeLen, curTimeStamp]);
|
const curTimeStamp = moment().valueOf();
|
||||||
|
setRangeTime([curTimeStamp - timeLen, curTimeStamp]);
|
||||||
|
} else {
|
||||||
|
setRangeTime([...rangeTime]);
|
||||||
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
const openIndicatorDrawer = () => {
|
const openIndicatorDrawer = () => {
|
||||||
@@ -174,12 +178,10 @@ const SingleChartHeader = ({
|
|||||||
{!hideGridSelect && (
|
{!hideGridSelect && (
|
||||||
<Select className="grid-select" style={{ width: 70 }} value={gridNum} options={GRID_SIZE_OPTIONS} onChange={sizeChange} />
|
<Select className="grid-select" style={{ width: 70 }} value={gridNum} options={GRID_SIZE_OPTIONS} onChange={sizeChange} />
|
||||||
)}
|
)}
|
||||||
<Divider type="vertical" style={{ height: 20, top: 0 }} />
|
{(!hideNodeScope || !hideGridSelect) && <Divider type="vertical" style={{ height: 20, top: 0 }} />}
|
||||||
<Tooltip title="点击指标筛选,可选择指标" placement="bottomRight">
|
<Button type="primary" onClick={openIndicatorDrawer}>
|
||||||
<div className="icon-box" onClick={openIndicatorDrawer}>
|
指标筛选
|
||||||
<IconFont className="icon" type="icon-shezhi1" />
|
</Button>
|
||||||
</div>
|
|
||||||
</Tooltip>
|
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
@@ -3,8 +3,13 @@
|
|||||||
@import '~knowdesign/es/basic/style/mixins/index';
|
@import '~knowdesign/es/basic/style/mixins/index';
|
||||||
|
|
||||||
#d-node-scope {
|
#d-node-scope {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
position: relative;
|
position: relative;
|
||||||
display: inline-block;
|
.scope-title {
|
||||||
|
font-size: 14px;
|
||||||
|
color: #74788d;
|
||||||
|
}
|
||||||
.input-span {
|
.input-span {
|
||||||
cursor: pointer;
|
cursor: pointer;
|
||||||
}
|
}
|
||||||
@@ -29,10 +34,10 @@
|
|||||||
box-shadow: none;
|
box-shadow: none;
|
||||||
}
|
}
|
||||||
&.relativeTime {
|
&.relativeTime {
|
||||||
width: 160px;
|
width: 200px;
|
||||||
}
|
}
|
||||||
&.absoluteTime {
|
&.absoluteTime {
|
||||||
width: 300px;
|
width: 200px;
|
||||||
}
|
}
|
||||||
|
|
||||||
input {
|
input {
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
import { DownOutlined } from '@ant-design/icons';
|
import { DownOutlined } from '@ant-design/icons';
|
||||||
import { Popover } from 'knowdesign';
|
import { Popover } from 'knowdesign';
|
||||||
import { TooltipPlacement } from 'knowdesign/lib/basic/tooltip';
|
import { TooltipPlacement } from 'knowdesign/es/basic/tooltip';
|
||||||
import React, { useState, useRef, useEffect } from 'react';
|
import React, { useState, useRef, useEffect } from 'react';
|
||||||
import './index.less';
|
import './index.less';
|
||||||
|
|
||||||
@@ -93,8 +93,9 @@ export default (props: PropsType) => {
|
|||||||
return (
|
return (
|
||||||
<div
|
<div
|
||||||
key={i}
|
key={i}
|
||||||
className={`container-item ${curState.calculated ? (curState.isHideExpandNode ? 'show' : i >= curState.endI ? 'hide' : 'show') : ''
|
className={`container-item ${
|
||||||
}`}
|
curState.calculated ? (curState.isHideExpandNode ? 'show' : i >= curState.endI ? 'hide' : 'show') : ''
|
||||||
|
}`}
|
||||||
>
|
>
|
||||||
{item}
|
{item}
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
@@ -30,8 +30,8 @@ const { TextArea } = Input;
|
|||||||
const { Option } = Select;
|
const { Option } = Select;
|
||||||
|
|
||||||
const jobNameMap: any = {
|
const jobNameMap: any = {
|
||||||
expandAndReduce: '批量扩缩副本',
|
expandAndReduce: '扩缩副本',
|
||||||
transfer: '批量迁移副本',
|
transfer: '迁移副本',
|
||||||
};
|
};
|
||||||
|
|
||||||
interface DefaultConfig {
|
interface DefaultConfig {
|
||||||
@@ -325,8 +325,7 @@ export default (props: DefaultConfig) => {
|
|||||||
!jobId &&
|
!jobId &&
|
||||||
Utils.request(Api.getTopicMetaData(+routeParams.clusterId))
|
Utils.request(Api.getTopicMetaData(+routeParams.clusterId))
|
||||||
.then((res: any) => {
|
.then((res: any) => {
|
||||||
const filterRes = res.filter((item: any) => item.type !== 1);
|
const topics = (res || []).map((item: any) => {
|
||||||
const topics = (filterRes || []).map((item: any) => {
|
|
||||||
return {
|
return {
|
||||||
label: item.topicName,
|
label: item.topicName,
|
||||||
value: item.topicName,
|
value: item.topicName,
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user