Mirror of https://github.com/didi/KnowStreaming.git (synced 2025-12-24 11:52:08 +08:00)

Compare commits: v3.0.0-beta ... v3.0.0 (282 commits)
Commits (SHA1; the Author and Date columns were empty in the mirrored view):

045f65204b, f414b47a78, 44f4e2f0f9, 2361008bdf, 7377ef3ec5, a28d064b7a, e2e57e8575, 9d90bd2835, 7445e68df4, ab42625ad2,
18789a0a53, 68a37bb56a, 3b33652c47, 1e0c4c3904, 04e223de16, c4a691aa8a, ff9dde163a, eb7efbd1a5, 8c8c362c54, 66e119ad5d,
6dedc04a05, 0cf8bad0df, 95c9582d8b, 7815126ff5, a5fa9de54b, 95f1a2c630, 1e256ae1fd, 9fc9c54fa1, 1b362b1e02, 04e3172cca,
1caab7f3f7, 9d33c725ad, 6ed1d38106, 0f07ddedaf, 289945b471, f331a6d144, 0c8c12a651, 028c3bb2fa, d7a5a0d405, 5ef5f6e531,
1d205734b3, 5edd43884f, c1992373bc, ed562f9c8a, b4d44ef8c7, ad0c16a1b4, 7eabe66853, 3983d73695, 161d4c4562, 9a1e89564e,
0c18c5b4f6, 3e12ba34f7, e71e29391b, 9b7b9a7af0, a23819c308, 6cb1825d96, 77b8c758dc, e5a582cfad, ec83db267e, bfd026cae7,
35f1dd8082, 7ed0e7dd23, 1a3cbf7a9d, d9e4abc3de, a4186085d3, 26b1846bb4, 1aa89527a6, eac76d7ad0, cea0cd56f6, c4b897f282,
47389dbabb, a2f8b1a851, feac0a058f, 27eeac9fd4, a14db4b194, 54ee271a47, a3a9be4f7f, d4f0a832f3, 7dc533372c, 1737d87713,
dbb98dea11, 802b382b36, fc82999d45, 08aa000c07, 39015b5100, 0d635ad419, 9133205915, 725ac10c3d, 2b76358c8f, 833c360698,
7da1e67b01, 7eb86a47dd, d67e383c28, 8749d3e1f5, 30fba21c48, d83d35aee9, 1d3caeea7d, 26916f6632, fbfa0d2d2a, e626b99090,
203859b71b, 9a25c22f3a, 0a03f41a7c, 56191939c8, beb754aaaa, f234f740ca, e14679694c, e06712397e, b6c6df7ffc, 375c6f56c9,
0bf85c97b5, 630e582321, a89fe23bdd, a7a5fa9a31, c73a7eee2f, 121f8468d5, 7b0b6936e0, 597ea04a96, f7f90aeaaa, 227479f695,
6477fb3fe0, 4223f4f3c4, 7288874d72, 68f76f2daf, fe6ddebc49, 12b5acd073, a6f1fe07b3, 85e3f2a946, d4f416de14, 0d9a6702c1,
d11285cdbf, 5f1f33d2b9, 474daf752d, 27d1b92690, 993afa4c19, 028d891c32, 0df55ec22d, 579f64774d, 792f8d939d, e4fb02fcda,
0c14c641d0, dba671fd1e, 80d1693722, 26014a11b2, 848fddd55a, 97f5f05f1a, 25b82810f2, 9b1e506fa7, 7a42996e97, dbfcebcf67,
37c3f69a28, 5d412890b4, 1e318a4c40, d4549176ec, 61efdf492f, 67ea4d44c8, fdae05a4aa, 5efb837ee8, 584b626d93, de25a4ed8e,
2e852e5ca6, b11000715a, b3f8b46f0f, 8d22a0664a, 20756a3453, c9b4d45a64, 83f7f5468b, 59c042ad67, d550fc5068, 6effba69a0,
9b46956259, b5a4a732da, 487862367e, 5b63b9ce67, afbcd3e1df, 12b82c1395, 863b765e0d, 731429c51c, 66f3bc61fe, 4efe35dd51,
c92461ef93, 405e6e0c1d, 0d227aef49, 0e49002f42, 2e016800e0, 09f317b991, 5a48cb1547, f632febf33, 3c53467943, d358c0f4f7,
de977a5b32, 703d685d59, 31a5f17408, c40ae3c455, b71a34279e, 8f8c0c4eda, 3a384f0e34, cf7bc11cbd, be60ae8399, 8e50d145d5,
7a3d15525c, 64f32d8b24, 949d6ba605, ceb8db09f4, ed05a0ebb8, a7cbb76655, 93cbfa0b1f, 6120613a98, dbd00db159, befde952f5,
1aa759e5be, 2de27719c1, 21db57b537, dfe8d09477, 90dfa22c64, 0f35427645, 7909f60ff8, 9a1a8a4c30, fa7ad64140, 8a0c23339d,
e7ab3aff16, d0948797b9, 04a5e17451, 47065c8042, 488c778736, d10a7bcc75, afe44a2537, 9eadafe850, dab3eefcc0, 2b9a6b28d8,
465f98ca2b, a0312be4fd, 4a5161372b, 4c9921f752, 6dd72d40ee, db49c234bb, 4a9df0c4d9, 461573c2ba, 291992753f, fcefe7ac38,
7da712fcff, 2fd8687624, 639b1f8336, ab3b83e42a, 4818629c40, 61784c860a, d5667254f2, af2b93983f, 8281301cbd, 0043ab8371,
500eaace82, 28e8540c78, 69adf682e2, 69cd1ff6e1, 415d67cc32, 46a2fec79b, 560b322fca, effe17ac85, 7699acfc1b, 6e058240b3,
f005c6bc44, 7be462599f, 271ab432d9, 4114777a4e, 9189a54442, b95ee762e3, 9e3c4dc06b, 1891a3ac86, 9ecdcac06d, 790cb6a2e1,
4a98e5f025, 507abc1d84, 9b732fbbad, 220f1c6fc3, 7a950c67b6, 78f625dc8c, 211d26a3ed, dce2bc6326, 90e5d7f6f0, fc835e09c6,
c6e782a637, 1ddfbfc833
@@ -13,7 +13,7 @@ Before sending pull request to this project, please read and follow guidelines b

 Add device mode, API version, related log, screenshots and other related information in your pull request if possible.

-NOTE: We assume all your contribution can be licensed under the [Apache License 2.0](LICENSE).
+NOTE: We assume all your contribution can be licensed under the [AGPL-3.0](LICENSE).

 ## Issues
README.md (12 changed lines)

@@ -51,16 +51,16 @@

 - No intrusive changes to `Apache Kafka` required: one click brings Kafka versions from `0.10.x` through `3.x.x` under management, covering both `ZK` and `Raft` run modes, with an extensible compatibility architecture that helps you raise the level of your cluster management;

 - 🌪️ **Zero-cost, GUI-based**
-  - Distills high-frequency CLI capabilities into sensible product flows with a clean, attractive GUI, supporting GUI management of Cluster, Broker, Topic, Group, Message, ACL and other components; an ordinary user can get started in 5 minutes;
+  - Distills high-frequency CLI capabilities into sensible product flows with a clean, attractive GUI, supporting GUI management of Cluster, Broker, Zookeeper, Topic, ConsumerGroup, Message, ACL, Connect and other components; an ordinary user can get started in 5 minutes;

 - 👏 **Cloud-native, pluggable**
   - Built cloud-native with horizontal scalability: just add nodes to gain stronger collection and serving capacity; ships many hot-pluggable enterprise features covering observability-ecosystem integration, resource governance, multi-active disaster recovery and other core scenarios;

 - 🚀 **Professional capabilities**
-  - Cluster management: one-click cluster onboarding, health analysis, core-component observation, and more;
+  - Cluster management: one-click onboarding, health analysis, core-component observation, and more;
   - Observability: multi-dimensional metric dashboards, metric best practices, and more;
   - Anomaly inspection: multi-dimensional cluster health inspection, multi-dimensional cluster health scoring, and more;
-  - Capability enhancement: Topic replica expansion/shrinking, Topic replica migration, and more;
+  - Capability enhancement: cluster load rebalancing, Topic replica expansion/shrinking, Topic replica migration, and more;

@@ -133,3 +133,9 @@ PS: when asking a question, please describe it fully in one go and include your environment details

 **`2. WeChat group`**

 To join the WeChat group: add `mike_zhangliang` or `PenceXie` on WeChat with the note "KnowStreaming".
 <br/>
 <img width="116" alt="wx" src="https://user-images.githubusercontent.com/71620349/192257217-c4ebc16c-3ad9-485d-a914-5911d3a4f46b.png">

 ## Star History

 [](https://star-history.com/#didi/KnowStreaming&Date)
@@ -1,6 +1,159 @@

-## v3.0.0-beta
+## v3.0.0

**Bug fixes**
- Fixed Group metric de-duplication (anti-repeat collection) not taking effect
- Fixed failure to auto-create ES index templates
- Fixed deleted Topics still appearing in the Group+Topic list
- Fixed task creation failing on MySQL 8 when start_time is NULL, due to a compatibility issue
- Fixed deadlocks when updating the Group info table
- Fixed chart gap-filling logic not matching the chart's time range

**UX improvements**
- Split health-inspection tasks by resource category
- Metrics on the Group detail page are now fetched in real time
- Drag-and-drop chart ordering is now persisted per user
- ZK info in the multi-cluster list now handles clusters without ZK
- Message preview on the Topic detail page now supports copying
- Large numbers in some views are now shown with thousands separators

**New**
- Added a Zookeeper client configuration field to cluster info
- Added a Kafka cluster run-mode field to cluster info
- Added a docker-compose deployment option

## v3.0.0-beta.3

**Docs**
- FAQ: added notes on permission-recognition failures
- Synced the docs with the official site

**Bug fixes**
- Filter out leaderless partitions when fetching offset info
- Upgraded oshi-core to 5.6.1, fixing system-metric collection failures on Windows
- Fixed JMX connections not being re-established after being closed
- Fixed an NPE when fetching the TotalLogSize metric while Broker info is missing from the DB
- Fixed a wrong SQL comment in dml-logi.sql
- Fixed OS-type detection in startup.sh
- Fixed config deletion failing on the configuration-management page
- Fixed file reference paths in the system-management app
- Fixed a 404 when clicking the hint link on the Topic Messages detail page
- Fixed the current replica count not showing when expanding replicas

**UX improvements**
- Topic Messages page: added sorting of returned data and Earliest/Latest fetch modes
- Renamed GroupOffsetResetEnum to OffsetTypeEnum for a more accurate class name
- Moved the KafkaZKDAO class and the Kafka Znode entity classes so the Kafka Zookeeper DAO is more cohesive and easier to find
- Backend: added metric sorting for the Overview page
- Frontend: Webpack configuration tuning
- Cluster Overview charts no longer offer the zoom-in view
- Added manual refresh to list pages
- Cluster onboarding/editing: improved echo of the JMX-PORT and Version fields, and improved display of JMX info
- Sharper images on the login page
- Assorted style and copy tweaks

---

## v3.0.0-beta.2

**Docs**
- Added a doc on integrating external login systems
- Improved the docs on building and packaging the frontend project
- FAQ: added notes on making KnowStreaming connect to a specific JMX IP

**Bug fixes**
- Fixed logi_security_oplog columns being too short to record operations such as Topic deletion
- Fixed ES queries throwing java.lang.NumberFormatException: For input string: "{"value":0,"relation":"eq"}"
- Fixed wrong units for the LogStartOffset and LogEndOffset metrics
- Fixed the old replica count being NULL during replica changes
- Fixed wrong pagination info when searching from page two of the cluster Group list
- Fixed inconsistent error messages when resetting offsets
- Fixed missing permission points for the cluster-view, system-view, LoadRebalance and other pages
- Fixed the error message being easy to miss when querying a nonexistent Topic
- Fixed frontend build errors for Windows users
- Locked frontend dependency versions with package-lock.json, fixing build failures caused by auto-upgraded dependencies
- System-management sub-app: added interception of backend response codes, so backend errors are no longer silently dropped
- Fixed users still being able to access the system after logging out
- Fixed wrong numbers shown when configuring inspection tasks
- Fixed issues with Broker/Topic Overview charts and chart details
- Fixed wrong detail data for Job replica-expansion/shrink tasks
- Fixed missing bounds on partition ID and offset values when resetting offsets
- Fixed Kafka system Topics not being selectable when expanding/shrinking or migrating replicas
- Fixed the Topic Config page edit form not echoing current values correctly
- Fixed the Broker Card still showing a loading state after data returned

**UX improvements**
- Changed the default credentials to admin/admin
- Shortened the time to load cluster info after adding a cluster
- The cluster Broker list now shows the Controller role
- After a replica-change task finishes, a preferred-replica election is now triggered
- Task-module jobs are split into Metrics, Common and Metadata categories, each with its own thread pool, reducing pressure on the Job module's pool and interference between task categories
- Removed redundant, unused files from the codebase
- ES index templates and the last 7 days of indices are now created automatically, reducing setup steps
- Streamlined the frontend build process
- Polished the login-page copy, the left sidebar, single-cluster detail styling, Topic-list trend charts, etc.
- Pre-cache data on first entry to Broker/Topic chart details for a smoother experience
- Improved the Partition tab on the Topic detail page
- Added editing to the multi-cluster list page
- Replica changes now support minute-level granularity for the migration time
- Upgraded logi-security to 2.10.13
- Upgraded logi-elasticsearch-client to 1.0.24

**Capability improvements**
- Support for LDAP login authentication

---

## v3.0.0-beta.1

**Docs**
- Added a Task module guide
- FAQ: added notes on the `Specified key was too long; max key length is 767 bytes` error
- FAQ: added notes on the `ESIndexNotFoundException` error

**Bug fixes**
- Fixed clicking Stop in Consumer not stopping retrieval
- Fixed errors when creating/editing role permissions
- Fixed wrong status on the balance cards in multi-cluster management/single-cluster detail
- Fixed the version list not being sorted
- Fixed Controller info being recorded endlessly for Raft clusters
- Fixed consumer-group description fetch failures on some versions
- Fixed missing Topic names in logs about partition-offset fetch failures
- Fixed wrong GitHub image URLs and broken images
- Fixed the Broker's default address not matching its comment
- Fixed pagination not working on the Consumer list
- Fixed the operation-log table's operation_methods column missing a default value
- Fixed the move_broker_list column in the cluster-balance table having no effect
- Fixed logs repeatedly warning "unsupported" when fetching KafkaUser and KafkaACL info
- Fixed curves dropping to zero when metrics are missing

**UX improvements**
- Reduced frontend build time and bundle size with a dependency chunk-splitting strategy
- Polished product styling and copy
- Made the number of ES clients configurable
- Reduced the flood of MySQL key-conflict logs

**Capability improvements**
- Added a periodic task that proactively creates missing ES templates and indices, reducing extra scripting
- Made the Broker address used for JMX connections selectable

---

## v3.0.0-beta.0

**1. Multi-cluster management**
@@ -9,7 +9,7 @@ error_exit ()

 [ ! -e "$JAVA_HOME/bin/java" ] && unset JAVA_HOME

 if [ -z "$JAVA_HOME" ]; then
-  if $darwin; then
+  if [ "Darwin" = "$(uname -s)" ]; then

     if [ -x '/usr/libexec/java_home' ] ; then
       export JAVA_HOME=`/usr/libexec/java_home`
Binary files not shown (4 images removed; sizes 9.5 KiB, 183 KiB, 50 KiB, 59 KiB).
docs/dev_guide/Task模块简介.md (new file, 264 lines)

@@ -0,0 +1,264 @@

# Introduction to the Task Module

## 1. What the Task module does

In KnowStreaming (KS for short), the Task module runs periodic jobs: scheduled collection of Cluster, Broker, Topic and other metrics, scheduled syncing of cluster metadata into the DB, health inspection of cluster state, and so on. All Task-related code lives in the km-task module.

The Task module builds its periodic execution on the Logi-Job component from LogiCommon. Logi-Job is functionally similar to XXX-Job; it is an embedded equivalent of XXX-Job inside KnowStreaming, mainly intended to simplify KnowStreaming deployment.
Logi-Job tasks have two execution modes:

+ Broadcast mode: within one KS cluster and one task cycle, every KS host executes the scheduled task.
+ Preemptive mode: within one KS cluster and one task cycle, only one KS host executes the task.

Definition of a KS cluster: KS hosts that connect to the same DB and share the same spring.logi-job.app-name in application.yml form one KS cluster.
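As a minimal sketch of that definition (the key path comes from the text above; the value is a placeholder):

```yaml
# application.yml: hosts that share this app-name (and the same DB) form one KS cluster
spring:
  logi-job:
    app-name: know-streaming   # placeholder; must be identical across the cluster's hosts
```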
## 2. Usage guide

On top of Logi-Job's broadcast and preemptive modes, the Task module implements preemptive execution, duplicated execution and balanced execution. The differences:

+ Preemptive execution: within one KS cluster and one task cycle, only one KS host runs the task;
+ Duplicated execution: within one KS cluster and one task cycle, every KS host runs the task. For example, with 3 KS hosts and 3 Kafka clusters, every KS host collects the metrics of all 3 Kafka clusters;
+ Balanced execution: within one KS cluster and one task cycle, each KS host runs only part of the task, and all hosts complete it cooperatively. With 3 KS hosts and 3 Kafka clusters, in steady state each host collects the metrics of exactly 1 Kafka cluster, so the 3 hosts jointly cover all 3 clusters.

Concrete examples follow.

### 2.1 Preemptive mode: preemptive execution

What it does:

+ Within one KS cluster and one task cycle, only one KS host runs the task.

Example:

```java
// 1. Implement the Job interface and override the execute method;
// 2. Annotate the class with @Task and configure it, selecting random-preemptive mode;
// Effect: in the KS cluster, every 5 seconds one KS host prints "test scheduled task running";
@Task(name = "TestJob",
      description = "test scheduled task",
      cron = "*/5 * * * * ?",
      autoRegister = true,
      consensual = ConsensualEnum.RANDOM, // must be RANDOM here
      timeout = 6 * 60)
public class TestJob implements Job {

    @Override
    public TaskResult execute(JobContext jobContext) throws Exception {
        System.out.println("test scheduled task running");
        return new TaskResult();
    }
}
```

### 2.2 Broadcast mode: duplicated execution

What it does:

+ Within one KS cluster and one task cycle, every KS host runs the task. For example, with 3 KS hosts and 3 Kafka clusters, every KS host redundantly collects the metrics of all 3 Kafka clusters.

Example:

```java
// 1. Implement the Job interface and override the execute method;
// 2. Annotate the class with @Task and configure it, selecting broadcast mode;
// Effect: in the KS cluster, every 5 seconds every KS host prints "test scheduled task running";
@Task(name = "TestJob",
      description = "test scheduled task",
      cron = "*/5 * * * * ?",
      autoRegister = true,
      consensual = ConsensualEnum.BROADCAST, // must be BROADCAST here
      timeout = 6 * 60)
public class TestJob implements Job {

    @Override
    public TaskResult execute(JobContext jobContext) throws Exception {
        System.out.println("test scheduled task running");
        return new TaskResult();
    }
}
```

### 2.3 Broadcast mode: balanced execution

What it does:

+ Within one KS cluster and one task cycle, each KS host runs only part of the task, and all hosts complete it cooperatively. With 3 KS hosts and 3 Kafka clusters, in steady state each host collects the metrics of exactly 1 Kafka cluster, so the 3 hosts jointly cover all 3 clusters.

Example:

+ This mode is a little special: it is a KS extension built on Logi-Job's broadcast mode. A usage example:

```java
// 1. Extend AbstractClusterPhyDispatchTask and implement processSubTask;
// 2. Annotate the class with @Task, configured for broadcast mode;
// Effect: every minute, KS splits the full Kafka-cluster list evenly across the KS hosts;
// each host runs processSubTask in turn for every Kafka cluster dispatched to it,
// so the KS cluster processes the task cooperatively.
@Task(name = "kmJobTask",
      description = "km job 模块调度执行任务",
      cron = "0 0/1 * * * ? *",
      autoRegister = true,
      consensual = ConsensualEnum.BROADCAST,
      timeout = 6 * 60)
public class KMJobTask extends AbstractClusterPhyDispatchTask {

    @Autowired
    private JobService jobService;

    @Override
    protected TaskResult processSubTask(ClusterPhy clusterPhy, long triggerTimeUnitMs) throws Exception {
        jobService.scheduleJobByClusterId(clusterPhy.getId());
        return TaskResult.SUCCESS;
    }
}
```

## 3. How it works

### 3.1 The @Task annotation

```java
public @interface Task {
    String name() default "";          // task name
    String description() default "";   // task description
    String owner() default "system";   // owner
    String cron() default "";          // cron schedule
    int retryTimes() default 0;        // max retries after a failure
    long timeout() default 0;          // retry within this timeout
    // Whether to auto-register the task in the database.
    // If false, you must register the task manually in the km_task table;
    // the DB record and the @Task annotation are both required.
    boolean autoRegister() default false;
    // Execution mode: broadcast or random-preemptive.
    // Broadcast: every server in the cluster executes the scheduled task.
    // Random-preemptive: one random server in the cluster executes it.
    ConsensualEnum consensual() default ConsensualEnum.RANDOM;
}
```

### 3.2 Database tables

+ logi_task: the scheduled tasks defined in the project, one row per task.
+ logi_job: per-execution information.
+ logi_job_log: execution logs of the scheduled tasks.
+ logi_worker: host information, used for cluster coordination.

### 3.3 Balanced execution

#### 3.3.1 Class diagram

Taking KMJobTask as the example, here is a quick look at how scheduled tasks are implemented in KM.

 (class diagram; the image was not captured in this mirror)

+ Job: the interface every Logi-based scheduled task must implement.
+ Comparable & EntifyIdInterface: comparison interfaces that implement the task-ordering logic.
+ AbstractDispatchTask: balanced dispatch of tasks under broadcast mode.
+ AbstractClusterPhyDispatchTask: iterates over the cluster list dispatched to the current server.
+ KMJobTask: the scheduled handling of a single cluster.

#### 3.3.2 Key classes

+ **AbstractDispatchTask**

```java
// Abstract Job implementation that load-balances task execution
public abstract class AbstractDispatchTask<E extends Comparable & EntifyIdInterface> implements Job {

    // list all tasks
    protected abstract List<E> listAllTasks();

    // run the tasks assigned to this KS host
    protected abstract TaskResult processTask(List<E> subTaskList, long triggerTimeUnitMs);

    // invoked by Logi-Job; performs the task assignment
    @Override
    public TaskResult execute(JobContext jobContext) {
        try {
            long triggerTimeUnitMs = System.currentTimeMillis();

            // fetch all tasks
            List<E> allTaskList = this.listAllTasks();

            // compute the tasks this KS host must run
            List<E> subTaskList = this.selectTask(allTaskList, jobContext.getAllWorkerCodes(), jobContext.getCurrentWorkerCode());

            // process them
            return this.processTask(subTaskList, triggerTimeUnitMs);
        } catch (Exception e) {
            // ...
        }
    }
}
```

+ **AbstractClusterPhyDispatchTask**

```java
// AbstractDispatchTask specialization that load-balances across Kafka clusters
public abstract class AbstractClusterPhyDispatchTask extends AbstractDispatchTask<ClusterPhy> {

    // run one assigned cluster; implemented by subclasses
    protected abstract TaskResult processSubTask(ClusterPhy clusterPhy, long triggerTimeUnitMs) throws Exception;

    // return all Kafka clusters
    @Override
    public List<ClusterPhy> listAllTasks() {
        return clusterPhyService.listAllClusters();
    }

    // run the Kafka-cluster tasks assigned to this KS host
    @Override
    public TaskResult processTask(List<ClusterPhy> subTaskList, long triggerTimeUnitMs) { // ... }

}
```

+ **KMJobTask**

```java
// Annotated with @Task and configured for execution
@Task(name = "kmJobTask",
      description = "km job 模块调度执行任务",
      cron = "0 0/1 * * * ? *",
      autoRegister = true,
      consensual = ConsensualEnum.BROADCAST,
      timeout = 6 * 60)
// extends AbstractClusterPhyDispatchTask
public class KMJobTask extends AbstractClusterPhyDispatchTask {

    @Autowired
    private JobService jobService;

    // run the Job-module work for this Kafka cluster
    @Override
    protected TaskResult processSubTask(ClusterPhy clusterPhy, long triggerTimeUnitMs) throws Exception {
        jobService.scheduleJobByClusterId(clusterPhy.getId());
        return TaskResult.SUCCESS;
    }
}
```

#### 3.3.3 Balanced execution in a nutshell

The implementation boils down to:

+ Logi-Job is set to broadcast mode, so every KS host is triggered;
+ Once triggered, each KS host sorts the task list and the KS host list by one shared rule, then assigns the tasks evenly, in order, across the sorted hosts. While the KS cluster runs stably, this step guarantees the per-host task lists neither overlap nor drop anything;
+ Finally, each KS host runs the tasks assigned to it.
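The selection rule is not spelled out above; a minimal sketch of one deterministic scheme consistent with this description (sort both lists, then split by index modulo the host count; the real selectTask implementation may differ) looks like:

```java
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public class BalancedAssignSketch {
    // Hypothetical helper: every host computes the same global assignment independently,
    // because tasks and worker codes are sorted by the same rule before splitting.
    static <E extends Comparable<E>> List<E> selectTasks(List<E> allTasks,
                                                         List<String> allWorkerCodes,
                                                         String currentWorkerCode) {
        List<E> tasks = new ArrayList<>(allTasks);
        List<String> workers = new ArrayList<>(allWorkerCodes);
        Collections.sort(tasks);
        Collections.sort(workers);

        List<E> mine = new ArrayList<>();
        int myIdx = workers.indexOf(currentWorkerCode);
        if (myIdx < 0) {
            return mine; // this host is not in the worker list
        }
        for (int i = 0; i < tasks.size(); i++) {
            // round-robin split over the sorted task list: no overlap, nothing dropped
            if (i % workers.size() == myIdx) {
                mine.add(tasks.get(i));
            }
        }
        return mine;
    }
}
```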
## 4. Caveats

+ There is no 100% guarantee that a task runs exactly once per cycle; duplicate or missed runs are possible. Tasks that strictly require exactly-once execution should not rely on Logi-Job for control.
+ Let Logi-Job be responsible only for triggering; hand the subsequent work off to a thread pool you create yourself.
Binary files not shown (2 images removed; sizes 600 KiB, 228 KiB).
@@ -36,7 +36,7 @@ KS-KM builds, along the three dimensions above, for the kafka versions it needs to manage

   For each release, KS-KM analyzes in advance the differences among the kafka versions on its support list and the product requirements. KS-KM also builds a dedicated compatibility service that performs compatibility registration, dictionary construction and handler dispatch; the version-compatibility handlers are where the concrete differences between kafka versions are actually handled.

-
+

   As the figure above shows, when the KS-KM topic service faces different kafka versions, topic creation, deletion and expansion must be handled differently in KnowStreaming because of the versions' own differences. So a separate compatibility handler is implemented per kafka version and registered with KnowStreaming's compatibility service, building a compatibility dictionary; while KnowStreaming runs, calls for different kafka versions can then be dispatched to the matching handler.
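A minimal sketch of that dispatch idea (VersionHandlerRegistry and TopicCreateHandler are illustrative names, not KnowStreaming's actual API; the numeric version encoding is likewise an assumption):

```java
import java.util.Map;
import java.util.NavigableMap;
import java.util.TreeMap;

// Illustrative handler interface for one version-sensitive operation.
interface TopicCreateHandler {
    void createTopic(String topicName);
}

// A version-keyed "compatibility dictionary": handlers are registered under the
// lowest Kafka version they support, encoded as a long (e.g. 2.5.0 -> 20500).
class VersionHandlerRegistry {
    private final NavigableMap<Long, TopicCreateHandler> handlers = new TreeMap<>();

    void register(long minVersionCode, TopicCreateHandler handler) {
        handlers.put(minVersionCode, handler);
    }

    // Dispatch: pick the handler with the greatest minVersionCode <= the cluster's version.
    TopicCreateHandler dispatch(long clusterVersionCode) {
        Map.Entry<Long, TopicCreateHandler> e = handlers.floorEntry(clusterVersionCode);
        if (e == null) {
            throw new IllegalStateException("no handler registered for version " + clusterVersionCode);
        }
        return e.getValue();
    }
}
```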
@@ -29,7 +29,7 @@

 - Initialize the MySQL tables and data
 - Initialize the Elasticsearch indices

-See the last step of [Quick start](./1-quick-start.md): the initialization work within "deploying the KnowStreaming service".
+See the last step of [单机部署手册](../install_guide/单机部署手册.md) (standalone deployment guide): the initialization work within "deploying the KnowStreaming service".

 ### 6.1.4 Local startup

@@ -73,7 +73,7 @@ km-rest/src/main/java/com/xiaojukeji/know/streaming/km/rest/KnowStreaming.java

 Further IDEA configuration is shown below:

 <p align="center">
-<img src="./assets/startup_using_source_code/IDEA配置.jpg" width = "512" height = "318" div align=center />
+<img src="http://img-ys011.didistatic.com/static/dc2img/do1_BW1RzgEMh4n6L4dL4ncl" width = "512" height = "318" div align=center />
 </p>

 **Step 4: start the project**

@@ -84,7 +84,7 @@ Further IDEA configuration is shown below:

 After `Know Streaming` starts, you can access:

-- Product UI: http://localhost:8080 , log in with the default credentials `admin` / `admin2022_`.
+- Product UI: http://localhost:8080 , log in with the default credentials `admin` / `admin2022_`. From `v3.0.0-beta.2` on, the default credentials are `admin` / `admin`;
 - API docs: http://localhost:8080/swagger-ui.html for the backend interfaces.

 For more, see the [KnowStreaming website](https://knowstreaming.com/)
docs/dev_guide/登录系统对接.md (new file, 199 lines)

@@ -0,0 +1,199 @@



## Integrating a Login System

Besides login authentication backed by the local MySQL database, [KnowStreaming](https://github.com/didi/KnowStreaming) (KS below) already implements LDAP-based login authentication.

These are not the only two kinds of login systems, however. For better extensibility, KS therefore supports customizing the login logic, so an existing system can be integrated quickly.

In KS, the login-related files live in the [km-account](https://github.com/didi/KnowStreaming/tree/master/km-extends/km-account) module under [km-extends](https://github.com/didi/KnowStreaming/tree/master/km-extends).

This doc describes how to hook KS up to your own user login system quickly.

### Integration steps

- Create a login class that implements the LoginExtend interface from [LogiCommon](https://github.com/didi/LogiCommon);
- Set the spring.logi-security.login-extend-bean-name field in [application.yml](https://github.com/didi/KnowStreaming/blob/master/km-rest/src/main/resources/application.yml) to the bean name of that login class;

```Java
// the LoginExtend interface
public interface LoginExtend {

    /**
     * Verify the login credentials and remember the login state
     */
    UserBriefVO verifyLogin(AccountLoginDTO var1, HttpServletRequest var2, HttpServletResponse var3) throws LogiSecurityException;

    /**
     * Log out and clear the login state
     */
    Result<Boolean> logout(HttpServletRequest var1, HttpServletResponse var2);

    /**
     * Check whether the user is already logged in
     */
    boolean interceptorCheck(HttpServletRequest var1, HttpServletResponse var2, String var3, List<String> var4) throws IOException;

}
```

### Worked example

We use the LDAP integration as the example of hooking a login system into KS.

+ Write the [LdapLoginServiceImpl](https://github.com/didi/KnowStreaming/blob/master/km-extends/km-account/src/main/java/com/xiaojukeji/know/streaming/km/account/login/ldap/LdapLoginServiceImpl.java) class, implementing LoginExtend.
+ Set spring.logi-security.login-extend-bean-name=ksLdapLoginService in [application.yml](https://github.com/didi/KnowStreaming/blob/master/km-rest/src/main/resources/application.yml).

Those two steps are all it takes for KS to authenticate logins against LDAP.

```Java
@Service("ksLdapLoginService")
public class LdapLoginServiceImpl implements LoginExtend {

    @Override
    public UserBriefVO verifyLogin(AccountLoginDTO loginDTO,
                                   HttpServletRequest request,
                                   HttpServletResponse response) throws LogiSecurityException {
        String decodePasswd = AESUtils.decrypt(loginDTO.getPw());

        // verify the credentials against LDAP
        LdapPrincipal ldapAttrsInfo = ldapAuthentication.authenticate(loginDTO.getUserName(), decodePasswd);
        if (ldapAttrsInfo == null) {
            // user does not exist; normally the previous step already throws if anything is wrong
            throw new LogiSecurityException(ResultCode.USER_NOT_EXISTS);
        }

        // business-specific handling

        // record the login state; LDAP cannot hold it, so KnowStreaming records it
        initLoginContext(request, response, loginDTO.getUserName(), user.getId());
        return CopyBeanUtil.copy(user, UserBriefVO.class);
    }

    @Override
    public Result<Boolean> logout(HttpServletRequest request, HttpServletResponse response) {

        // clear the cookie and session

        return Result.buildSucc(Boolean.TRUE);
    }

    @Override
    public boolean interceptorCheck(HttpServletRequest request, HttpServletResponse response, String requestMappingValue, List<String> whiteMappingValues) throws IOException {

        // check whether the user is logged in
        String userName = HttpRequestUtil.getOperator(request);
        if (StringUtils.isEmpty(userName)) {
            // not logged in: log out
            logout(request, response);
            return Boolean.FALSE;
        }

        return Boolean.TRUE;
    }
}
```

### How it works

Login and logout follow the same overall flow, so we walk through login.

+ Login flow

Login goes through the LoginController that ships with [LogiCommon](https://github.com/didi/LogiCommon).

```java
@RestController
public class LoginController {

    // login endpoint
    @PostMapping({"/login"})
    public Result<UserBriefVO> login(HttpServletRequest request, HttpServletResponse response, @RequestBody AccountLoginDTO loginDTO) {
        try {
            // authenticate
            UserBriefVO userBriefVO = this.loginService.verifyLogin(loginDTO, request, response);
            return Result.success(userBriefVO);

        } catch (LogiSecurityException var5) {
            return Result.fail(var5);
        }
    }

}
```

The login operation is carried out by the LoginServiceImpl class, but which login class actually performs it is designated by loginExtendBeanTool.

```java
// the LoginServiceImpl class
@Service
public class LoginServiceImpl implements LoginService {

    // performs login; the concrete login class is managed by loginExtendBeanTool
    public UserBriefVO verifyLogin(AccountLoginDTO loginDTO, HttpServletRequest request, HttpServletResponse response) throws LogiSecurityException {

        return this.loginExtendBeanTool.getLoginExtendImpl().verifyLogin(loginDTO, request, response);
    }

}
```

loginExtendBeanTool first looks up the user-specified login class and falls back to the default login implementation if that fails.

```java
// the LoginExtendBeanTool class
@Component("logiSecurityLoginExtendBeanTool")
public class LoginExtendBeanTool {

    public LoginExtend getLoginExtendImpl() {
        LoginExtend loginExtend;
        // try the user-specified login class first; fall back to the system default
        try {
            // the class is named by spring.logi-security.login-extend-bean-name
            loginExtend = this.getCustomLoginExtendImplBean();
        } catch (UnsupportedOperationException var3) {
            loginExtend = this.getDefaultLoginExtendImplBean();
        }

        return loginExtend;
    }
}
```

+ Auth-check flow

The auth check is simpler: our interceptor, PermissionInterceptor, is registered with Spring.

The interceptor calls LoginServiceImpl's check method; from there the handling matches the login flow above.

```java
public class PermissionInterceptor implements HandlerInterceptor {

    /**
     * Pre-handle hook
     * @return boolean false: intercept, do not continue; true: let the request through
     */
    @Override
    public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler) throws Exception {

        // login-exempt checks; return early if they pass

        // run the check for ordinary users
        return loginService.interceptorCheck(request, response, classRequestMappingValue, whiteMappingValues);
    }

}
```
@@ -1,25 +1,20 @@

-
+

 ## Fixing JMX Connection Failures

-- [JMX-连接失败问题解决](#jmx-连接失败问题解决)
-  - [1、问题&说明](#1问题说明)
-  - [2、解决方法](#2解决方法)
-  - [3、解决方法 —— 认证的JMX](#3解决方法--认证的jmx)

-Once a cluster is onboarded into Logi-KafkaManager, its Broker list becomes visible. If at that point you cannot see real-time Topic traffic or real-time Broker traffic, the cause is very likely a JMX connection problem.
+Once a cluster is onboarded into `KnowStreaming`, its Broker list becomes visible. If at that point you cannot see real-time Topic traffic or real-time Broker traffic, the cause is very likely a `JMX` connection problem.

 Let's check it step by step.

-### 1. Problems & notes
+### 1. Problem description

 **Type 1: JMX not enabled**

 If it is not enabled, go straight to `2. Fix` to see how to enable it.

-
+

 **Type 2: misconfiguration**

@@ -43,6 +38,26 @@ java.rmi.ConnectException: Connection refused to host: 192.168.0.1; nested excep
 java.rmi.ConnectException: Connection refused to host: 127.0.0.1;; nested exception is:
 ```

+**Type 3: a specific IP must be used**
+
+The Broker has internal and external networks configured, and JMX may be bound to the internal or the external IP; `KnowStreaming` then has to connect to the IP on that specific network.
+
+For example:
+
+The Broker's node in ZK has the structure below. We want to connect to the address labelled `INTERNAL` in `endpoints`, but `KnowStreaming` connects to the `EXTERNAL` one. See `4. Fix: JMX over a specific network` for the solution.
+
+```json
+{
+    "listener_security_protocol_map": {"EXTERNAL":"SASL_PLAINTEXT","INTERNAL":"SASL_PLAINTEXT"},
+    "endpoints": ["EXTERNAL://192.168.0.1:7092","INTERNAL://192.168.0.2:7093"],
+    "jmx_port": 8099,
+    "host": "192.168.0.1",
+    "timestamp": "1627289710439",
+    "port": -1,
+    "version": 4
+}
+```

 ### 2. Fix

 Only a fairly generic fix is described here; if you know a better way, please let us know.

@@ -76,26 +91,36 @@ fi

 If you jumped straight to this part, read the previous section, `2. Fix`, first to make sure the `JMX` configuration itself is fine.

-If the JMX configuration is fine but the connection still fails because of authentication, you can use the method described below.
+If the `JMX` configuration is fine but the connection still fails because of authentication, you can enter your `JMX` credentials on the cluster-onboarding screen.

-**This part of the backend has only just been finished and may still be rough; reach out any time if something goes wrong.**
+<img src='http://img-ys011.didistatic.com/static/dc2img/do1_EUU352qMEX1Jdp7pxizp' width=350>

-Backends from `Logi-KafkaManager 2.2.0+` already support `JMX` connections with authentication, but there is no UI yet; for now you can write the `JMX` credentials into the `jmx_properties` column of the `cluster` table.
-
-The value is a `json` string, for example:
+### 4. Fix: JMX over a specific network
+
+You can manually add a `useWhichEndpoint` key to the `jmx_properties` column of the `ks_km_physical_cluster` table to make `KnowStreaming` connect to a specific JMX IP and PORT.
+
+`jmx_properties` format:
 ```json
 {
-    "maxConn": 10,                 # max JMX connections from KM to a single Broker
-    "username": "xxxxx",           # username
-    "password": "xxxx",            # password
+    "maxConn": 100,                # max JMX connections from KM to a single Broker
+    "username": "xxxxx",           # username, optional
+    "password": "xxxx",            # password, optional
     "openSSL": true,               # SSL: true to enable, false to disable
+    "useWhichEndpoint": "EXTERNAL" # name of the network to connect to; EXTERNAL selects the EXTERNAL address in endpoints
 }
 ```

-An example of the SQL:
+SQL example:
 ```sql
-UPDATE cluster SET jmx_properties='{ "maxConn": 10, "username": "xxxxx", "password": "xxxx", "openSSL": false }' where id={xxx};
+UPDATE ks_km_physical_cluster SET jmx_properties='{ "maxConn": 10, "username": "xxxxx", "password": "xxxx", "openSSL": false , "useWhichEndpoint": "xxx"}' where id={xxx};
 ```

 Note:

 + For now this feature only supports kafka clusters that use `ZK` for distributed coordination.
@@ -6,9 +6,10 @@

 ### 2.1.1 Installation notes

-- Deployment is illustrated with version `v3.0.0-bete`;
+- Deployment is illustrated with version `v3.0.0-beta.1`;
 - CentOS-7 is assumed; the baseline system spec is 4C-8G;
-- After deployment, access it from a browser at `IP:PORT`; the default port is `8080` and the default credentials are `admin` / `admin2022_`;
+- After deployment, access it from a browser at `IP:PORT`; the default port is `8080` and the default credentials are `admin` / `admin2022_`.
+- From `v3.0.0-beta.2` on, the default credentials are `admin` / `admin`;
 - This guide covers standalone deployment; for distributed deployment, [contact us](https://knowstreaming.com/support-center)

 **Software dependencies**

@@ -19,7 +20,7 @@

 | ElasticSearch | v7.6+ | 8060 |
 | JDK | v8+ | - |
 | CentOS | v6+ | - |
-| Ubantu | v16+ | - |
+| Ubuntu | v16+ | - |

@@ -29,7 +30,7 @@

 ```bash
 # Download the install script on the server. The script reinstalls MySQL under the current
 # directory; the password of the reinstalled mysql is stored in the mysql.password file there.
-wget https://s3-gzpu.didistatic.com/pub/knowstreaming/deploy_KnowStreaming.sh
+wget https://s3-gzpu.didistatic.com/pub/knowstreaming/deploy_KnowStreaming-3.0.0-beta.1.sh

 # run the script
 sh deploy_KnowStreaming.sh

@@ -42,10 +43,10 @@ sh deploy_KnowStreaming.sh

 ```bash
 # download the package locally and copy it to the target server
-wget https://s3-gzpu.didistatic.com/pub/knowstreaming/KnowStreaming-3.0.0-beta—offline.tar.gz
+wget https://s3-gzpu.didistatic.com/pub/knowstreaming/KnowStreaming-3.0.0-beta.1-offline.tar.gz

 # extract the package
-tar -zxf KnowStreaming-3.0.0-beta—offline.tar.gz
+tar -zxf KnowStreaming-3.0.0-beta.1-offline.tar.gz

 # run the install script
 sh deploy_KnowStreaming-offline.sh

@@ -58,28 +59,182 @@ sh deploy_KnowStreaming-offline.sh

 ### 2.1.3 Container deployment

 #### 2.1.3.1 Helm

 **Prerequisites**

 - Kubernetes >= 1.14, Helm >= 2.17.0

-- The default configuration installs everything (ElasticSearch + MySQL + KnowStreaming)
+- By default all dependencies are installed: ElasticSearch (3-node cluster) + MySQL (standalone) + KnowStreaming-manager + KnowStreaming-ui

-- If you use an existing ElasticSearch (7.6.x) and MySQL (5.7), just adjust a few values.yaml parameters
+- To use an existing ElasticSearch (7.6.x) and MySQL (5.7), just adjust a few values.yaml parameters

 **Install commands**

 ```bash
-# download the package
-wget https://s3-gzpu.didistatic.com/pub/knowstreaming/knowstreaming-3.0.0-hlem.tgz
-
-# extract the package
-tar -zxf knowstreaming-3.0.0-hlem.tgz
-
-# run the command (NAMESPACE must be an existing one)
-helm install -n [NAMESPACE] knowstreaming knowstreaming-manager/
+# all related images can be pulled from Docker Hub
+# quick install (NAMESPACE must be an existing one; startup takes a few minutes to initialize, please wait)
+helm install -n [NAMESPACE] [NAME] http://download.knowstreaming.com/charts/knowstreaming-manager-0.1.5.tgz
+
+# get the KnowStreaming frontend ui service; nodeport by default
+# (http://nodeIP:nodeport, default credentials admin/admin2022_)
+# from `v3.0.0-beta.2` on (helm chart 0.1.4 and later), the default credentials are `admin` / `admin`
+
+# add the repo
+helm repo add knowstreaming http://download.knowstreaming.com/charts
+
+# pull the latest version
+helm pull knowstreaming/knowstreaming-manager
 ```

 #### 2.1.3.2 Docker Compose
 **Prerequisites**

 - [Docker](https://docs.docker.com/engine/install/)
 - [Docker Compose](https://docs.docker.com/compose/install/)

 **Install commands**
 ```bash
 # from `v3.0.0-beta.2` on (docker image 0.2.0 and later), the default credentials are `admin` / `admin`
 # find the latest image versions at https://hub.docker.com/u/knowstreaming
 # mysql and es can be services you run yourself; just adjust the corresponding config

 # copy docker-compose.yml into place, then run the command below to start
 docker-compose up -d
 ```

 **Verify the installation**
 ```shell
 docker-compose ps
 # verify startup - State "Up" means success
         Name                       Command                  State                  Ports
 ----------------------------------------------------------------------------------------------------
 elasticsearch-single    /usr/local/bin/docker-entr ...   Up                      9200/tcp, 9300/tcp
 knowstreaming-init      /bin/bash /es_template_cre ...   Up
 knowstreaming-manager   /bin/sh /ks-start.sh             Up                      80/tcp
 knowstreaming-mysql     /entrypoint.sh mysqld            Up (health: starting)   3306/tcp, 33060/tcp
 knowstreaming-ui        /docker-entrypoint.sh ngin ...   Up                      0.0.0.0:80->80/tcp

 # after about a minute knowstreaming-init exits, meaning es initialization is done and the page is reachable
         Name                       Command                  State                  Ports
 -------------------------------------------------------------------------------------------
 knowstreaming-init      /bin/bash /es_template_cre ...   Exit 0
 knowstreaming-mysql     /entrypoint.sh mysqld            Up (healthy)            3306/tcp, 33060/tcp
 ```

 **Access**
 ```http request
 http://127.0.0.1:80/
 ```

 **docker-compose.yml**
 ```yml
 version: "2"
 services:
   # * do not rename the knowstreaming-manager service; the ui references it
   knowstreaming-manager:
     image: knowstreaming/knowstreaming-manager:latest
     container_name: knowstreaming-manager
     privileged: true
     restart: always
     depends_on:
       - elasticsearch-single
       - knowstreaming-mysql
     expose:
       - 80
     command:
       - /bin/sh
       - /ks-start.sh
     environment:
       TZ: Asia/Shanghai
       # mysql service address
       SERVER_MYSQL_ADDRESS: knowstreaming-mysql:3306
       # mysql database name
       SERVER_MYSQL_DB: know_streaming
       # mysql user
       SERVER_MYSQL_USER: root
       # mysql password
       SERVER_MYSQL_PASSWORD: admin2022_
       # es service address
       SERVER_ES_ADDRESS: elasticsearch-single:9200
       # JVM options for the service
       JAVA_OPTS: -Xmx1g -Xms1g
     # hostnames written in kafka's ADVERTISED_LISTENERS can be resolved like this
     # extra_hosts:
     #   - "hostname:x.x.x.x"
     # service log path
     # volumes:
     #   - /ks/manage/log:/logs
   knowstreaming-ui:
     image: knowstreaming/knowstreaming-ui:latest
     container_name: knowstreaming-ui
     restart: always
     ports:
       - '80:80'
     environment:
       TZ: Asia/Shanghai
     depends_on:
       - knowstreaming-manager
     # extra_hosts:
     #   - "hostname:x.x.x.x"
   elasticsearch-single:
     image: docker.io/library/elasticsearch:7.6.2
     container_name: elasticsearch-single
     restart: always
     expose:
       - 9200
       - 9300
     # ports:
     #   - '9200:9200'
     #   - '9300:9300'
     environment:
       TZ: Asia/Shanghai
       # JVM options for es
       ES_JAVA_OPTS: -Xms512m -Xmx512m
       # single-node setup; for a multi-node cluster see https://www.elastic.co/guide/en/elasticsearch/reference/7.6/docker.html#docker-compose-file
       discovery.type: single-node
     # data persistence path
     # volumes:
     #   - /ks/es/data:/usr/share/elasticsearch/data

   # es init service, using the same image as the manager
   # the first es start needs the templates and indices initialized; afterwards they are created automatically
   knowstreaming-init:
     image: knowstreaming/knowstreaming-manager:latest
     container_name: knowstreaming-init
     depends_on:
       - elasticsearch-single
     command:
       - /bin/bash
       - /es_template_create.sh
     environment:
       TZ: Asia/Shanghai
       # es service address
       SERVER_ES_ADDRESS: elasticsearch-single:9200

   knowstreaming-mysql:
     image: knowstreaming/knowstreaming-mysql:latest
     container_name: knowstreaming-mysql
     restart: always
     environment:
       TZ: Asia/Shanghai
       # root password
       MYSQL_ROOT_PASSWORD: admin2022_
       # database created at initialization
       MYSQL_DATABASE: know_streaming
       # match any host, allowing remote access
       MYSQL_ROOT_HOST: '%'
     expose:
       - 3306
     # ports:
     #   - '3306:3306'
     # data persistence path
     # volumes:
     #   - /ks/mysql/data:/data/mysql
 ```

@@ -219,10 +374,10 @@ sh /data/elasticsearch/control.sh status

 ```bash
 # download the package
-wget https://s3-gzpu.didistatic.com/pub/knowstreaming/KnowStreaming-3.0.0-beta.tar.gz
+wget https://s3-gzpu.didistatic.com/pub/knowstreaming/KnowStreaming-3.0.0-beta.1.tar.gz

 # extract the package into the target directory
-tar -zxf KnowStreaming-3.0.0-beta.tar.gz -C /data/
+tar -zxf KnowStreaming-3.0.0-beta.1.tar.gz -C /data/

 # adjust the startup script and register it with systemd
 cd /data/KnowStreaming/

@@ -236,7 +391,7 @@ mysql -uroot -pDidi_km_678 know_streaming < ./init/sql/dml-ks-km.sql

 mysql -uroot -pDidi_km_678 know_streaming < ./init/sql/dml-logi.sql

 # create the elasticsearch initialization data
-sh ./init/template/template.sh
+sh ./bin/init_es_template.sh

 # edit the config file
 vim ./conf/application.yml
@@ -1,6 +1,4 @@

-
-

 # `Know Streaming` Build-from-Source Guide

@@ -11,7 +9,7 @@

 `windows7+`, `Linux`, `Mac`

 **Prerequisites**

 - Maven 3.6.3 (backend)
 - Node v12.20.0/v14.17.3 (frontend)
 - Java 8+ (backend)

@@ -25,27 +23,23 @@

 Details are described below.

 ### 2.1 Combined frontend and backend build

 1. Download the source;
 2. In the `KS-KM` project directory, run `mvn -Prelease-package -Dmaven.test.skip=true clean install -U`;
 3. When the command finishes, a `KnowStreaming-*.tar.gz` package is produced under `km-dist/target`.

 ### 2.2 Frontend-only build

 1. Download the source;
-2. Enter the `KS-KM/km-console` project directory;
-3. Run `npm run build`; a frontend static-asset package named `pub` is produced under `KS-KM/km-console`;
+2. Follow the steps in the [frontend build doc](https://github.com/didi/KnowStreaming/blob/master/km-console/README.md). On success, a frontend static-asset package named `templates` is produced under `km-rest/src/main/resources`;
+3. If the previous step errors, see item 8.10 of the [FAQ](https://github.com/didi/KnowStreaming/blob/master/docs/user_guide/faq.md);

 ### 2.3 Backend-only build

 1. Download the source;
 2. Edit the top-level `pom.xml` and remove the `km-console` module, as shown;

 ```xml
 <modules>
     <!-- <module>km-console</module>-->
@@ -62,10 +56,7 @@
     <module>km-rest</module>
     <module>km-dist</module>
 </modules>
 ```

 3. Run `mvn -U clean package -Dmaven.test.skip=true`;
 4. When it finishes, a `ks-km.jar`, the Jar for deploying the KS backend, is produced under `KS-KM/km-rest/target`; you can also run `mvn -Prelease-package -Dmaven.test.skip=true clean install -U`, whose tar package likewise contains only the backend service;
@@ -1,6 +1,118 @@
 ## 6.2 Version Upgrade Guide

-**Upgrading from `2.x` to `3.0.0`**
+Note:
+- To upgrade to a given version, you must apply every change from your current version up to the target version; only then will the system work properly.
+- If a version in between lists no upgrade info, simply replacing the package moves you from the previous version to that one.

### 6.2.0 Upgrading to `master`

None yet

### 6.2.1 Upgrading to `v3.0.0`

**SQL changes**

```sql
ALTER TABLE `ks_km_physical_cluster`
    ADD COLUMN `zk_properties` TEXT NULL COMMENT 'ZK配置' AFTER `jmx_properties`;
```

---

### 6.2.2 Upgrading to `v3.0.0-beta.2`

**Config changes**

```yaml
# new config
spring:
  logi-security: # DB settings of the logi-security module that know-streaming depends on; keeping them identical to know-streaming's own DB settings is fine
    login-extend-bean-name: logiSecurityDefaultLoginExtendImpl # bean name of the login-system Service in use; no need to change

# thread-pool sizing: the task module gains three pool categories,
# reducing interference between different task types and pressure on logi-job's internal pool
thread-pool:
  task:                 # task-module settings
    metrics:            # metrics-collection tasks
      thread-num: 18    # core threads of the metrics-collection pool
      queue-size: 180   # queue size of the metrics-collection pool
    metadata:           # metadata-sync tasks
      thread-num: 27    # core threads of the metadata-sync pool
      queue-size: 270   # queue size of the metadata-sync pool
    common:             # all remaining tasks
      thread-num: 15    # core threads of the common pool
      queue-size: 150   # queue size of the common pool

# removed config; the following settings are no longer used
thread-pool:
  task:                 # task-module settings
    heaven:             # collection tasks
      thread-num: 20    # core threads of the collection pool
      queue-size: 1000  # queue size of the collection pool
```

**SQL changes**

```sql
-- multi-cluster management permissions, added 2022-09-06
INSERT INTO `logi_security_permission` (`id`, `permission_name`, `parent_id`, `leaf`, `level`, `description`, `is_delete`, `app_name`) VALUES ('2000', '多集群管理查看', '1593', '1', '2', '多集群管理查看', '0', 'know-streaming');
INSERT INTO `logi_security_permission` (`id`, `permission_name`, `parent_id`, `leaf`, `level`, `description`, `is_delete`, `app_name`) VALUES ('2002', 'Topic-迁移副本', '1593', '1', '2', 'Topic-迁移副本', '0', 'know-streaming');
INSERT INTO `logi_security_permission` (`id`, `permission_name`, `parent_id`, `leaf`, `level`, `description`, `is_delete`, `app_name`) VALUES ('2004', 'Topic-扩缩副本', '1593', '1', '2', 'Topic-扩缩副本', '0', 'know-streaming');
INSERT INTO `logi_security_permission` (`id`, `permission_name`, `parent_id`, `leaf`, `level`, `description`, `is_delete`, `app_name`) VALUES ('2006', 'Cluster-LoadReBalance-周期均衡', '1593', '1', '2', 'Cluster-LoadReBalance-周期均衡', '0', 'know-streaming');
INSERT INTO `logi_security_permission` (`id`, `permission_name`, `parent_id`, `leaf`, `level`, `description`, `is_delete`, `app_name`) VALUES ('2008', 'Cluster-LoadReBalance-立即均衡', '1593', '1', '2', 'Cluster-LoadReBalance-立即均衡', '0', 'know-streaming');
INSERT INTO `logi_security_permission` (`id`, `permission_name`, `parent_id`, `leaf`, `level`, `description`, `is_delete`, `app_name`) VALUES ('2010', 'Cluster-LoadReBalance-设置集群规格', '1593', '1', '2', 'Cluster-LoadReBalance-设置集群规格', '0', 'know-streaming');

-- system-management permissions, added 2022-09-06
INSERT INTO `logi_security_permission` (`id`, `permission_name`, `parent_id`, `leaf`, `level`, `description`, `is_delete`, `app_name`) VALUES ('3000', '系统管理查看', '1595', '1', '2', '系统管理查看', '0', 'know-streaming');

INSERT INTO `logi_security_role_permission` (`role_id`, `permission_id`, `is_delete`, `app_name`) VALUES ('1677', '2000', '0', 'know-streaming');
INSERT INTO `logi_security_role_permission` (`role_id`, `permission_id`, `is_delete`, `app_name`) VALUES ('1677', '2002', '0', 'know-streaming');
INSERT INTO `logi_security_role_permission` (`role_id`, `permission_id`, `is_delete`, `app_name`) VALUES ('1677', '2004', '0', 'know-streaming');
INSERT INTO `logi_security_role_permission` (`role_id`, `permission_id`, `is_delete`, `app_name`) VALUES ('1677', '2006', '0', 'know-streaming');
INSERT INTO `logi_security_role_permission` (`role_id`, `permission_id`, `is_delete`, `app_name`) VALUES ('1677', '2008', '0', 'know-streaming');
INSERT INTO `logi_security_role_permission` (`role_id`, `permission_id`, `is_delete`, `app_name`) VALUES ('1677', '2010', '0', 'know-streaming');
INSERT INTO `logi_security_role_permission` (`role_id`, `permission_id`, `is_delete`, `app_name`) VALUES ('1677', '3000', '0', 'know-streaming');

-- widen columns
ALTER TABLE `logi_security_oplog`
    CHANGE COLUMN `operator_ip` `operator_ip` VARCHAR(64) NOT NULL COMMENT '操作者ip' ,
    CHANGE COLUMN `operator` `operator` VARCHAR(64) NULL DEFAULT NULL COMMENT '操作者账号' ,
    CHANGE COLUMN `operate_page` `operate_page` VARCHAR(64) NOT NULL DEFAULT '' COMMENT '操作页面' ,
    CHANGE COLUMN `operate_type` `operate_type` VARCHAR(64) NOT NULL COMMENT '操作类型' ,
    CHANGE COLUMN `target_type` `target_type` VARCHAR(64) NOT NULL COMMENT '对象分类' ,
    CHANGE COLUMN `target` `target` VARCHAR(1024) NOT NULL COMMENT '操作对象' ,
    CHANGE COLUMN `operation_methods` `operation_methods` VARCHAR(64) NOT NULL DEFAULT '' COMMENT '操作方式' ;
```

---

### 6.2.3 Upgrading to `v3.0.0-beta.1`

**SQL changes**

1. A listener-info column was added to the `ks_km_broker` table.
2. The operation_methods column of the `logi_security_oplog` table now defaults to ''.
Run the SQL below to update the tables accordingly.

```sql
ALTER TABLE `ks_km_broker`
    ADD COLUMN `endpoint_map` VARCHAR(1024) NOT NULL DEFAULT '' COMMENT '监听信息' AFTER `update_time`;

ALTER TABLE `logi_security_oplog`
    ALTER COLUMN `operation_methods` set default '';
```

---

### 6.2.4 Upgrading from `2.x` to `v3.0.0-beta.0`

**Upgrade steps:**

@@ -24,14 +136,14 @@

UPDATE ks_km_topic
INNER JOIN
    (SELECT
        topic.cluster_id   AS cluster_id,
        topic.topic_name   AS topic_name,
        topic.description  AS description
    FROM topic WHERE description != ''
    ) AS t
ON ks_km_topic.cluster_phy_id = t.cluster_id
    AND ks_km_topic.topic_name = t.topic_name
    AND ks_km_topic.id > 0
SET ks_km_topic.description = t.description;
```
@@ -1,5 +1,4 @@

 # FAQ

 ## 8.1 Which Kafka versions are supported?

@@ -109,3 +108,77 @@ SECURITY.TRICK_USERS

 With those two steps done, you can call login-protected APIs directly.

 One more caveat: a bypassed user can only call APIs they are authorized for. An ordinary user, for example, can only call ordinary APIs, not the operator APIs.

 ## 8.8 Specified key was too long; max key length is 767 bytes

 **Cause:** different versions of the InnoDB engine default the 'innodb_large_prefix' parameter differently: OFF in 5.6, ON in 5.7.

 With InnoDB, innodb_large_prefix=OFF and an Antelope row format (REDUNDANT or COMPACT), the index key prefix is limited to 767 bytes. With innodb_large_prefix=ON and a Barracuda row format (DYNAMIC or COMPRESSED), the limit is 3072 bytes.

 **Fixes:**

 - Shrink the varchar length below 767/4 = 191 characters (4 bytes per character, as under utf8mb4).
 - Switch the charset to latin1 (one character = one byte).
 - Enable 'innodb_large_prefix', change the default row-format setting 'innodb_file_format' to Barracuda, and set row_format=dynamic.
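A minimal sketch of that third fix (MySQL 5.6/5.7-era settings; both variables were removed in 8.0, where large prefixes are the default, and `example_table` is a placeholder):

```sql
-- allow index key prefixes up to 3072 bytes
SET GLOBAL innodb_file_format = 'Barracuda';
SET GLOBAL innodb_large_prefix = ON;

-- rebuild the affected table with a Barracuda row format
ALTER TABLE example_table ROW_FORMAT=DYNAMIC;
```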
## 8.9 The ESIndexNotFoundException error

**Cause:** the ES index templates were not created.

**Fix:** run the init_es_template.sh script to create the ES index templates.

## 8.10 km-console build failures

First, **make sure you are using the latest version**; see [Tags](https://github.com/didi/KnowStreaming/tags) for the version list. If you are not, upgrade and then retry.

The common cause is project dependencies not being installed properly, so the build fails on missing dependencies. Check whether the following folders exist and have content:

```
KnowStreaming/km-console/node_modules
KnowStreaming/km-console/packages/layout-clusters-fe/node_modules
KnowStreaming/km-console/packages/config-manager-fe/node_modules
```

If a `node_modules` directory is missing or empty, the dependencies were not installed successfully. Proceed as follows:

1. Manually delete the three folders above (if present)

2. If you previously built `km-console` via `mvn install`, rerun that command from the project root (KnowStreaming) and watch the build for errors. If it errors, see step 4.

3. If you build the frontend standalone (running `npm run build` directly), enter the `KnowStreaming/km-console` directory and run the steps below (note: make sure you are on `node v12`)

   a. Run `npm run i`. If it errors, see step 4.

   b. Run `npm run build`. If it errors, see step 4.

4. Contact us for help. Please provide the following information so we can locate the problem quickly, for example:

```
OS: Mac
Shell: bash
Node version: v12.22.12
Repro steps: 1. -> 2.
Error screenshot:
```

## 8.11 Why does `npm run start` under `km-console` show no build and hot-reload output? How do I start a single app?

Run `npm run start` inside the specific app, e.g. `cd packages/layout-clusters-fe` and then `npm run start`.

Once the app is up, view it through the base app (the base app, layout-clusters-fe, must be started).

## 8.12 Permission-recognition failures

1. Log in to KnowStreaming as admin, open System Management - User Management - Role Management - Add Role, and check whether the page looks normal.

<img src="http://img-ys011.didistatic.com/static/dc2img/do1_gwGfjN9N92UxzHU8dfzr" width = "400" >

2. Inspect the response of the '/logi-security/api/v1/permission/tree' endpoint; garbled text appears, as shown below.


3. Check the logi_security_permission table for garbled Chinese.

If all of the above match, the permission-recognition failure is caused by database mojibake.

+ Cause: the database encoding differs from the one our script assumes, so the data in the database got garbled and permission recognition fails.
+ Fix: wipe the database data, set the database charset to utf8, then rerun the [dml-logi.sql](https://github.com/didi/KnowStreaming/blob/master/km-dist/init/sql/dml-logi.sql) script to reimport the data.
@@ -11,7 +11,7 @@

 Below is the typical first-use journey through the product:

-
+

 ## 5.3 Common features
@@ -14,6 +14,7 @@ import com.xiaojukeji.know.streaming.km.common.bean.entity.topic.Topic;
 import com.xiaojukeji.know.streaming.km.common.bean.vo.cluster.res.ClusterBrokersOverviewVO;
 import com.xiaojukeji.know.streaming.km.common.bean.vo.cluster.res.ClusterBrokersStateVO;
 import com.xiaojukeji.know.streaming.km.common.bean.vo.kafkacontroller.KafkaControllerVO;
+import com.xiaojukeji.know.streaming.km.common.constant.KafkaConstant;
 import com.xiaojukeji.know.streaming.km.common.enums.SortTypeEnum;
 import com.xiaojukeji.know.streaming.km.common.utils.PaginationMetricsUtil;
 import com.xiaojukeji.know.streaming.km.common.utils.PaginationUtil;
@@ -71,6 +72,9 @@ public class ClusterBrokersManagerImpl implements ClusterBrokersManager {
         Topic groupTopic = topicService.getTopic(clusterPhyId, org.apache.kafka.common.internals.Topic.GROUP_METADATA_TOPIC_NAME);
         Topic transactionTopic = topicService.getTopic(clusterPhyId, org.apache.kafka.common.internals.Topic.TRANSACTION_STATE_TOPIC_NAME);

+        // fetch the controller info
+        KafkaController kafkaController = kafkaControllerService.getKafkaControllerFromDB(clusterPhyId);
+
         // convert the format
         return PaginationResult.buildSuc(
                 this.convert2ClusterBrokersOverviewVOList(
@@ -78,7 +82,8 @@ public class ClusterBrokersManagerImpl implements ClusterBrokersManager {
                         brokerList,
                         metricsResult.getData(),
                         groupTopic,
-                        transactionTopic
+                        transactionTopic,
+                        kafkaController
                 ),
                 paginationResult
         );
@@ -159,7 +164,8 @@ public class ClusterBrokersManagerImpl implements ClusterBrokersManager {
             List<Broker> brokerList,
             List<BrokerMetrics> metricsList,
             Topic groupTopic,
-            Topic transactionTopic) {
+            Topic transactionTopic,
+            KafkaController kafkaController) {
         Map<Integer, BrokerMetrics> metricsMap = metricsList == null? new HashMap<>(): metricsList.stream().collect(Collectors.toMap(BrokerMetrics::getBrokerId, Function.identity()));

         Map<Integer, Broker> brokerMap = brokerList == null? new HashMap<>(): brokerList.stream().collect(Collectors.toMap(Broker::getBrokerId, Function.identity()));
@@ -169,12 +175,12 @@ public class ClusterBrokersManagerImpl implements ClusterBrokersManager {
             Broker broker = brokerMap.get(brokerId);
             BrokerMetrics brokerMetrics = metricsMap.get(brokerId);

-            voList.add(this.convert2ClusterBrokersOverviewVO(brokerId, broker, brokerMetrics, groupTopic, transactionTopic));
+            voList.add(this.convert2ClusterBrokersOverviewVO(brokerId, broker, brokerMetrics, groupTopic, transactionTopic, kafkaController));
         }
         return voList;
     }

-    private ClusterBrokersOverviewVO convert2ClusterBrokersOverviewVO(Integer brokerId, Broker broker, BrokerMetrics brokerMetrics, Topic groupTopic, Topic transactionTopic) {
+    private ClusterBrokersOverviewVO convert2ClusterBrokersOverviewVO(Integer brokerId, Broker broker, BrokerMetrics brokerMetrics, Topic groupTopic, Topic transactionTopic, KafkaController kafkaController) {
         ClusterBrokersOverviewVO clusterBrokersOverviewVO = new ClusterBrokersOverviewVO();
         clusterBrokersOverviewVO.setBrokerId(brokerId);
         if (broker != null) {
@@ -192,6 +198,9 @@ public class ClusterBrokersManagerImpl implements ClusterBrokersManager {
         if (transactionTopic != null && transactionTopic.getBrokerIdSet().contains(brokerId)) {
             clusterBrokersOverviewVO.getKafkaRoleList().add(transactionTopic.getTopicName());
         }
+        if (kafkaController != null && kafkaController.getBrokerId().equals(brokerId)) {
+            clusterBrokersOverviewVO.getKafkaRoleList().add(KafkaConstant.CONTROLLER_ROLE);
+        }

         clusterBrokersOverviewVO.setLatestMetrics(brokerMetrics);
         return clusterBrokersOverviewVO;
@@ -19,7 +19,8 @@ import com.xiaojukeji.know.streaming.km.common.bean.vo.group.GroupTopicConsumedD
|
||||
import com.xiaojukeji.know.streaming.km.common.bean.vo.group.GroupTopicOverviewVO;
|
||||
import com.xiaojukeji.know.streaming.km.common.constant.MsgConstant;
|
||||
import com.xiaojukeji.know.streaming.km.common.enums.AggTypeEnum;
|
||||
import com.xiaojukeji.know.streaming.km.common.enums.GroupOffsetResetEnum;
|
||||
import com.xiaojukeji.know.streaming.km.common.enums.OffsetTypeEnum;
|
||||
import com.xiaojukeji.know.streaming.km.common.enums.group.GroupStateEnum;
|
||||
import com.xiaojukeji.know.streaming.km.common.exception.AdminOperateException;
|
||||
import com.xiaojukeji.know.streaming.km.common.exception.NotExistException;
|
||||
import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
|
||||
@@ -75,7 +76,7 @@ public class GroupManagerImpl implements GroupManager {
|
||||
}
|
||||
|
||||
if (!paginationResult.hasData()) {
|
||||
return PaginationResult.buildSuc(dto);
|
||||
return PaginationResult.buildSuc(new ArrayList<>(), paginationResult);
|
||||
}
|
||||
|
||||
// 获取指标
|
||||
@@ -171,7 +172,7 @@ public class GroupManagerImpl implements GroupManager {
|
||||
}
|
||||
|
||||
if (!ConsumerGroupState.EMPTY.equals(description.state()) && !ConsumerGroupState.DEAD.equals(description.state())) {
|
||||
return Result.buildFromRSAndMsg(ResultStatus.KAFKA_OPERATE_FAILED, String.format("group处于%s, 重置失败(仅Empty情况可重置)", description.state().name()));
|
||||
return Result.buildFromRSAndMsg(ResultStatus.KAFKA_OPERATE_FAILED, String.format("group处于%s, 重置失败(仅Empty情况可重置)", GroupStateEnum.getByRawState(description.state()).getState()));
|
||||
}
|
||||
|
||||
// 获取offset
|
||||
@@ -198,12 +199,12 @@ public class GroupManagerImpl implements GroupManager {
|
||||
return Result.buildFromRSAndMsg(ResultStatus.NOT_EXIST, MsgConstant.getTopicNotExist(dto.getClusterId(), dto.getTopicName()));
|
||||
}
|
||||
|
||||
if (GroupOffsetResetEnum.PRECISE_OFFSET.getResetType() == dto.getResetType()
|
||||
if (OffsetTypeEnum.PRECISE_OFFSET.getResetType() == dto.getResetType()
|
||||
&& ValidateUtils.isEmptyList(dto.getOffsetList())) {
|
||||
return Result.buildFromRSAndMsg(ResultStatus.PARAM_ILLEGAL, "参数错误,指定offset重置需传offset信息");
|
||||
}
|
||||
|
||||
if (GroupOffsetResetEnum.PRECISE_TIMESTAMP.getResetType() == dto.getResetType()
|
||||
if (OffsetTypeEnum.PRECISE_TIMESTAMP.getResetType() == dto.getResetType()
|
||||
&& ValidateUtils.isNull(dto.getTimestamp())) {
|
||||
return Result.buildFromRSAndMsg(ResultStatus.PARAM_ILLEGAL, "参数错误,指定时间重置需传时间信息");
|
||||
}
|
||||
@@ -212,7 +213,7 @@ public class GroupManagerImpl implements GroupManager {
|
||||
}
|
||||
|
||||
private Result<Map<TopicPartition, Long>> getPartitionOffset(GroupOffsetResetDTO dto) {
|
||||
if (GroupOffsetResetEnum.PRECISE_OFFSET.getResetType() == dto.getResetType()) {
|
||||
if (OffsetTypeEnum.PRECISE_OFFSET.getResetType() == dto.getResetType()) {
|
||||
return Result.buildSuc(dto.getOffsetList().stream().collect(Collectors.toMap(
|
||||
elem -> new TopicPartition(dto.getTopicName(), elem.getPartitionId()),
|
||||
PartitionOffsetDTO::getOffset,
|
||||
@@ -221,9 +222,9 @@ public class GroupManagerImpl implements GroupManager {
|
||||
}
|
||||
|
||||
OffsetSpec offsetSpec = null;
|
||||
if (GroupOffsetResetEnum.PRECISE_TIMESTAMP.getResetType() == dto.getResetType()) {
|
||||
if (OffsetTypeEnum.PRECISE_TIMESTAMP.getResetType() == dto.getResetType()) {
|
||||
offsetSpec = OffsetSpec.forTimestamp(dto.getTimestamp());
|
||||
} else if (GroupOffsetResetEnum.EARLIEST.getResetType() == dto.getResetType()) {
|
||||
} else if (OffsetTypeEnum.EARLIEST.getResetType() == dto.getResetType()) {
|
||||
offsetSpec = OffsetSpec.earliest();
|
||||
} else {
|
||||
offsetSpec = OffsetSpec.latest();
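
A minimal sketch of the reset-type dispatch above, using illustrative integer codes in place of OffsetTypeEnum's real values; the OffsetSpec factory methods are the standard kafka-clients AdminClient API.

import org.apache.kafka.clients.admin.OffsetSpec;

class OffsetSpecSketch {
    // Illustrative codes; the real ones live in OffsetTypeEnum.
    static final int PRECISE_TIMESTAMP = 1;
    static final int EARLIEST = 2;

    // Maps a reset-type code to the OffsetSpec used for the offset lookup.
    static OffsetSpec toOffsetSpec(int resetType, long timestamp) {
        if (resetType == PRECISE_TIMESTAMP) {
            return OffsetSpec.forTimestamp(timestamp); // first offset at/after the timestamp
        } else if (resetType == EARLIEST) {
            return OffsetSpec.earliest();              // log start offset
        }
        return OffsetSpec.latest();                    // log end offset
    }
}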
@@ -271,15 +272,11 @@ public class GroupManagerImpl implements GroupManager {

        // Fetch group metrics
        Result<List<GroupMetrics>> groupMetricsResult = groupMetricService.listPartitionLatestMetricsFromES(
                clusterPhyId,
                groupName,
                topicName,
                latestMetricNames == null? Arrays.asList(): latestMetricNames
        );
        Result<List<GroupMetrics>> groupMetricsResult = groupMetricService.collectGroupMetricsFromKafka(clusterPhyId, groupName, latestMetricNames == null ? Arrays.asList() : latestMetricNames);

        // Convert group metrics
        List<GroupMetrics> esGroupMetricsList = groupMetricsResult.hasData()? groupMetricsResult.getData(): new ArrayList<>();
        List<GroupMetrics> esGroupMetricsList = groupMetricsResult.hasData() ? groupMetricsResult.getData().stream().filter(elem -> topicName.equals(elem.getTopic())).collect(Collectors.toList()) : new ArrayList<>();
        Map<Integer, GroupMetrics> esMetricsMap = new HashMap<>();
        for (GroupMetrics groupMetrics: esGroupMetricsList) {
            esMetricsMap.put(groupMetrics.getPartitionId(), groupMetrics);

@@ -1,5 +1,6 @@
package com.xiaojukeji.know.streaming.km.biz.topic;

import com.xiaojukeji.know.streaming.km.common.bean.dto.pagination.PaginationSortDTO;
import com.xiaojukeji.know.streaming.km.common.bean.dto.topic.TopicRecordDTO;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
import com.xiaojukeji.know.streaming.km.common.bean.vo.topic.TopicBrokersPartitionsSummaryVO;

@@ -22,25 +22,26 @@ import com.xiaojukeji.know.streaming.km.common.bean.vo.topic.partition.TopicPart
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import com.xiaojukeji.know.streaming.km.common.constant.KafkaConstant;
import com.xiaojukeji.know.streaming.km.common.constant.MsgConstant;
import com.xiaojukeji.know.streaming.km.common.converter.PartitionConverter;
import com.xiaojukeji.know.streaming.km.common.converter.TopicVOConverter;
import com.xiaojukeji.know.streaming.km.common.enums.OffsetTypeEnum;
import com.xiaojukeji.know.streaming.km.common.enums.SortTypeEnum;
import com.xiaojukeji.know.streaming.km.common.exception.AdminOperateException;
import com.xiaojukeji.know.streaming.km.common.exception.NotExistException;
import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
import com.xiaojukeji.know.streaming.km.common.utils.PaginationUtil;
import com.xiaojukeji.know.streaming.km.common.utils.ValidateUtils;
import com.xiaojukeji.know.streaming.km.core.service.broker.BrokerService;
import com.xiaojukeji.know.streaming.km.core.service.cluster.ClusterPhyService;
import com.xiaojukeji.know.streaming.km.core.service.partition.PartitionMetricService;
import com.xiaojukeji.know.streaming.km.core.service.topic.TopicConfigService;
import com.xiaojukeji.know.streaming.km.core.service.partition.PartitionService;
import com.xiaojukeji.know.streaming.km.core.service.topic.TopicConfigService;
import com.xiaojukeji.know.streaming.km.core.service.topic.TopicMetricService;
import com.xiaojukeji.know.streaming.km.core.service.topic.TopicService;
import com.xiaojukeji.know.streaming.km.core.service.version.metrics.TopicMetricVersionItems;
import org.apache.commons.lang3.ObjectUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.kafka.clients.admin.OffsetSpec;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.consumer.*;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.config.TopicConfig;
import org.springframework.beans.factory.annotation.Autowired;
@@ -129,7 +130,12 @@ public class TopicStateManagerImpl implements TopicStateManager {
            return Result.buildFromRSAndMsg(ResultStatus.NOT_EXIST, MsgConstant.getClusterPhyNotExist(clusterPhyId));
        }

        // Fetch partition offsets
        // Fetch partition beginOffsets
        Result<Map<TopicPartition, Long>> beginOffsetsMapResult = partitionService.getPartitionOffsetFromKafka(clusterPhyId, topicName, dto.getFilterPartitionId(), OffsetSpec.earliest(), null);
        if (beginOffsetsMapResult.failed()) {
            return Result.buildFromIgnoreData(beginOffsetsMapResult);
        }
        // Fetch partition endOffsets
        Result<Map<TopicPartition, Long>> endOffsetsMapResult = partitionService.getPartitionOffsetFromKafka(clusterPhyId, topicName, dto.getFilterPartitionId(), OffsetSpec.latest(), null);
        if (endOffsetsMapResult.failed()) {
            return Result.buildFromIgnoreData(endOffsetsMapResult);
@@ -142,13 +148,48 @@ public class TopicStateManagerImpl implements TopicStateManager {
            // Create the kafka-consumer
            kafkaConsumer = new KafkaConsumer<>(this.generateClientProperties(clusterPhy, dto.getMaxRecords()));

            kafkaConsumer.assign(endOffsetsMapResult.getData().keySet());
            for (Map.Entry<TopicPartition, Long> entry: endOffsetsMapResult.getData().entrySet()) {
                kafkaConsumer.seek(entry.getKey(), Math.max(0, entry.getValue() - dto.getMaxRecords()));
            List<TopicPartition> partitionList = new ArrayList<>();
            long maxMessage = 0;
            for (Map.Entry<TopicPartition, Long> entry : endOffsetsMapResult.getData().entrySet()) {
                long begin = beginOffsetsMapResult.getData().get(entry.getKey());
                long end = entry.getValue();
                if (begin == end){
                    continue;
                }
                maxMessage += end - begin;
                partitionList.add(entry.getKey());
            }
            maxMessage = Math.min(maxMessage, dto.getMaxRecords());
            kafkaConsumer.assign(partitionList);

            Map<TopicPartition, OffsetAndTimestamp> partitionOffsetAndTimestampMap = new HashMap<>();
            // Look up each partition's offset at the given time (used when querying messages from a start timestamp)
            if (OffsetTypeEnum.PRECISE_TIMESTAMP.getResetType() == dto.getFilterOffsetReset()) {
                Map<TopicPartition, Long> timestampsToSearch = new HashMap<>();
                partitionList.forEach(topicPartition -> {
                    timestampsToSearch.put(topicPartition, dto.getStartTimestampUnitMs());
                });
                partitionOffsetAndTimestampMap = kafkaConsumer.offsetsForTimes(timestampsToSearch);
            }

            for (TopicPartition partition : partitionList) {
                if (OffsetTypeEnum.EARLIEST.getResetType() == dto.getFilterOffsetReset()) {
                    // Reset to earliest
                    kafkaConsumer.seek(partition, beginOffsetsMapResult.getData().get(partition));
                } else if (OffsetTypeEnum.PRECISE_TIMESTAMP.getResetType() == dto.getFilterOffsetReset()) {
                    // Reset to the given timestamp
                    kafkaConsumer.seek(partition, partitionOffsetAndTimestampMap.get(partition).offset());
                } else if (OffsetTypeEnum.PRECISE_OFFSET.getResetType() == dto.getFilterOffsetReset()) {
                    // Reset to the given offset

                } else {
                    // Default: reset to latest
                    kafkaConsumer.seek(partition, Math.max(beginOffsetsMapResult.getData().get(partition), endOffsetsMapResult.getData().get(partition) - dto.getMaxRecords()));
                }
            }
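
A self-contained sketch of the assign/seek/poll preview pattern used above, assuming a reachable broker at localhost:9092 and a topic named "demo" (both placeholders); it tails the last maxRecords messages of one partition without joining a consumer group.

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.TopicPartition;

class TailPreviewSketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // placeholder
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringDeserializer");
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringDeserializer");

        int maxRecords = 100;
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            TopicPartition tp = new TopicPartition("demo", 0);      // placeholder topic
            consumer.assign(Collections.singletonList(tp));         // no group management
            long begin = consumer.beginningOffsets(Collections.singletonList(tp)).get(tp);
            long end = consumer.endOffsets(Collections.singletonList(tp)).get(tp);
            consumer.seek(tp, Math.max(begin, end - maxRecords));   // jump to the tail
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(1000));
            records.forEach(r -> System.out.println(r.offset() + ": " + r.value()));
        }
    }
}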

            // KafkaConstant.POLL_ONCE_TIMEOUT_UNIT_MS is subtracted here because each poll itself takes time; without the subtraction, the elapsed time after a poll could exceed the requested limit
            while (System.currentTimeMillis() - startTime + KafkaConstant.POLL_ONCE_TIMEOUT_UNIT_MS <= dto.getPullTimeoutUnitMs() && voList.size() < dto.getMaxRecords()) {
            while (System.currentTimeMillis() - startTime <= dto.getPullTimeoutUnitMs() && voList.size() < maxMessage) {
                ConsumerRecords<String, String> consumerRecords = kafkaConsumer.poll(Duration.ofMillis(KafkaConstant.POLL_ONCE_TIMEOUT_UNIT_MS));
                for (ConsumerRecord<String, String> consumerRecord : consumerRecords) {
                    if (this.checkIfIgnore(consumerRecord, dto.getFilterKey(), dto.getFilterValue())) {
@@ -168,6 +209,15 @@ public class TopicStateManagerImpl implements TopicStateManager {
                }
            }

            // Sort
            if (ObjectUtils.isNotEmpty(voList)) {
                // Default: sort by time, descending
                if (StringUtils.isBlank(dto.getSortType())) {
                    dto.setSortType(SortTypeEnum.DESC.getSortType());
                }
                PaginationUtil.pageBySort(voList, dto.getSortField(), dto.getSortType());
            }

            return Result.buildSuc(voList.subList(0, Math.min(dto.getMaxRecords(), voList.size())));
        } catch (Exception e) {
            log.error("method=getTopicMessages||clusterPhyId={}||topicName={}||param={}||errMsg=exception", clusterPhyId, topicName, dto, e);

@@ -7,12 +7,14 @@ import com.didiglobal.logi.log.LogFactory;
import com.didiglobal.logi.security.common.dto.config.ConfigDTO;
import com.didiglobal.logi.security.service.ConfigService;
import com.xiaojukeji.know.streaming.km.biz.version.VersionControlManager;
import com.xiaojukeji.know.streaming.km.common.bean.dto.metrices.MetricDetailDTO;
import com.xiaojukeji.know.streaming.km.common.bean.dto.metrices.UserMetricConfigDTO;
import com.xiaojukeji.know.streaming.km.common.bean.entity.config.metric.UserMetricConfig;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
import com.xiaojukeji.know.streaming.km.common.bean.entity.version.VersionControlItem;
import com.xiaojukeji.know.streaming.km.common.bean.vo.config.metric.UserMetricConfigVO;
import com.xiaojukeji.know.streaming.km.common.bean.vo.version.VersionItemVO;
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import com.xiaojukeji.know.streaming.km.common.enums.version.VersionEnum;
import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
import com.xiaojukeji.know.streaming.km.common.utils.VersionUtil;
@@ -47,29 +49,29 @@ public class VersionControlManagerImpl implements VersionControlManager {
    @PostConstruct
    public void init(){
        defaultMetrics.add(new UserMetricConfig(METRIC_TOPIC.getCode(), TOPIC_METRIC_HEALTH_SCORE, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_TOPIC.getCode(), TOPIC_METRIC_TOTAL_PRODUCE_REQUESTS, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_TOPIC.getCode(), TOPIC_METRIC_FAILED_FETCH_REQ, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_TOPIC.getCode(), TOPIC_METRIC_FAILED_PRODUCE_REQ, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_TOPIC.getCode(), TOPIC_METRIC_MESSAGE_IN, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_TOPIC.getCode(), TOPIC_METRIC_UNDER_REPLICA_PARTITIONS, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_TOPIC.getCode(), TOPIC_METRIC_TOTAL_PRODUCE_REQUESTS, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_TOPIC.getCode(), TOPIC_METRIC_BYTES_IN, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_TOPIC.getCode(), TOPIC_METRIC_BYTES_OUT, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_TOPIC.getCode(), TOPIC_METRIC_BYTES_REJECTED, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_TOPIC.getCode(), TOPIC_METRIC_MESSAGE_IN, true));

        defaultMetrics.add(new UserMetricConfig(METRIC_CLUSTER.getCode(), CLUSTER_METRIC_HEALTH_SCORE, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_CLUSTER.getCode(), CLUSTER_METRIC_TOTAL_REQ_QUEUE_SIZE, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_CLUSTER.getCode(), CLUSTER_METRIC_TOTAL_RES_QUEUE_SIZE, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_CLUSTER.getCode(), CLUSTER_METRIC_ACTIVE_CONTROLLER_COUNT, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_CLUSTER.getCode(), CLUSTER_METRIC_TOTAL_PRODUCE_REQ, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_CLUSTER.getCode(), CLUSTER_METRIC_TOTAL_LOG_SIZE, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_CLUSTER.getCode(), CLUSTER_METRIC_CONNECTIONS, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_CLUSTER.getCode(), CLUSTER_METRIC_MESSAGES_IN, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_CLUSTER.getCode(), CLUSTER_METRIC_BYTES_IN, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_CLUSTER.getCode(), CLUSTER_METRIC_BYTES_OUT, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_CLUSTER.getCode(), CLUSTER_METRIC_GROUP_REBALANCES, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_CLUSTER.getCode(), CLUSTER_METRIC_JOB_RUNNING, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_CLUSTER.getCode(), CLUSTER_METRIC_CONNECTIONS, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_CLUSTER.getCode(), CLUSTER_METRIC_MESSAGES_IN, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_CLUSTER.getCode(), CLUSTER_METRIC_PARTITIONS_NO_LEADER, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_CLUSTER.getCode(), CLUSTER_METRIC_PARTITION_URP, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_CLUSTER.getCode(), CLUSTER_METRIC_TOTAL_LOG_SIZE, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_CLUSTER.getCode(), CLUSTER_METRIC_TOTAL_PRODUCE_REQ, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_CLUSTER.getCode(), CLUSTER_METRIC_TOTAL_REQ_QUEUE_SIZE, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_CLUSTER.getCode(), CLUSTER_METRIC_TOTAL_RES_QUEUE_SIZE, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_CLUSTER.getCode(), CLUSTER_METRIC_GROUP_REBALANCES, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_CLUSTER.getCode(), CLUSTER_METRIC_JOB_RUNNING, true));

        defaultMetrics.add(new UserMetricConfig(METRIC_GROUP.getCode(), GROUP_METRIC_OFFSET_CONSUMED, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_GROUP.getCode(), GROUP_METRIC_LAG, true));
@@ -77,18 +79,18 @@ public class VersionControlManagerImpl implements VersionControlManager {
        defaultMetrics.add(new UserMetricConfig(METRIC_GROUP.getCode(), GROUP_METRIC_HEALTH_SCORE, true));

        defaultMetrics.add(new UserMetricConfig(METRIC_BROKER.getCode(), BROKER_METRIC_HEALTH_SCORE, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_BROKER.getCode(), BROKER_METRIC_TOTAL_REQ_QUEUE, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_BROKER.getCode(), BROKER_METRIC_TOTAL_RES_QUEUE, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_BROKER.getCode(), BROKER_METRIC_CONNECTION_COUNT, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_BROKER.getCode(), BROKER_METRIC_MESSAGE_IN, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_BROKER.getCode(), BROKER_METRIC_TOTAL_PRODUCE_REQ, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_BROKER.getCode(), BROKER_METRIC_NETWORK_RPO_AVG_IDLE, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_BROKER.getCode(), BROKER_METRIC_REQ_AVG_IDLE, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_BROKER.getCode(), BROKER_METRIC_CONNECTION_COUNT, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_BROKER.getCode(), BROKER_METRIC_BYTES_IN, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_BROKER.getCode(), BROKER_METRIC_BYTES_OUT, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_BROKER.getCode(), BROKER_METRIC_PARTITIONS_SKEW, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_BROKER.getCode(), BROKER_METRIC_TOTAL_PRODUCE_REQ, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_BROKER.getCode(), BROKER_METRIC_TOTAL_REQ_QUEUE, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_BROKER.getCode(), BROKER_METRIC_TOTAL_RES_QUEUE, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_BROKER.getCode(), BROKER_METRIC_LEADERS_SKEW, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_BROKER.getCode(), BROKER_METRIC_UNDER_REPLICATE_PARTITION, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_BROKER.getCode(), BROKER_METRIC_PARTITIONS_SKEW, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_BROKER.getCode(), BROKER_METRIC_BYTES_IN, true));
        defaultMetrics.add(new UserMetricConfig(METRIC_BROKER.getCode(), BROKER_METRIC_BYTES_OUT, true));
    }

    @Autowired
@@ -159,6 +161,9 @@ public class VersionControlManagerImpl implements VersionControlManager {

        UserMetricConfig umc = userMetricConfigMap.get(itemType + "@" + metric);
        userMetricConfigVO.setSet(null != umc && umc.isSet());
        if (umc != null) {
            userMetricConfigVO.setRank(umc.getRank());
        }
        userMetricConfigVO.setName(itemVO.getName());
        userMetricConfigVO.setType(itemVO.getType());
        userMetricConfigVO.setDesc(itemVO.getDesc());
@@ -178,13 +183,29 @@ public class VersionControlManagerImpl implements VersionControlManager {
    @Override
    public Result<Void> updateUserMetricItem(Long clusterId, Integer type, UserMetricConfigDTO dto, String operator) {
        Map<String, Boolean> metricsSetMap = dto.getMetricsSet();
        if(null == metricsSetMap || metricsSetMap.isEmpty()){

        // Convert metricDetailDTOList
        List<MetricDetailDTO> metricDetailDTOList = dto.getMetricDetailDTOList();
        Map<String, MetricDetailDTO> metricDetailMap = new HashMap<>();
        if (metricDetailDTOList != null && !metricDetailDTOList.isEmpty()) {
            metricDetailMap = metricDetailDTOList.stream().collect(Collectors.toMap(MetricDetailDTO::getMetric, Function.identity()));
        }

        // Convert metricsSetMap
        if (metricsSetMap != null && !metricsSetMap.isEmpty()) {
            for (Map.Entry<String, Boolean> metricAndShowEntry : metricsSetMap.entrySet()) {
                if (metricDetailMap.containsKey(metricAndShowEntry.getKey())) continue;
                metricDetailMap.put(metricAndShowEntry.getKey(), new MetricDetailDTO(metricAndShowEntry.getKey(), metricAndShowEntry.getValue(), null));
            }
        }

        if (metricDetailMap.isEmpty()) {
            return Result.buildSuc();
        }

        Set<UserMetricConfig> userMetricConfigs = getUserMetricConfig(operator);
        for(Map.Entry<String, Boolean> metricAndShowEntry : metricsSetMap.entrySet()){
            UserMetricConfig userMetricConfig = new UserMetricConfig(type, metricAndShowEntry.getKey(), metricAndShowEntry.getValue());
        for (MetricDetailDTO metricDetailDTO : metricDetailMap.values()) {
            UserMetricConfig userMetricConfig = new UserMetricConfig(type, metricDetailDTO.getMetric(), metricDetailDTO.getSet(), metricDetailDTO.getRank());
            userMetricConfigs.remove(userMetricConfig);
            userMetricConfigs.add(userMetricConfig);
        }
@@ -228,7 +249,7 @@ public class VersionControlManagerImpl implements VersionControlManager {
            return defaultMetrics;
        }

        return JSON.parseObject(value, new TypeReference<Set<UserMetricConfig>>(){});
        return JSON.parseObject(value, new TypeReference<Set<UserMetricConfig>>() {});
    }

    public static void main(String[] args){

@@ -1,120 +0,0 @@
package com.xiaojukeji.know.streaming.km.collector.metric;

import com.didiglobal.logi.log.ILog;
import com.didiglobal.logi.log.LogFactory;
import com.xiaojukeji.know.streaming.km.common.bean.event.metric.*;
import com.xiaojukeji.know.streaming.km.common.bean.po.BaseESPO;
import com.xiaojukeji.know.streaming.km.common.bean.po.metrice.*;
import com.xiaojukeji.know.streaming.km.common.enums.metric.KafkaMetricIndexEnum;
import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
import com.xiaojukeji.know.streaming.km.common.utils.EnvUtil;
import com.xiaojukeji.know.streaming.km.common.utils.NamedThreadFactory;
import com.xiaojukeji.know.streaming.km.persistence.es.dao.BaseMetricESDAO;
import org.apache.commons.collections.CollectionUtils;
import org.springframework.context.ApplicationListener;
import org.springframework.stereotype.Component;

import javax.annotation.PostConstruct;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

@Component
public class MetricESSender implements ApplicationListener<BaseMetricEvent> {
    protected static final ILog LOGGER = LogFactory.getLog("METRIC_LOGGER");

    private static final int THRESHOLD = 100;

    private ThreadPoolExecutor esExecutor = new ThreadPoolExecutor(10, 20, 6000, TimeUnit.MILLISECONDS,
            new LinkedBlockingDeque<>(1000),
            new NamedThreadFactory("KM-Collect-MetricESSender-ES"),
            (r, e) -> LOGGER.warn("class=MetricESSender||msg=KM-Collect-MetricESSender-ES Deque is blocked, taskCount:{}" + e.getTaskCount()));

    @PostConstruct
    public void init(){
        LOGGER.info("class=MetricESSender||method=init||msg=init finished");
    }

    @Override
    public void onApplicationEvent(BaseMetricEvent event) {
        if(event instanceof BrokerMetricEvent) {
            BrokerMetricEvent brokerMetricEvent = (BrokerMetricEvent)event;
            send2es(KafkaMetricIndexEnum.BROKER_INFO,
                    ConvertUtil.list2List(brokerMetricEvent.getBrokerMetrics(), BrokerMetricPO.class)
            );

        } else if(event instanceof ClusterMetricEvent) {
            ClusterMetricEvent clusterMetricEvent = (ClusterMetricEvent)event;
            send2es(KafkaMetricIndexEnum.CLUSTER_INFO,
                    ConvertUtil.list2List(clusterMetricEvent.getClusterMetrics(), ClusterMetricPO.class)
            );

        } else if(event instanceof TopicMetricEvent) {
            TopicMetricEvent topicMetricEvent = (TopicMetricEvent)event;
            send2es(KafkaMetricIndexEnum.TOPIC_INFO,
                    ConvertUtil.list2List(topicMetricEvent.getTopicMetrics(), TopicMetricPO.class)
            );

        } else if(event instanceof PartitionMetricEvent) {
            PartitionMetricEvent partitionMetricEvent = (PartitionMetricEvent)event;
            send2es(KafkaMetricIndexEnum.PARTITION_INFO,
                    ConvertUtil.list2List(partitionMetricEvent.getPartitionMetrics(), PartitionMetricPO.class)
            );

        } else if(event instanceof GroupMetricEvent) {
            GroupMetricEvent groupMetricEvent = (GroupMetricEvent)event;
            send2es(KafkaMetricIndexEnum.GROUP_INFO,
                    ConvertUtil.list2List(groupMetricEvent.getGroupMetrics(), GroupMetricPO.class)
            );

        } else if(event instanceof ReplicaMetricEvent) {
            ReplicaMetricEvent replicaMetricEvent = (ReplicaMetricEvent)event;
            send2es(KafkaMetricIndexEnum.REPLICATION_INFO,
                    ConvertUtil.list2List(replicaMetricEvent.getReplicationMetrics(), ReplicationMetricPO.class)
            );
        }
    }

    /**
     * Dispatch to ES by monitoring dimension
     */
    private boolean send2es(KafkaMetricIndexEnum stats, List<? extends BaseESPO> statsList){
        if (CollectionUtils.isEmpty(statsList)) {
            return true;
        }

        if (!EnvUtil.isOnline()) {
            LOGGER.info("class=MetricESSender||method=send2es||ariusStats={}||size={}",
                    stats.getIndex(), statsList.size());
        }

        BaseMetricESDAO baseMetricESDao = BaseMetricESDAO.getByStatsType(stats);
        if (Objects.isNull( baseMetricESDao )) {
            LOGGER.error("class=MetricESSender||method=send2es||errMsg=fail to find {}", stats.getIndex());
            return false;
        }

        int size = statsList.size();
        int num = (size) % THRESHOLD == 0 ? (size / THRESHOLD) : (size / THRESHOLD + 1);

        if (size < THRESHOLD) {
            esExecutor.execute(
                    () -> baseMetricESDao.batchInsertStats(statsList)
            );
            return true;
        }

        for (int i = 1; i < num + 1; i++) {
            int end = (i * THRESHOLD) > size ? size : (i * THRESHOLD);
            int start = (i - 1) * THRESHOLD;

            esExecutor.execute(
                    () -> baseMetricESDao.batchInsertStats(statsList.subList(start, end))
            );
        }

        return true;
    }
}
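
The send2es method above splits the stats list into THRESHOLD-sized slices before handing them to the executor. A minimal sketch of just that chunking arithmetic (the ceiling division and subList windows), independent of the ES DAO:

import java.util.ArrayList;
import java.util.List;

class BatchSketch {
    // Splits `items` into slices of at most `threshold` elements, mirroring the
    // num = ceil(size / threshold) loop in send2es above.
    static <T> List<List<T>> slice(List<T> items, int threshold) {
        List<List<T>> batches = new ArrayList<>();
        int size = items.size();
        int num = size % threshold == 0 ? size / threshold : size / threshold + 1; // ceiling division
        for (int i = 1; i <= num; i++) {
            int start = (i - 1) * threshold;
            int end = Math.min(i * threshold, size);
            batches.add(items.subList(start, end)); // e.g. 250 items, threshold 100 -> [0,100) [100,200) [200,250)
        }
        return batches;
    }
}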

@@ -0,0 +1,72 @@
package com.xiaojukeji.know.streaming.km.collector.sink;

import com.didiglobal.logi.log.ILog;
import com.didiglobal.logi.log.LogFactory;
import com.xiaojukeji.know.streaming.km.common.bean.po.BaseESPO;
import com.xiaojukeji.know.streaming.km.common.utils.EnvUtil;
import com.xiaojukeji.know.streaming.km.common.utils.NamedThreadFactory;
import com.xiaojukeji.know.streaming.km.persistence.es.dao.BaseMetricESDAO;
import org.apache.commons.collections.CollectionUtils;

import java.util.List;
import java.util.Objects;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public abstract class AbstractMetricESSender {
    protected static final ILog LOGGER = LogFactory.getLog("METRIC_LOGGER");

    private static final int THRESHOLD = 100;

    private static final ThreadPoolExecutor esExecutor = new ThreadPoolExecutor(
            10,
            20,
            6000,
            TimeUnit.MILLISECONDS,
            new LinkedBlockingDeque<>(1000),
            new NamedThreadFactory("KM-Collect-MetricESSender-ES"),
            (r, e) -> LOGGER.warn("class=MetricESSender||msg=KM-Collect-MetricESSender-ES Deque is blocked, taskCount:{}" + e.getTaskCount())
    );

    /**
     * Dispatch to ES by monitoring dimension
     */
    protected boolean send2es(String index, List<? extends BaseESPO> statsList){
        if (CollectionUtils.isEmpty(statsList)) {
            return true;
        }

        if (!EnvUtil.isOnline()) {
            LOGGER.info("class=MetricESSender||method=send2es||ariusStats={}||size={}",
                    index, statsList.size());
        }

        BaseMetricESDAO baseMetricESDao = BaseMetricESDAO.getByStatsType(index);
        if (Objects.isNull( baseMetricESDao )) {
            LOGGER.error("class=MetricESSender||method=send2es||errMsg=fail to find {}", index);
            return false;
        }

        int size = statsList.size();
        int num = (size) % THRESHOLD == 0 ? (size / THRESHOLD) : (size / THRESHOLD + 1);

        if (size < THRESHOLD) {
            esExecutor.execute(
                    () -> baseMetricESDao.batchInsertStats(statsList)
            );
            return true;
        }

        for (int i = 1; i < num + 1; i++) {
            int end = (i * THRESHOLD) > size ? size : (i * THRESHOLD);
            int start = (i - 1) * THRESHOLD;

            esExecutor.execute(
                    () -> baseMetricESDao.batchInsertStats(statsList.subList(start, end))
            );
        }

        return true;
    }
}
@@ -0,0 +1,28 @@
package com.xiaojukeji.know.streaming.km.collector.sink;

import com.didiglobal.logi.log.ILog;
import com.didiglobal.logi.log.LogFactory;
import com.xiaojukeji.know.streaming.km.common.bean.event.metric.BrokerMetricEvent;
import com.xiaojukeji.know.streaming.km.common.bean.po.metrice.BrokerMetricPO;
import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
import org.springframework.context.ApplicationListener;
import org.springframework.stereotype.Component;

import javax.annotation.PostConstruct;

import static com.xiaojukeji.know.streaming.km.common.constant.ESIndexConstant.BROKER_INDEX;

@Component
public class BrokerMetricESSender extends AbstractMetricESSender implements ApplicationListener<BrokerMetricEvent> {
    protected static final ILog LOGGER = LogFactory.getLog("METRIC_LOGGER");

    @PostConstruct
    public void init(){
        LOGGER.info("class=BrokerMetricESSender||method=init||msg=init finished");
    }

    @Override
    public void onApplicationEvent(BrokerMetricEvent event) {
        send2es(BROKER_INDEX, ConvertUtil.list2List(event.getBrokerMetrics(), BrokerMetricPO.class));
    }
}
@@ -0,0 +1,29 @@
package com.xiaojukeji.know.streaming.km.collector.sink;

import com.didiglobal.logi.log.ILog;
import com.didiglobal.logi.log.LogFactory;
import com.xiaojukeji.know.streaming.km.common.bean.event.metric.ClusterMetricEvent;
import com.xiaojukeji.know.streaming.km.common.bean.po.metrice.ClusterMetricPO;
import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
import org.springframework.context.ApplicationListener;
import org.springframework.stereotype.Component;

import javax.annotation.PostConstruct;

import static com.xiaojukeji.know.streaming.km.common.constant.ESIndexConstant.CLUSTER_INDEX;

@Component
public class ClusterMetricESSender extends AbstractMetricESSender implements ApplicationListener<ClusterMetricEvent> {
    protected static final ILog LOGGER = LogFactory.getLog("METRIC_LOGGER");

    @PostConstruct
    public void init(){
        LOGGER.info("class=ClusterMetricESSender||method=init||msg=init finished");
    }

    @Override
    public void onApplicationEvent(ClusterMetricEvent event) {
        send2es(CLUSTER_INDEX, ConvertUtil.list2List(event.getClusterMetrics(), ClusterMetricPO.class));
    }
}
@@ -0,0 +1,29 @@
package com.xiaojukeji.know.streaming.km.collector.sink;

import com.didiglobal.logi.log.ILog;
import com.didiglobal.logi.log.LogFactory;
import com.xiaojukeji.know.streaming.km.common.bean.event.metric.GroupMetricEvent;
import com.xiaojukeji.know.streaming.km.common.bean.po.metrice.GroupMetricPO;
import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
import org.springframework.context.ApplicationListener;
import org.springframework.stereotype.Component;

import javax.annotation.PostConstruct;

import static com.xiaojukeji.know.streaming.km.common.constant.ESIndexConstant.GROUP_INDEX;

@Component
public class GroupMetricESSender extends AbstractMetricESSender implements ApplicationListener<GroupMetricEvent> {
    protected static final ILog LOGGER = LogFactory.getLog("METRIC_LOGGER");

    @PostConstruct
    public void init(){
        LOGGER.info("class=GroupMetricESSender||method=init||msg=init finished");
    }

    @Override
    public void onApplicationEvent(GroupMetricEvent event) {
        send2es(GROUP_INDEX, ConvertUtil.list2List(event.getGroupMetrics(), GroupMetricPO.class));
    }
}
@@ -0,0 +1,28 @@
package com.xiaojukeji.know.streaming.km.collector.sink;

import com.didiglobal.logi.log.ILog;
import com.didiglobal.logi.log.LogFactory;
import com.xiaojukeji.know.streaming.km.common.bean.event.metric.PartitionMetricEvent;
import com.xiaojukeji.know.streaming.km.common.bean.po.metrice.PartitionMetricPO;
import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
import org.springframework.context.ApplicationListener;
import org.springframework.stereotype.Component;

import javax.annotation.PostConstruct;

import static com.xiaojukeji.know.streaming.km.common.constant.ESIndexConstant.PARTITION_INDEX;

@Component
public class PartitionMetricESSender extends AbstractMetricESSender implements ApplicationListener<PartitionMetricEvent> {
    protected static final ILog LOGGER = LogFactory.getLog("METRIC_LOGGER");

    @PostConstruct
    public void init(){
        LOGGER.info("class=PartitionMetricESSender||method=init||msg=init finished");
    }

    @Override
    public void onApplicationEvent(PartitionMetricEvent event) {
        send2es(PARTITION_INDEX, ConvertUtil.list2List(event.getPartitionMetrics(), PartitionMetricPO.class));
    }
}
@@ -0,0 +1,28 @@
package com.xiaojukeji.know.streaming.km.collector.sink;

import com.didiglobal.logi.log.ILog;
import com.didiglobal.logi.log.LogFactory;
import com.xiaojukeji.know.streaming.km.common.bean.event.metric.ReplicaMetricEvent;
import com.xiaojukeji.know.streaming.km.common.bean.po.metrice.ReplicationMetricPO;
import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
import org.springframework.context.ApplicationListener;
import org.springframework.stereotype.Component;

import javax.annotation.PostConstruct;

import static com.xiaojukeji.know.streaming.km.common.constant.ESIndexConstant.REPLICATION_INDEX;

@Component
public class ReplicaMetricESSender extends AbstractMetricESSender implements ApplicationListener<ReplicaMetricEvent> {
    protected static final ILog LOGGER = LogFactory.getLog("METRIC_LOGGER");

    @PostConstruct
    public void init(){
        LOGGER.info("class=ReplicaMetricESSender||method=init||msg=init finished");
    }

    @Override
    public void onApplicationEvent(ReplicaMetricEvent event) {
        send2es(REPLICATION_INDEX, ConvertUtil.list2List(event.getReplicationMetrics(), ReplicationMetricPO.class));
    }
}
@@ -0,0 +1,29 @@
package com.xiaojukeji.know.streaming.km.collector.sink;

import com.didiglobal.logi.log.ILog;
import com.didiglobal.logi.log.LogFactory;
import com.xiaojukeji.know.streaming.km.common.bean.event.metric.*;
import com.xiaojukeji.know.streaming.km.common.bean.po.metrice.*;
import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
import org.springframework.context.ApplicationListener;
import org.springframework.stereotype.Component;

import javax.annotation.PostConstruct;

import static com.xiaojukeji.know.streaming.km.common.constant.ESIndexConstant.TOPIC_INDEX;

@Component
public class TopicMetricESSender extends AbstractMetricESSender implements ApplicationListener<TopicMetricEvent> {
    protected static final ILog LOGGER = LogFactory.getLog("METRIC_LOGGER");

    @PostConstruct
    public void init(){
        LOGGER.info("class=TopicMetricESSender||method=init||msg=init finished");
    }

    @Override
    public void onApplicationEvent(TopicMetricEvent event) {
        send2es(TOPIC_INDEX, ConvertUtil.list2List(event.getTopicMetrics(), TopicMetricPO.class));
    }
}
@@ -3,6 +3,7 @@ package com.xiaojukeji.know.streaming.km.common.bean.dto.cluster;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.xiaojukeji.know.streaming.km.common.bean.dto.BaseDTO;
import com.xiaojukeji.know.streaming.km.common.bean.entity.config.JmxConfig;
import com.xiaojukeji.know.streaming.km.common.bean.entity.config.ZKConfig;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
@@ -34,4 +35,8 @@ public class ClusterPhyBaseDTO extends BaseDTO {
    @NotNull(message = "jmxProperties不允许为空")
    @ApiModelProperty(value="Jmx配置")
    protected JmxConfig jmxProperties;

    // TODO: add a not-null constraint once the front-end page supports this field
    @ApiModelProperty(value="ZK配置")
    protected ZKConfig zkProperties;
}

@@ -3,6 +3,7 @@ package com.xiaojukeji.know.streaming.km.common.bean.dto.group;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.xiaojukeji.know.streaming.km.common.bean.dto.partition.PartitionOffsetDTO;
import com.xiaojukeji.know.streaming.km.common.bean.dto.topic.ClusterTopicDTO;
import com.xiaojukeji.know.streaming.km.common.enums.OffsetTypeEnum;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;

@@ -23,7 +24,7 @@ public class GroupOffsetResetDTO extends ClusterTopicDTO {
    private String groupName;

    /**
     * @see com.xiaojukeji.know.streaming.km.common.enums.GroupOffsetResetEnum
     * @see OffsetTypeEnum
     */
    @NotNull(message = "resetType不允许为空")
    @ApiModelProperty(value = "重置方式", example = "1")

@@ -0,0 +1,32 @@
package com.xiaojukeji.know.streaming.km.common.bean.dto.metrices;

import com.xiaojukeji.know.streaming.km.common.bean.dto.BaseDTO;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;

import javax.validation.constraints.NotNull;

/**
 * @author didi
 */
@Data
@NoArgsConstructor
@AllArgsConstructor
@ApiModel(description = "指标详细属性信息")
public class MetricDetailDTO extends BaseDTO {

    @ApiModelProperty("指标名称")
    private String metric;

    @ApiModelProperty("指标是否显示")
    private Boolean set;

    @NotNull(message = "MetricDetailDTO的rank字段应不为空")
    @ApiModelProperty("指标优先级")
    private Integer rank;

}
@@ -7,6 +7,8 @@ import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;

import javax.validation.Valid;
import java.util.List;
import java.util.Map;

@@ -17,4 +19,8 @@ import java.util.Map;
public class UserMetricConfigDTO extends BaseDTO {
    @ApiModelProperty("指标展示设置项,key:指标名;value:是否展现(true展现/false不展现)")
    private Map<String, Boolean> metricsSet;

    @Valid
    @ApiModelProperty("指标自定义属性列表")
    private List<MetricDetailDTO> metricDetailDTOList;
}

@@ -1,7 +1,8 @@
package com.xiaojukeji.know.streaming.km.common.bean.dto.topic;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.xiaojukeji.know.streaming.km.common.bean.dto.BaseDTO;
import com.xiaojukeji.know.streaming.km.common.bean.dto.pagination.PaginationSortDTO;
import com.xiaojukeji.know.streaming.km.common.enums.OffsetTypeEnum;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
@@ -15,7 +16,7 @@ import javax.validation.constraints.NotNull;
@Data
@JsonIgnoreProperties(ignoreUnknown = true)
@ApiModel(description = "Topic记录")
public class TopicRecordDTO extends BaseDTO {
public class TopicRecordDTO extends PaginationSortDTO {
    @NotNull(message = "truncate不允许为空")
    @ApiModelProperty(value = "是否截断", example = "true")
    private Boolean truncate;
@@ -34,4 +35,13 @@ public class TopicRecordDTO extends BaseDTO {

    @ApiModelProperty(value = "预览超时时间", example = "10000")
    private Long pullTimeoutUnitMs = 8000L;

    /**
     * @see OffsetTypeEnum
     */
    @ApiModelProperty(value = "offset", example = "")
    private Integer filterOffsetReset = 0;

    @ApiModelProperty(value = "开始日期时间戳", example = "")
    private Long startTimestampUnitMs;
}

@@ -1,12 +1,17 @@
package com.xiaojukeji.know.streaming.km.common.bean.entity.broker;

import com.xiaojukeji.know.streaming.km.common.zookeeper.znode.brokers.BrokerMetadata;

import com.alibaba.fastjson.TypeReference;
import com.xiaojukeji.know.streaming.km.common.bean.entity.common.IpPortData;
import com.xiaojukeji.know.streaming.km.common.bean.po.broker.BrokerPO;
import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.apache.kafka.common.Node;

import java.io.Serializable;
import java.util.Map;

/**
 * @author didi
@@ -55,6 +60,11 @@ public class Broker implements Serializable {
     */
    private Integer status;

    /**
     * Listener info
     */
    private Map<String, IpPortData> endpointMap;

    public static Broker buildFrom(Long clusterPhyId, Node node, Long startTimestamp) {
        Broker metadata = new Broker();
        metadata.setClusterPhyId(clusterPhyId);
@@ -68,17 +78,25 @@ public class Broker implements Serializable {
        return metadata;
    }

    public static Broker buildFrom(Long clusterPhyId, Integer brokerId, BrokerMetadata brokerMetadata) {
        Broker metadata = new Broker();
        metadata.setClusterPhyId(clusterPhyId);
        metadata.setBrokerId(brokerId);
        metadata.setHost(brokerMetadata.getHost());
        metadata.setPort(brokerMetadata.getPort());
        metadata.setJmxPort(brokerMetadata.getJmxPort());
        metadata.setStartTimestamp(brokerMetadata.getTimestamp());
        metadata.setRack(brokerMetadata.getRack());
        metadata.setStatus(1);
        return metadata;
    public static Broker buildFrom(BrokerPO brokerPO) {
        Broker broker = ConvertUtil.obj2Obj(brokerPO, Broker.class);
        String endpointMapStr = brokerPO.getEndpointMap();
        if (broker == null || endpointMapStr == null || endpointMapStr.equals("")) {
            return broker;
        }

        // Populate endpoint info
        Map<String, IpPortData> endpointMap = ConvertUtil.str2ObjByJson(endpointMapStr, new TypeReference<Map<String, IpPortData>>(){});
        broker.setEndpointMap(endpointMap);
        return broker;
    }

    public String getJmxHost(String endPoint) {
        if (endPoint == null || endpointMap == null) {
            return host;
        }
        IpPortData ip = endpointMap.get(endPoint);
        return ip != null ? ip.getIp() : host;
    }

    public boolean alive() {

@@ -53,9 +53,16 @@ public class ClusterPhy implements Comparable<ClusterPhy>, EntifyIdInterface {

    /**
     * JMX config
     * @see com.xiaojukeji.know.streaming.km.common.bean.entity.config.JmxConfig
     */
    private String jmxProperties;

    /**
     * ZK config
     * @see com.xiaojukeji.know.streaming.km.common.bean.entity.config.ZKConfig
     */
    private String zkProperties;

    /**
     * ACL enabled
     * @see com.xiaojukeji.know.streaming.km.common.enums.cluster.ClusterAuthTypeEnum

@@ -27,6 +27,9 @@ public class JmxConfig implements Serializable {

    @ApiModelProperty(value="SSL情况下的token", example = "KsKmCCY19")
    private String token;

    @ApiModelProperty(value="使用哪个endpoint网络", example = "EXTERNAL")
    private String useWhichEndpoint;
}

@@ -0,0 +1,31 @@
package com.xiaojukeji.know.streaming.km.common.bean.entity.config;

import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;

import java.io.Serializable;
import java.util.Properties;

/**
 * @author zengqiao
 * @date 22/02/24
 */
@Data
@ApiModel(description = "ZK配置")
public class ZKConfig implements Serializable {
    @ApiModelProperty(value="ZK的jmx配置")
    private JmxConfig jmxConfig;

    @ApiModelProperty(value="ZK是否开启secure", example = "false")
    private Boolean openSecure = false;

    @ApiModelProperty(value="ZK的Session超时时间", example = "15000")
    private Long sessionTimeoutUnitMs = 15000L;

    @ApiModelProperty(value="ZK的Request超时时间", example = "5000")
    private Long requestTimeoutUnitMs = 5000L;

    @ApiModelProperty(value="ZK的Request超时时间")
    private Properties otherProps = new Properties();
}
@@ -1,12 +1,12 @@
package com.xiaojukeji.know.streaming.km.common.bean.entity.config.metric;

import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;

@Data
@NoArgsConstructor
@AllArgsConstructor
public class UserMetricConfig {

    private int type;
@@ -15,6 +15,22 @@ public class UserMetricConfig {

    private boolean set;

    private Integer rank;

    public UserMetricConfig(int type, String metric, boolean set, Integer rank) {
        this.type = type;
        this.metric = metric;
        this.set = set;
        this.rank = rank;
    }

    public UserMetricConfig(int type, String metric, boolean set) {
        this.type = type;
        this.metric = metric;
        this.set = set;
        this.rank = null;
    }

    @Override
    public int hashCode(){
        return metric.hashCode() << 1 + type;
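
A note on the hashCode above: in Java, `+` binds tighter than `<<`, so the expression parses as `metric.hashCode() << (1 + type)`. A minimal sketch of the two readings; which one the author intended is an assumption here:

class ShiftPrecedenceSketch {
    public static void main(String[] args) {
        int hash = 7, type = 2;
        int asWritten = hash << 1 + type;        // hash << (1 + type) -> 7 << 3 = 56
        int maybeIntended = (hash << 1) + type;  // (hash << 1) + type -> 14 + 2 = 16
        System.out.println(asWritten + " vs " + maybeIntended);
    }
}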

@@ -0,0 +1,19 @@
package com.xiaojukeji.know.streaming.km.common.bean.entity.param.partition;

import com.xiaojukeji.know.streaming.km.common.bean.entity.param.cluster.ClusterPhyParam;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.apache.kafka.common.TopicPartition;

import java.util.List;

@Data
@NoArgsConstructor
public class BatchPartitionParam extends ClusterPhyParam {
    private List<TopicPartition> tpList;

    public BatchPartitionParam(Long clusterPhyId, List<TopicPartition> tpList) {
        super(clusterPhyId);
        this.tpList = tpList;
    }
}
@@ -1,6 +1,6 @@
package com.xiaojukeji.know.streaming.km.common.bean.entity.param.partition;

import com.xiaojukeji.know.streaming.km.common.bean.entity.param.cluster.ClusterPhyParam;
import com.xiaojukeji.know.streaming.km.common.bean.entity.param.topic.TopicParam;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.apache.kafka.clients.admin.OffsetSpec;
@@ -10,13 +10,13 @@ import java.util.Map;

@Data
@NoArgsConstructor
public class PartitionOffsetParam extends ClusterPhyParam {
public class PartitionOffsetParam extends TopicParam {
    private Map<TopicPartition, OffsetSpec> topicPartitionOffsets;

    private Long timestamp;

    public PartitionOffsetParam(Long clusterPhyId, Map<TopicPartition, OffsetSpec> topicPartitionOffsets, Long timestamp) {
        super(clusterPhyId);
    public PartitionOffsetParam(Long clusterPhyId, String topicName, Map<TopicPartition, OffsetSpec> topicPartitionOffsets, Long timestamp) {
        super(clusterPhyId, topicName);
        this.topicPartitionOffsets = topicPartitionOffsets;
        this.timestamp = timestamp;
    }

@@ -15,4 +15,12 @@ public class TopicParam extends ClusterPhyParam {
        super(clusterPhyId);
        this.topicName = topicName;
    }

    @Override
    public String toString() {
        return "TopicParam{" +
                "clusterPhyId=" + clusterPhyId +
                ", topicName='" + topicName + '\'' +
                '}';
    }
}

@@ -0,0 +1,15 @@
package com.xiaojukeji.know.streaming.km.common.bean.event.cluster;

import lombok.Getter;

/**
 * Cluster-added event
 * @author zengqiao
 * @date 22/02/25
 */
@Getter
public class ClusterPhyAddedEvent extends ClusterPhyBaseEvent {
    public ClusterPhyAddedEvent(Object source, Long clusterPhyId) {
        super(source, clusterPhyId);
    }
}
@@ -1,26 +0,0 @@
package com.xiaojukeji.know.streaming.km.common.bean.event.kafka.zk;

import lombok.Getter;

@Getter
public abstract class BaseKafkaZKEvent {
    /**
     * Trigger time
     */
    protected Long eventTime;

    /**
     * Whether this event initializes data
     */
    protected Boolean initEvent;

    /**
     * Cluster ID
     */
    protected Long clusterPhyId;

    protected BaseKafkaZKEvent(Long eventTime, Long clusterPhyId) {
        this.eventTime = eventTime;
        this.clusterPhyId = clusterPhyId;
    }
}
@@ -1,10 +0,0 @@
package com.xiaojukeji.know.streaming.km.common.bean.event.kafka.zk;

import lombok.Getter;

@Getter
public class ControllerChangeEvent extends BaseKafkaZKEvent {
    public ControllerChangeEvent(Long eventTime, Long clusterPhyId) {
        super(eventTime, clusterPhyId);
    }
}
@@ -42,4 +42,9 @@ public class BrokerPO extends BasePO {
     * Broker status
     */
    private Integer status;

    /**
     * Listener info
     */
    private String endpointMap;
}

@@ -41,6 +41,11 @@ public class ClusterPhyPO extends BasePO {
     */
    private String jmxProperties;

    /**
     * ZK config
     */
    private String zkProperties;

    /**
     * Auth type
     * @see com.xiaojukeji.know.streaming.km.common.enums.cluster.ClusterAuthTypeEnum

@@ -31,9 +31,15 @@ public class ClusterPhyBaseVO extends BaseTimeVO {
    @ApiModelProperty(value="Jmx配置", example = "{}")
    protected String jmxProperties;

    @ApiModelProperty(value="ZK配置", example = "{}")
    protected String zkProperties;

    @ApiModelProperty(value="描述", example = "测试")
    protected String description;

    @ApiModelProperty(value="集群的kafka版本", example = "2.5.1")
    protected String kafkaVersion;

    @ApiModelProperty(value="集群的运行模式", example = "2:raft模式,其他是ZK模式")
    private Integer runState;
}

@@ -14,4 +14,7 @@ import lombok.NoArgsConstructor;
public class UserMetricConfigVO extends VersionItemVO {
    @ApiModelProperty(value = "该指标用户是否设置展现", example = "true")
    private Boolean set;

    @ApiModelProperty(value = "该指标展示优先级", example = "1")
    private Integer rank;
}

@@ -29,6 +29,10 @@ public class MetricPointVO implements Comparable<MetricPointVO> {
    @Override
    public int compareTo(MetricPointVO o) {
        if(null == o){return 0;}
        if(null == this.getTimeStamp()
                || null == o.getTimeStamp()){
            return 0;
        }

        return this.getTimeStamp().intValue() - o.getTimeStamp().intValue();
    }
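
The compareTo above compares timestamps by subtracting their intValue(), which both truncates epoch-millis values to 32 bits and can overflow. A minimal sketch of the overflow-free comparison, assuming a Long getTimeStamp() as above:

class TimeCompareSketch {
    // Nulls sort as equal, mirroring the null handling in compareTo above.
    static int compareTimestamps(Long a, Long b) {
        if (a == null || b == null) {
            return 0;
        }
        return Long.compare(a, b); // avoids (int) truncation and subtraction overflow
    }
}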
|
||||
|
||||
@@ -42,6 +42,7 @@ public class Constant {
|
||||
*/
|
||||
public static final Integer DEFAULT_CLUSTER_HEALTH_SCORE = 90;
|
||||
|
||||
|
||||
public static final String DEFAULT_USER_NAME = "know-streaming-app";
|
||||
|
||||
public static final int INVALID_CODE = -1;
|
||||
@@ -63,4 +64,6 @@ public class Constant {
|
||||
public static final String COLLECT_METRICS_COST_TIME_METRICS_NAME = "CollectMetricsCostTimeUnitSec";
|
||||
public static final Float COLLECT_METRICS_ERROR_COST_TIME = -1.0F;
|
||||
|
||||
public static final Integer DEFAULT_RETRY_TIME = 3;
|
||||
|
||||
}
|
||||
|
||||
@@ -0,0 +1,647 @@
package com.xiaojukeji.know.streaming.km.common.constant;

public class ESIndexConstant {

    public final static String TOPIC_INDEX = "ks_kafka_topic_metric";

    public final static String TOPIC_TEMPLATE = "{\n" +
            " \"order\" : 10,\n" +
            " \"index_patterns\" : [\n" +
            " \"ks_kafka_topic_metric*\"\n" +
            " ],\n" +
            " \"settings\" : {\n" +
            " \"index\" : {\n" +
            " \"number_of_shards\" : \"10\"\n" +
            " }\n" +
            " },\n" +
            " \"mappings\" : {\n" +
            " \"properties\" : {\n" +
            " \"brokerId\" : {\n" +
            " \"type\" : \"long\"\n" +
            " },\n" +
            " \"routingValue\" : {\n" +
            " \"type\" : \"text\",\n" +
            " \"fields\" : {\n" +
            " \"keyword\" : {\n" +
            " \"ignore_above\" : 256,\n" +
            " \"type\" : \"keyword\"\n" +
            " }\n" +
            " }\n" +
            " },\n" +
            " \"topic\" : {\n" +
            " \"type\" : \"keyword\"\n" +
            " },\n" +
            " \"clusterPhyId\" : {\n" +
            " \"type\" : \"long\"\n" +
            " },\n" +
            " \"metrics\" : {\n" +
            " \"properties\" : {\n" +
            " \"BytesIn_min_15\" : {\n" +
            " \"type\" : \"float\"\n" +
            " },\n" +
            " \"Messages\" : {\n" +
            " \"type\" : \"float\"\n" +
            " },\n" +
            " \"BytesRejected\" : {\n" +
            " \"type\" : \"float\"\n" +
            " },\n" +
            " \"PartitionURP\" : {\n" +
            " \"type\" : \"float\"\n" +
            " },\n" +
            " \"HealthCheckTotal\" : {\n" +
            " \"type\" : \"float\"\n" +
            " },\n" +
            " \"ReplicationCount\" : {\n" +
            " \"type\" : \"float\"\n" +
            " },\n" +
            " \"ReplicationBytesOut\" : {\n" +
            " \"type\" : \"float\"\n" +
            " },\n" +
            " \"ReplicationBytesIn\" : {\n" +
            " \"type\" : \"float\"\n" +
            " },\n" +
            " \"FailedFetchRequests\" : {\n" +
            " \"type\" : \"float\"\n" +
            " },\n" +
            " \"BytesIn_min_5\" : {\n" +
            " \"type\" : \"float\"\n" +
            " },\n" +
            " \"HealthScore\" : {\n" +
            " \"type\" : \"float\"\n" +
            " },\n" +
            " \"LogSize\" : {\n" +
            " \"type\" : \"float\"\n" +
            " },\n" +
            " \"BytesOut\" : {\n" +
            " \"type\" : \"float\"\n" +
            " },\n" +
            " \"BytesOut_min_15\" : {\n" +
            " \"type\" : \"float\"\n" +
            " },\n" +
            " \"FailedProduceRequests\" : {\n" +
            " \"type\" : \"float\"\n" +
            " },\n" +
            " \"BytesIn\" : {\n" +
            " \"type\" : \"float\"\n" +
            " },\n" +
            " \"BytesOut_min_5\" : {\n" +
            " \"type\" : \"float\"\n" +
            " },\n" +
            " \"MessagesIn\" : {\n" +
            " \"type\" : \"float\"\n" +
            " },\n" +
            " \"TotalProduceRequests\" : {\n" +
            " \"type\" : \"float\"\n" +
            " },\n" +
            " \"HealthCheckPassed\" : {\n" +
            " \"type\" : \"float\"\n" +
            " }\n" +
            " }\n" +
            " },\n" +
            " \"brokerAgg\" : {\n" +
            " \"type\" : \"keyword\"\n" +
            " },\n" +
            " \"key\" : {\n" +
            " \"type\" : \"text\",\n" +
            " \"fields\" : {\n" +
            " \"keyword\" : {\n" +
            " \"ignore_above\" : 256,\n" +
            " \"type\" : \"keyword\"\n" +
            " }\n" +
            " }\n" +
            " },\n" +
            " \"timestamp\" : {\n" +
            " \"format\" : \"yyyy-MM-dd HH:mm:ss Z||yyyy-MM-dd HH:mm:ss||yyyy-MM-dd HH:mm:ss.SSS Z||yyyy-MM-dd HH:mm:ss.SSS||yyyy-MM-dd HH:mm:ss,SSS||yyyy/MM/dd HH:mm:ss||yyyy-MM-dd HH:mm:ss,SSS Z||yyyy/MM/dd HH:mm:ss,SSS Z||epoch_millis\",\n" +
            " \"index\" : true,\n" +
            " \"type\" : \"date\",\n" +
            " \"doc_values\" : true\n" +
            " }\n" +
            " }\n" +
            " },\n" +
            " \"aliases\" : { }\n" +
            " }";

    public final static String CLUSTER_INDEX = "ks_kafka_cluster_metric";

    public final static String CLUSTER_TEMPLATE = "{\n" +
            " \"order\" : 10,\n" +
            " \"index_patterns\" : [\n" +
            " \"ks_kafka_cluster_metric*\"\n" +
            " ],\n" +
            " \"settings\" : {\n" +
            " \"index\" : {\n" +
            " \"number_of_shards\" : \"10\"\n" +
            " }\n" +
            " },\n" +
            " \"mappings\" : {\n" +
            " \"properties\" : {\n" +
            " \"routingValue\" : {\n" +
            " \"type\" : \"text\",\n" +
            " \"fields\" : {\n" +
            " \"keyword\" : {\n" +
            " \"ignore_above\" : 256,\n" +
            " \"type\" : \"keyword\"\n" +
            " }\n" +
            " }\n" +
            " },\n" +
            " \"clusterPhyId\" : {\n" +
            " \"type\" : \"long\"\n" +
            " },\n" +
            " \"metrics\" : {\n" +
            " \"properties\" : {\n" +
            " \"Connections\" : {\n" +
            " \"type\" : \"double\"\n" +
            " },\n" +
            " \"BytesIn_min_15\" : {\n" +
            " \"type\" : \"double\"\n" +
            " },\n" +
            " \"PartitionURP\" : {\n" +
            " \"type\" : \"double\"\n" +
            " },\n" +
            " \"HealthScore_Topics\" : {\n" +
            " \"type\" : \"double\"\n" +
            " },\n" +
            " \"EventQueueSize\" : {\n" +
            " \"type\" : \"double\"\n" +
            " },\n" +
            " \"ActiveControllerCount\" : {\n" +
            " \"type\" : \"double\"\n" +
            " },\n" +
            " \"GroupDeads\" : {\n" +
            " \"type\" : \"double\"\n" +
            " },\n" +
            " \"BytesIn_min_5\" : {\n" +
            " \"type\" : \"double\"\n" +
            " },\n" +
            " \"HealthCheckTotal_Topics\" : {\n" +
            " \"type\" : \"double\"\n" +
            " },\n" +
            " \"Partitions\" : {\n" +
            " \"type\" : \"double\"\n" +
            " },\n" +
            " \"BytesOut\" : {\n" +
            " \"type\" : \"double\"\n" +
            " },\n" +
            " \"Groups\" : {\n" +
            " \"type\" : \"double\"\n" +
            " },\n" +
            " \"BytesOut_min_15\" : {\n" +
            " \"type\" : \"double\"\n" +
            " },\n" +
            " \"TotalRequestQueueSize\" : {\n" +
            " \"type\" : \"double\"\n" +
            " },\n" +
            " \"HealthCheckPassed_Groups\" : {\n" +
            " \"type\" : \"double\"\n" +
            " },\n" +
            " \"TotalProduceRequests\" : {\n" +
            " \"type\" : \"double\"\n" +
            " },\n" +
            " \"HealthCheckPassed\" : {\n" +
            " \"type\" : \"double\"\n" +
            " },\n" +
            " \"TotalLogSize\" : {\n" +
            " \"type\" : \"double\"\n" +
            " },\n" +
            " \"GroupEmptys\" : {\n" +
            " \"type\" : \"double\"\n" +
            " },\n" +
            " \"PartitionNoLeader\" : {\n" +
            " \"type\" : \"double\"\n" +
            " },\n" +
            " \"HealthScore_Brokers\" : {\n" +
            " \"type\" : \"double\"\n" +
            " },\n" +
            " \"Messages\" : {\n" +
            " \"type\" : \"double\"\n" +
            " },\n" +
            " \"Topics\" : {\n" +
            " \"type\" : \"double\"\n" +
            " },\n" +
            " \"PartitionMinISR_E\" : {\n" +
            " \"type\" : \"double\"\n" +
            " },\n" +
            " \"HealthCheckTotal\" : {\n" +
            " \"type\" : \"double\"\n" +
            " },\n" +
            " \"Brokers\" : {\n" +
            " \"type\" : \"double\"\n" +
            " },\n" +
            " \"Replicas\" : {\n" +
            " \"type\" : \"double\"\n" +
            " },\n" +
            " \"HealthCheckTotal_Groups\" : {\n" +
            " \"type\" : \"double\"\n" +
            " },\n" +
            " \"GroupRebalances\" : {\n" +
            " \"type\" : \"double\"\n" +
            " },\n" +
            " \"MessageIn\" : {\n" +
            " \"type\" : \"double\"\n" +
            " },\n" +
            " \"HealthScore\" : {\n" +
            " \"type\" : \"double\"\n" +
            " },\n" +
            " \"HealthCheckPassed_Topics\" : {\n" +
            " \"type\" : \"double\"\n" +
            " },\n" +
            " \"HealthCheckTotal_Brokers\" : {\n" +
            " \"type\" : \"double\"\n" +
            " },\n" +
            " \"PartitionMinISR_S\" : {\n" +
            " \"type\" : \"double\"\n" +
            " },\n" +
            " \"BytesIn\" : {\n" +
            " \"type\" : \"double\"\n" +
            " },\n" +
            " \"BytesOut_min_5\" : {\n" +
            " \"type\" : \"double\"\n" +
            " },\n" +
            " \"GroupActives\" : {\n" +
            " \"type\" : \"double\"\n" +
            " },\n" +
            " \"MessagesIn\" : {\n" +
            " \"type\" : \"double\"\n" +
            " },\n" +
            " \"GroupReBalances\" : {\n" +
            " \"type\" : \"double\"\n" +
            " },\n" +
            " \"HealthCheckPassed_Brokers\" : {\n" +
            " \"type\" : \"double\"\n" +
            " },\n" +
            " \"HealthScore_Groups\" : {\n" +
            " \"type\" : \"double\"\n" +
            " },\n" +
            " \"TotalResponseQueueSize\" : {\n" +
            " \"type\" : \"double\"\n" +
            " },\n" +
            " \"Zookeepers\" : {\n" +
            " \"type\" : \"double\"\n" +
            " },\n" +
            " \"LeaderMessages\" : {\n" +
            " \"type\" : \"double\"\n" +
            " },\n" +
            " \"HealthScore_Cluster\" : {\n" +
            " \"type\" : \"double\"\n" +
            " },\n" +
            " \"HealthCheckPassed_Cluster\" : {\n" +
            " \"type\" : \"double\"\n" +
            " },\n" +
            " \"HealthCheckTotal_Cluster\" : {\n" +
            " \"type\" : \"double\"\n" +
            " }\n" +
            " }\n" +
            " },\n" +
            " \"key\" : {\n" +
            " \"type\" : \"text\",\n" +
            " \"fields\" : {\n" +
            " \"keyword\" : {\n" +
            " \"ignore_above\" : 256,\n" +
            " \"type\" : \"keyword\"\n" +
            " }\n" +
            " }\n" +
            " },\n" +
            " \"timestamp\" : {\n" +
            " \"format\" : \"yyyy-MM-dd HH:mm:ss Z||yyyy-MM-dd HH:mm:ss||yyyy-MM-dd HH:mm:ss.SSS Z||yyyy-MM-dd HH:mm:ss.SSS||yyyy-MM-dd HH:mm:ss,SSS||yyyy/MM/dd HH:mm:ss||yyyy-MM-dd HH:mm:ss,SSS Z||yyyy/MM/dd HH:mm:ss,SSS Z||epoch_millis\",\n" +
            " \"type\" : \"date\"\n" +
            " }\n" +
            " }\n" +
            " },\n" +
            " \"aliases\" : { }\n" +
            " }";

    public final static String BROKER_INDEX = "ks_kafka_broker_metric";

    public final static String BROKER_TEMPLATE = "{\n" +
            " \"order\" : 10,\n" +
            " \"index_patterns\" : [\n" +
            " \"ks_kafka_broker_metric*\"\n" +
            " ],\n" +
            " \"settings\" : {\n" +
            " \"index\" : {\n" +
            " \"number_of_shards\" : \"10\"\n" +
            " }\n" +
            " },\n" +
            " \"mappings\" : {\n" +
            " \"properties\" : {\n" +
            " \"brokerId\" : {\n" +
            " \"type\" : \"long\"\n" +
            " },\n" +
            " \"routingValue\" : {\n" +
            " \"type\" : \"text\",\n" +
            " \"fields\" : {\n" +
            " \"keyword\" : {\n" +
            " \"ignore_above\" : 256,\n" +
            " \"type\" : \"keyword\"\n" +
            " }\n" +
            " }\n" +
            " },\n" +
            " \"clusterPhyId\" : {\n" +
            " \"type\" : \"long\"\n" +
            " },\n" +
            " \"metrics\" : {\n" +
            " \"properties\" : {\n" +
            " \"NetworkProcessorAvgIdle\" : {\n" +
            " \"type\" : \"float\"\n" +
            " },\n" +
            " \"UnderReplicatedPartitions\" : {\n" +
            " \"type\" : \"float\"\n" +
            " },\n" +
            " \"BytesIn_min_15\" : {\n" +
            " \"type\" : \"float\"\n" +
            " },\n" +
            " \"HealthCheckTotal\" : {\n" +
            " \"type\" : \"float\"\n" +
            " },\n" +
            " \"RequestHandlerAvgIdle\" : {\n" +
            " \"type\" : \"float\"\n" +
            " },\n" +
            " \"connectionsCount\" : {\n" +
            " \"type\" : \"float\"\n" +
            " },\n" +
            " \"BytesIn_min_5\" : {\n" +
            " \"type\" : \"float\"\n" +
            " },\n" +
            " \"HealthScore\" : {\n" +
            " \"type\" : \"float\"\n" +
            " },\n" +
            " \"BytesOut\" : {\n" +
            " \"type\" : \"float\"\n" +
            " },\n" +
            " \"BytesOut_min_15\" : {\n" +
            " \"type\" : \"float\"\n" +
            " },\n" +
            " \"BytesIn\" : {\n" +
            " \"type\" : \"float\"\n" +
            " },\n" +
            " \"BytesOut_min_5\" : {\n" +
            " \"type\" : \"float\"\n" +
            " },\n" +
            " \"TotalRequestQueueSize\" : {\n" +
            " \"type\" : \"float\"\n" +
            " },\n" +
            " \"MessagesIn\" : {\n" +
            " \"type\" : \"float\"\n" +
            " },\n" +
            " \"TotalProduceRequests\" : {\n" +
            " \"type\" : \"float\"\n" +
            " },\n" +
            " \"HealthCheckPassed\" : {\n" +
            " \"type\" : \"float\"\n" +
            " },\n" +
            " \"TotalResponseQueueSize\" : {\n" +
            " \"type\" : \"float\"\n" +
            " }\n" +
            " }\n" +
            " },\n" +
            " \"key\" : {\n" +
            " \"type\" : \"text\",\n" +
            " \"fields\" : {\n" +
            " \"keyword\" : {\n" +
            " \"ignore_above\" : 256,\n" +
            " \"type\" : \"keyword\"\n" +
            " }\n" +
            " }\n" +
            " },\n" +
            " \"timestamp\" : {\n" +
            " \"format\" : \"yyyy-MM-dd HH:mm:ss Z||yyyy-MM-dd HH:mm:ss||yyyy-MM-dd HH:mm:ss.SSS Z||yyyy-MM-dd HH:mm:ss.SSS||yyyy-MM-dd HH:mm:ss,SSS||yyyy/MM/dd HH:mm:ss||yyyy-MM-dd HH:mm:ss,SSS Z||yyyy/MM/dd HH:mm:ss,SSS Z||epoch_millis\",\n" +
            " \"index\" : true,\n" +
            " \"type\" : \"date\",\n" +
            " \"doc_values\" : true\n" +
            " }\n" +
            " }\n" +
            " },\n" +
            " \"aliases\" : { }\n" +
            " }";

    public final static String PARTITION_INDEX = "ks_kafka_partition_metric";

    public final static String PARTITION_TEMPLATE = "{\n" +
            " \"order\" : 10,\n" +
            " \"index_patterns\" : [\n" +
            " \"ks_kafka_partition_metric*\"\n" +
            " ],\n" +
            " \"settings\" : {\n" +
            " \"index\" : {\n" +
            " \"number_of_shards\" : \"10\"\n" +
            " }\n" +
            " },\n" +
            " \"mappings\" : {\n" +
            " \"properties\" : {\n" +
            " \"brokerId\" : {\n" +
            " \"type\" : \"long\"\n" +
            " },\n" +
            " \"partitionId\" : {\n" +
            " \"type\" : \"long\"\n" +
            " },\n" +
            " \"routingValue\" : {\n" +
            " \"type\" : \"text\",\n" +
            " \"fields\" : {\n" +
            " \"keyword\" : {\n" +
            " \"ignore_above\" : 256,\n" +
            " \"type\" : \"keyword\"\n" +
            " }\n" +
            " }\n" +
            " },\n" +
            " \"clusterPhyId\" : {\n" +
            " \"type\" : \"long\"\n" +
            " },\n" +
            " \"topic\" : {\n" +
            " \"type\" : \"keyword\"\n" +
            " },\n" +
            " \"metrics\" : {\n" +
            " \"properties\" : {\n" +
            " \"LogStartOffset\" : {\n" +
            " \"type\" : \"float\"\n" +
            " },\n" +
            " \"Messages\" : {\n" +
            " \"type\" : \"float\"\n" +
            " },\n" +
            " \"LogEndOffset\" : {\n" +
            " \"type\" : \"float\"\n" +
            " }\n" +
            " }\n" +
            " },\n" +
            " \"key\" : {\n" +
            " \"type\" : \"text\",\n" +
            " \"fields\" : {\n" +
            " \"keyword\" : {\n" +
            " \"ignore_above\" : 256,\n" +
            " \"type\" : \"keyword\"\n" +
            " }\n" +
            " }\n" +
            " },\n" +
            " \"timestamp\" : {\n" +
            " \"format\" : \"yyyy-MM-dd HH:mm:ss Z||yyyy-MM-dd HH:mm:ss||yyyy-MM-dd HH:mm:ss.SSS Z||yyyy-MM-dd HH:mm:ss.SSS||yyyy-MM-dd HH:mm:ss,SSS||yyyy/MM/dd HH:mm:ss||yyyy-MM-dd HH:mm:ss,SSS Z||yyyy/MM/dd HH:mm:ss,SSS Z||epoch_millis\",\n" +
            " \"index\" : true,\n" +
            " \"type\" : \"date\",\n" +
            " \"doc_values\" : true\n" +
            " }\n" +
            " }\n" +
            " },\n" +
            " \"aliases\" : { }\n" +
            " }";

    public final static String GROUP_INDEX = "ks_kafka_group_metric";

    public final static String GROUP_TEMPLATE = "{\n" +
            " \"order\" : 10,\n" +
            " \"index_patterns\" : [\n" +
            " \"ks_kafka_group_metric*\"\n" +
            " ],\n" +
            " \"settings\" : {\n" +
            " \"index\" : {\n" +
            " \"number_of_shards\" : \"10\"\n" +
            " }\n" +
            " },\n" +
            " \"mappings\" : {\n" +
            " \"properties\" : {\n" +
            " \"group\" : {\n" +
            " \"type\" : \"keyword\"\n" +
            " },\n" +
            " \"partitionId\" : {\n" +
            " \"type\" : \"long\"\n" +
            " },\n" +
            " \"routingValue\" : {\n" +
            " \"type\" : \"text\",\n" +
            " \"fields\" : {\n" +
            " \"keyword\" : {\n" +
            " \"ignore_above\" : 256,\n" +
            " \"type\" : \"keyword\"\n" +
            " }\n" +
            " }\n" +
            " },\n" +
            " \"clusterPhyId\" : {\n" +
            " \"type\" : \"long\"\n" +
            " },\n" +
            " \"topic\" : {\n" +
            " \"type\" : \"keyword\"\n" +
            " },\n" +
            " \"metrics\" : {\n" +
            " \"properties\" : {\n" +
            " \"HealthScore\" : {\n" +
            " \"type\" : \"float\"\n" +
            " },\n" +
            " \"Lag\" : {\n" +
            " \"type\" : \"float\"\n" +
            " },\n" +
            " \"OffsetConsumed\" : {\n" +
            " \"type\" : \"float\"\n" +
            " },\n" +
            " \"HealthCheckTotal\" : {\n" +
            " \"type\" : \"float\"\n" +
            " },\n" +
            " \"HealthCheckPassed\" : {\n" +
            " \"type\" : \"float\"\n" +
            " }\n" +
            " }\n" +
            " },\n" +
            " \"groupMetric\" : {\n" +
            " \"type\" : \"keyword\"\n" +
            " },\n" +
            " \"key\" : {\n" +
            " \"type\" : \"text\",\n" +
            " \"fields\" : {\n" +
            " \"keyword\" : {\n" +
            " \"ignore_above\" : 256,\n" +
            " \"type\" : \"keyword\"\n" +
            " }\n" +
            " }\n" +
            " },\n" +
            " \"timestamp\" : {\n" +
            " \"format\" : \"yyyy-MM-dd HH:mm:ss Z||yyyy-MM-dd HH:mm:ss||yyyy-MM-dd HH:mm:ss.SSS Z||yyyy-MM-dd HH:mm:ss.SSS||yyyy-MM-dd HH:mm:ss,SSS||yyyy/MM/dd HH:mm:ss||yyyy-MM-dd HH:mm:ss,SSS Z||yyyy/MM/dd HH:mm:ss,SSS Z||epoch_millis\",\n" +
            " \"index\" : true,\n" +
            " \"type\" : \"date\",\n" +
            " \"doc_values\" : true\n" +
            " }\n" +
            " }\n" +
            " },\n" +
            " \"aliases\" : { }\n" +
            " }";
    public final static String REPLICATION_INDEX = "ks_kafka_replication_metric";

    public final static String REPLICATION_TEMPLATE = "{\n" +
            " \"order\" : 10,\n" +
            " \"index_patterns\" : [\n" +
            " \"ks_kafka_replication_metric*\"\n" +
            " ],\n" +
            " \"settings\" : {\n" +
            " \"index\" : {\n" +
            " \"number_of_shards\" : \"10\"\n" +
            " }\n" +
            " },\n" +
            " \"mappings\" : {\n" +
            " \"properties\" : {\n" +
            " \"brokerId\" : {\n" +
            " \"type\" : \"long\"\n" +
            " },\n" +
            " \"partitionId\" : {\n" +
            " \"type\" : \"long\"\n" +
            " },\n" +
            " \"routingValue\" : {\n" +
            " \"type\" : \"text\",\n" +
            " \"fields\" : {\n" +
            " \"keyword\" : {\n" +
            " \"ignore_above\" : 256,\n" +
            " \"type\" : \"keyword\"\n" +
            " }\n" +
            " }\n" +
            " },\n" +
            " \"clusterPhyId\" : {\n" +
            " \"type\" : \"long\"\n" +
            " },\n" +
            " \"topic\" : {\n" +
            " \"type\" : \"keyword\"\n" +
            " },\n" +
            " \"metrics\" : {\n" +
            " \"properties\" : {\n" +
            " \"LogStartOffset\" : {\n" +
            " \"type\" : \"float\"\n" +
            " },\n" +
            " \"Messages\" : {\n" +
            " \"type\" : \"float\"\n" +
            " },\n" +
            " \"LogEndOffset\" : {\n" +
            " \"type\" : \"float\"\n" +
            " }\n" +
            " }\n" +
            " },\n" +
            " \"key\" : {\n" +
            " \"type\" : \"text\",\n" +
            " \"fields\" : {\n" +
            " \"keyword\" : {\n" +
            " \"ignore_above\" : 256,\n" +
            " \"type\" : \"keyword\"\n" +
            " }\n" +
            " }\n" +
            " },\n" +
            " \"timestamp\" : {\n" +
            " \"format\" : \"yyyy-MM-dd HH:mm:ss Z||yyyy-MM-dd HH:mm:ss||yyyy-MM-dd HH:mm:ss.SSS Z||yyyy-MM-dd HH:mm:ss.SSS||yyyy-MM-dd HH:mm:ss,SSS||yyyy/MM/dd HH:mm:ss||yyyy-MM-dd HH:mm:ss,SSS Z||yyyy/MM/dd HH:mm:ss,SSS Z||epoch_millis\",\n" +
            " \"index\" : true,\n" +
            " \"type\" : \"date\",\n" +
            " \"doc_values\" : true\n" +
            " }\n" +
            " }\n" +
            " },\n" +
            " \"aliases\" : { }\n" +
            " }";

}

@@ -33,7 +33,7 @@ public class KafkaConstant {

    public static final Integer DATA_VERSION_ONE = 1;

    public static final Integer ADMIN_CLIENT_REQUEST_TIME_OUT_UNIT_MS = 3000;
    public static final Integer ADMIN_CLIENT_REQUEST_TIME_OUT_UNIT_MS = 5000;

    public static final Integer KAFKA_SASL_SCRAM_ITERATIONS = 8192;

@@ -41,6 +41,8 @@ public class KafkaConstant {

    public static final Long POLL_ONCE_TIMEOUT_UNIT_MS = 2000L;

    public static final String CONTROLLER_ROLE = "controller";

    public static final Map<String, ConfigDef.ConfigKey> KAFKA_ALL_CONFIG_DEF_MAP = new ConcurrentHashMap<>();

    static {
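
The bumped timeout (3000 ms to 5000 ms) is the kind of value that feeds Kafka's AdminClient configuration. A hedged sketch of that wiring, not the project's actual client factory (the bootstrap address is a placeholder):

```
// Builds an AdminClient whose request timeout matches the new 5000 ms constant.
import java.util.Properties;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;

public class AdminClientFactorySketch {
    public static AdminClient create(String bootstrapServers) {
        Properties props = new Properties();
        props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        // the constant above; requests that take longer than this fail with a TimeoutException
        props.put(AdminClientConfig.REQUEST_TIMEOUT_MS_CONFIG, 5000);
        return AdminClient.create(props);
    }
}
```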
@@ -52,6 +52,10 @@ public class MsgConstant {

    /**************************************************** Partition ****************************************************/

    public static String getPartitionNoLeader(Long clusterPhyId, String topicName) {
        return String.format("集群ID:[%d] Topic名称:[%s] 所有分区NoLeader", clusterPhyId, topicName);
    }

    public static String getPartitionNotExist(Long clusterPhyId, String topicName) {
        return String.format("集群ID:[%d] Topic名称:[%s] 存在非法的分区ID", clusterPhyId, topicName);
    }

@@ -19,6 +19,11 @@ public class ClusterConverter {
        ClusterPhyPO clusterPhyPO = ConvertUtil.obj2Obj(dto, ClusterPhyPO.class);
        clusterPhyPO.setClientProperties(ConvertUtil.obj2Json(dto.getClientProperties()));
        clusterPhyPO.setJmxProperties(ConvertUtil.obj2Json(dto.getJmxProperties()));
        if (ValidateUtils.isNull(dto.getZkProperties())) {
            clusterPhyPO.setZkProperties("");
        } else {
            clusterPhyPO.setZkProperties(ConvertUtil.obj2Json(dto.getZkProperties()));
        }
        clusterPhyPO.setRunState(
                ValidateUtils.isBlank(dto.getZookeeper()) ?
                        ClusterRunStateEnum.RUN_RAFT.getRunState() :
@@ -32,6 +37,11 @@ public class ClusterConverter {
        ClusterPhyPO clusterPhyPO = ConvertUtil.obj2Obj(dto, ClusterPhyPO.class);
        clusterPhyPO.setClientProperties(ConvertUtil.obj2Json(dto.getClientProperties()));
        clusterPhyPO.setJmxProperties(ConvertUtil.obj2Json(dto.getJmxProperties()));
        if (ValidateUtils.isNull(dto.getZkProperties())) {
            clusterPhyPO.setZkProperties("");
        } else {
            clusterPhyPO.setZkProperties(ConvertUtil.obj2Json(dto.getZkProperties()));
        }
        clusterPhyPO.setRunState(
                ValidateUtils.isBlank(dto.getZookeeper()) ?
                        ClusterRunStateEnum.RUN_RAFT.getRunState() :
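
The same null-check is now duplicated in both converter methods. A hypothetical helper that would fold it into one place (the method name and placement are the editor's invention, not part of the commit; ValidateUtils and ConvertUtil are the utilities already used above):

```
// Hypothetical extraction of the repeated zkProperties handling.
private static String zkPropertiesToJson(Object zkProperties) {
    return ValidateUtils.isNull(zkProperties) ? "" : ConvertUtil.obj2Json(zkProperties);
}
```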
@@ -170,6 +170,7 @@ public class ReassignConverter {
        detail.setOriginalBrokerIdList(CommonUtils.string2IntList(subJobPO.getOriginalBrokerIds()));
        detail.setReassignBrokerIdList(CommonUtils.string2IntList(subJobPO.getReassignBrokerIds()));
        detail.setStatus(subJobPO.getStatus());
        detail.setOldReplicaNum(detail.getOriginalBrokerIdList().size());

        ReassignSubJobExtendData extendData = ConvertUtil.str2ObjByJson(subJobPO.getExtendData(), ReassignSubJobExtendData.class);
        if (extendData != null) {
@@ -217,6 +218,7 @@ public class ReassignConverter {

        topicDetail.setPresentReplicaNum(partitionDetailList.get(0).getPresentReplicaNum());
        topicDetail.setNewReplicaNum(partitionDetailList.get(0).getNewReplicaNum());
        topicDetail.setOldReplicaNum(partitionDetailList.get(0).getOldReplicaNum());
        topicDetail.setOriginalRetentionTimeUnitMs(partitionDetailList.get(0).getOriginalRetentionTimeUnitMs());
        topicDetail.setReassignRetentionTimeUnitMs(partitionDetailList.get(0).getReassignRetentionTimeUnitMs());

@@ -3,19 +3,19 @@ package com.xiaojukeji.know.streaming.km.common.enums;
import lombok.Getter;

/**
 * Reset offset
 * Offset type
 * @author zengqiao
 * @date 19/4/8
 */
@Getter
public enum GroupOffsetResetEnum {
    LATEST(0, "重置到最新"),
public enum OffsetTypeEnum {
    LATEST(0, "最新"),

    EARLIEST(1, "重置到最旧"),
    EARLIEST(1, "最旧"),

    PRECISE_TIMESTAMP(2, "按时间进行重置"),
    PRECISE_TIMESTAMP(2, "指定时间"),

    PRECISE_OFFSET(3, "重置到指定位置"),
    PRECISE_OFFSET(3, "指定位置"),

    ;

@@ -23,7 +23,7 @@ public enum GroupOffsetResetEnum {

    private final String message;

    GroupOffsetResetEnum(int resetType, String message) {
    OffsetTypeEnum(int resetType, String message) {
        this.resetType = resetType;
        this.message = message;
    }
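
Each of these offset types corresponds to a standard Kafka consumer seek. A sketch of that mapping, illustrative only and not the project's actual reset implementation (tsOrOffset is a timestamp for PRECISE_TIMESTAMP and an absolute offset for PRECISE_OFFSET):

```
// Maps the four offset types onto KafkaConsumer seek operations.
import java.util.Collection;
import java.util.Map;
import java.util.stream.Collectors;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.consumer.OffsetAndTimestamp;
import org.apache.kafka.common.TopicPartition;

public class OffsetSeekSketch {
    public static void seek(KafkaConsumer<?, ?> consumer, Collection<TopicPartition> parts,
                            OffsetTypeEnum type, long tsOrOffset) {
        switch (type) {
            case LATEST:
                consumer.seekToEnd(parts);
                break;
            case EARLIEST:
                consumer.seekToBeginning(parts);
                break;
            case PRECISE_TIMESTAMP:
                // look up the earliest offset whose timestamp is >= tsOrOffset, per partition
                Map<TopicPartition, OffsetAndTimestamp> found = consumer.offsetsForTimes(
                        parts.stream().collect(Collectors.toMap(p -> p, p -> tsOrOffset)));
                found.forEach((p, oat) -> { if (oat != null) consumer.seek(p, oat.offset()); });
                break;
            case PRECISE_OFFSET:
                parts.forEach(p -> consumer.seek(p, tsOrOffset));
                break;
        }
    }
}
```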
@@ -26,7 +26,7 @@ public enum HealthCheckNameEnum {
            HealthCheckDimensionEnum.CLUSTER,
            "Controller",
            Constant.HC_CONFIG_NAME_PREFIX + "CLUSTER_NO_CONTROLLER",
            "集群Controller数错误",
            "集群Controller数正常",
            HealthCompareValueConfig.class
    ),

@@ -34,7 +34,7 @@ public enum HealthCheckNameEnum {
            HealthCheckDimensionEnum.BROKER,
            "RequestQueueSize",
            Constant.HC_CONFIG_NAME_PREFIX + "BROKER_REQUEST_QUEUE_FULL",
            "Broker-RequestQueueSize被打满",
            "Broker-RequestQueueSize指标",
            HealthCompareValueConfig.class
    ),

@@ -42,7 +42,7 @@ public enum HealthCheckNameEnum {
            HealthCheckDimensionEnum.BROKER,
            "NetworkProcessorAvgIdlePercent",
            Constant.HC_CONFIG_NAME_PREFIX + "BROKER_NETWORK_PROCESSOR_AVG_IDLE_TOO_LOW",
            "Broker-NetworkProcessorAvgIdlePercent的Idle过低",
            "Broker-NetworkProcessorAvgIdlePercent指标",
            HealthCompareValueConfig.class
    ),

@@ -50,7 +50,7 @@ public enum HealthCheckNameEnum {
            HealthCheckDimensionEnum.GROUP,
            "Group Re-Balance",
            Constant.HC_CONFIG_NAME_PREFIX + "GROUP_RE_BALANCE_TOO_FREQUENTLY",
            "Group re-balance太频繁",
            "Group re-balance频率",
            HealthDetectedInLatestMinutesConfig.class
    ),

@@ -66,7 +66,7 @@ public enum HealthCheckNameEnum {
            HealthCheckDimensionEnum.TOPIC,
            "UnderReplicaTooLong",
            Constant.HC_CONFIG_NAME_PREFIX + "TOPIC_UNDER_REPLICA_TOO_LONG",
            "Topic 长期处于未同步状态",
            "Topic 未同步持续时间",
            HealthDetectedInLatestMinutesConfig.class
    ),

@@ -1,54 +0,0 @@
package com.xiaojukeji.know.streaming.km.common.enums.metric;

/**
 * @author: D10865
 * @description:
 * @date: Create on 2019/3/11 2:19 PM
 * @modified By D10865
 *
 * ES monitoring data, one index per dimension
 */
public enum KafkaMetricIndexEnum {

    /**
     * topic dimension
     */
    TOPIC_INFO("ks_kafka_topic_metric"),

    /**
     * cluster dimension
     */
    CLUSTER_INFO("ks_kafka_cluster_metric"),

    /**
     * broker dimension
     */
    BROKER_INFO("ks_kafka_broker_metric"),

    /**
     * partition dimension
     */
    PARTITION_INFO("ks_kafka_partition_metric"),

    /**
     * group dimension
     */
    GROUP_INFO("ks_kafka_group_metric"),

    /**
     * replication dimension
     */
    REPLICATION_INFO("ks_kafka_replication_metric"),

    ;

    private String index;

    KafkaMetricIndexEnum(String index) {
        this.index = index;
    }

    public String getIndex() {
        return index;
    }
}
@@ -31,9 +31,11 @@ public enum VersionItemTypeEnum {

    SERVICE_OP_PARTITION(320, "service_partition_operation"),
    SERVICE_OP_PARTITION_LEADER(321, "service_partition-leader_operation"),

    SERVICE_OP_REASSIGNMENT(330, "service_reassign_operation"),

    /**
     * Front-end operations
     */

@@ -90,6 +90,8 @@ public class JmxConnectorWrap {
        }
        try {
            jmxConnector.close();

            jmxConnector = null;
        } catch (IOException e) {
            LOGGER.warn("close JmxConnector exception, physicalClusterId:{} brokerId:{} host:{} port:{}.", physicalClusterId, brokerId, host, port, e);
        }
@@ -105,6 +107,11 @@ public class JmxConnectorWrap {
            acquire();
            MBeanServerConnection mBeanServerConnection = jmxConnector.getMBeanServerConnection();
            return mBeanServerConnection.getAttribute(name, attribute);
        } catch (IOException ioe) {
            // If the connection has dropped, re-initialize it and rethrow the exception
            reInitDueIOException();

            throw ioe;
        } finally {
            atomicInteger.incrementAndGet();
        }
@@ -120,6 +127,11 @@ public class JmxConnectorWrap {
            acquire();
            MBeanServerConnection mBeanServerConnection = jmxConnector.getMBeanServerConnection();
            return mBeanServerConnection.getAttributes(name, attributes);
        } catch (IOException ioe) {
            // If the connection has dropped, re-initialize it and rethrow the exception
            reInitDueIOException();

            throw ioe;
        } finally {
            atomicInteger.incrementAndGet();
        }
@@ -131,6 +143,11 @@ public class JmxConnectorWrap {
            acquire();
            MBeanServerConnection mBeanServerConnection = jmxConnector.getMBeanServerConnection();
            return mBeanServerConnection.queryNames(name, query);
        } catch (IOException ioe) {
            // If the connection has dropped, re-initialize it and rethrow the exception
            reInitDueIOException();

            throw ioe;
        } finally {
            atomicInteger.incrementAndGet();
        }
@@ -186,4 +203,26 @@ public class JmxConnectorWrap {
            }
        }
    }

    private synchronized void reInitDueIOException() {
        try {
            if (jmxConnector == null) {
                return;
            }

            // Check whether the connection is still alive
            jmxConnector.getConnectionId();

            // Still healthy: nothing to do
            return;
        } catch (Exception e) {
            // ignore
        }

        // Close the old connector
        this.close();

        // Recreate it
        this.checkJmxConnectionAndInitIfNeed();
    }
}
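
From the caller's side, the effect of reInitDueIOException is that a single retry usually succeeds after a dropped connection, because the wrapper has already rebuilt its connector before rethrowing. A hedged sketch (the getAttribute signature is inferred from the hunk above, not from the class declaration):

```
// One-retry read against the wrapper; assumes getAttribute(ObjectName, String)
// is the public method whose body appears in the hunk above.
import java.io.IOException;
import javax.management.ObjectName;

public class JmxReadSketch {
    public static Object readWithOneRetry(JmxConnectorWrap jmx, ObjectName name, String attr) throws Exception {
        try {
            return jmx.getAttribute(name, attr);
        } catch (IOException first) {
            // the wrapper has already re-initialized its connection; try once more
            return jmx.getAttribute(name, attr);
        }
    }
}
```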
@@ -241,4 +241,14 @@ public class CommonUtils {
        }
        return intList;
    }

    public static boolean isNumeric(String str) {
        // null or empty input is not numeric
        if (str == null || str.isEmpty()) {
            return false;
        }
        for (int i = 0; i < str.length(); i++) {
            if (!Character.isDigit(str.charAt(i))) {
                return false;
            }
        }

        return true;
    }
}

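
A few illustrative calls, assuming the null/empty guard above (hypothetical usage, not from the commit):

```
// Expected behaviour on edge cases.
CommonUtils.isNumeric("123");  // true
CommonUtils.isNumeric("12a");  // false
CommonUtils.isNumeric("");     // false with the guard; the bare loop would return true
CommonUtils.isNumeric("-1");   // false: the sign character is not a digit
```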
km-console/.gitignore (vendored, 1 line changed)
@@ -9,6 +9,5 @@ build/
coverage
versions/
debug.log
package-lock.json
yarn.lock
target
@@ -1,43 +1,65 @@
## Install project dependencies
## Prerequisites

- Install lerna
Normally you can package the project by following the [local source startup guide](https://github.com/didi/KnowStreaming/blob/master/docs/dev_guide/%E6%9C%AC%E5%9C%B0%E6%BA%90%E7%A0%81%E5%90%AF%E5%8A%A8%E6%89%8B%E5%86%8C.md). If you need to start or build the front-end service independently on your local machine, follow the steps below.

Before going through these steps, make sure `node` is installed. If it is, running `node -v` in a terminal prints the version; the project is recommended to run on `node v12` (for example `node v12.22.12`).

Also, `windows` users should run the commands below under `git bash`.

## 1. Enter the km-console directory

Before running the following steps in a terminal, change into the `KnowStreaming/km-console` directory first.

## 2. Install project dependencies (required)

1. Install lerna (optional; once installed you can manage the project through lerna's global commands. If you are not familiar with lerna you can skip it)

```
npm install -g lerna
```

- Install project dependencies
2. Install project dependencies

```
npm run i
```

## Start the project
We keep the `package-lock.json` file by default to guard against problems caused by automatic dependency upgrades. Dependencies are downloaded through the taobao mirror `https://registry.npmmirror.com/` by default.

## 3. Start the project (optional; for packaging and building skip ahead to step 4)

```
npm run start
```

### Environment info
This command starts every app under the `packages` directory; to start a single app, see the QA section below.

http://localhost:port
The multi-cluster management app starts at http://localhost:8000 and the system management app occupies http://localhost:8001.
Make sure ports `8000` and `8001` are not taken by other applications.

## Build the project
The local back-end service runs at http://localhost:8080; requests are proxied to port 8080 through the webpack dev server, so the back-end service must be running before API requests succeed.

If startup fails, see the alternative local startup approach in the [local source startup guide](https://github.com/didi/KnowStreaming/blob/master/docs/dev_guide/%E6%9C%AC%E5%9C%B0%E6%BA%90%E7%A0%81%E5%90%AF%E5%8A%A8%E6%89%8B%E5%86%8C.md).

## 4. Build the project

```
npm run build

```

After a successful build, the output is placed under the km-rest/src/main/resources/templates directory.

## Directory structure

- packages
  - layout-clusters-fe: base app & multi-cluster management
  - layout-clusters-fe: base app & multi-cluster management (start this app first; the other apps depend on it)
  - config-manager-fe: sub app - system management
  - tool: start & build scripts
- ...

## FAQ
## QA

Q: Running `npm run start` in the `km-console` directory doesn't show the build and hot-reload output? How do I start a single app?

Q: Running `npm run start` doesn't show the build and hot-reload output?
A: Run `npm run start` inside the specific app, e.g. `cd packages/layout-clusters-fe` and then `npm run start`.

For other problems, see the [faq](https://github.com/didi/KnowStreaming/blob/master/docs/user_guide/faq.md).

km-console/package-lock.json (generated, 8567 lines; diff suppressed)
@@ -17,15 +17,15 @@
    "eslint-plugin-react": "7.22.0",
    "eslint-plugin-react-hooks": "^4.2.0",
    "husky": "4.3.7",
    "lerna": "^4.0.0",
    "lerna": "^5.5.0",
    "lint-staged": "10.5.3",
    "prettier": "2.3.2"
  },
  "scripts": {
    "i": "npm install && lerna bootstrap",
    "clean": "rm -rf node_modules package-lock.json packages/*/node_modules packages/*/package-lock.json",
    "start": "sh ./tool/start.sh",
    "build": "sh ./tool/build.sh",
    "start": "lerna run start",
    "build": "lerna run build",
    "changelog": "conventional-changelog -p angular -i CHANGELOG.md -s -r 0 && git add CHANGELOG.md",
    "cm": "git add . && cz"
  },

@@ -9,5 +9,4 @@ build/
coverage
versions/
debug.log
package-lock.json
yarn.lock
@@ -1,17 +1,21 @@
## Usage

### Installing dependencies:
### Installing dependencies (skip this step if you already ran npm run i in the km-console directory):

```
npm install
```

Note that this only installs the current app's dependencies. If unsure, install dependencies by running npm run i in the km-console directory instead.

### Start:

```
npm run start
```

This app is a sub app; once started it must be viewed through the base app (the base app, layout-clusters-fe, has to be running), at http://localhost:8000

### Build:

```

@@ -1,205 +0,0 @@
/* eslint-disable */
const MiniCssExtractPlugin = require('mini-css-extract-plugin');
const ProgressBarPlugin = require('progress-bar-webpack-plugin');
const CaseSensitivePathsPlugin = require('case-sensitive-paths-webpack-plugin');
const StatsPlugin = require('stats-webpack-plugin');
const { CleanWebpackPlugin } = require('clean-webpack-plugin');
const TerserJSPlugin = require('terser-webpack-plugin');
const OptimizeCSSAssetsPlugin = require('optimize-css-assets-webpack-plugin');
const HappyPack = require('happypack');
const os = require('os');
const happyThreadPool = HappyPack.ThreadPool({ size: os.cpus().length });
const ReactRefreshWebpackPlugin = require('@pmmmwh/react-refresh-webpack-plugin');
const theme = require('./theme');
var cwd = process.cwd();

const path = require('path');
const isProd = process.env.NODE_ENV === 'production';
const babelOptions = {
  cacheDirectory: true,
  babelrc: false,
  presets: [require.resolve('@babel/preset-env'), require.resolve('@babel/preset-typescript'), require.resolve('@babel/preset-react')],
  plugins: [
    [require.resolve('@babel/plugin-proposal-decorators'), { legacy: true }],
    [require.resolve('@babel/plugin-proposal-class-properties'), { loose: true }],
    [require.resolve('@babel/plugin-proposal-private-methods'), { loose: true }],
    require.resolve('@babel/plugin-proposal-export-default-from'),
    require.resolve('@babel/plugin-proposal-export-namespace-from'),
    require.resolve('@babel/plugin-proposal-object-rest-spread'),
    require.resolve('@babel/plugin-transform-runtime'),
    require.resolve('@babel/plugin-proposal-optional-chaining'), //
    require.resolve('@babel/plugin-proposal-nullish-coalescing-operator'), // works around ?? not being transpiled
    require.resolve('@babel/plugin-proposal-numeric-separator'), // transpiles numeric separators like 1_000_000
    !isProd && require.resolve('react-refresh/babel'),
  ]
    .filter(Boolean)
    .concat([
      [
        'babel-plugin-import',
        {
          libraryName: 'antd',
          style: true,
        },
      ],
      '@babel/plugin-transform-object-assign',
    ]),
};
module.exports = () => {
  const manifestName = `manifest.json`;
  const cssFileName = isProd ? '[name]-[chunkhash].css' : '[name].css';

  const plugins = [
    new ProgressBarPlugin(),
    new CaseSensitivePathsPlugin(),
    new MiniCssExtractPlugin({
      filename: cssFileName,
    }),
    new StatsPlugin(manifestName, {
      chunkModules: false,
      source: true,
      chunks: false,
      modules: false,
      assets: true,
      children: false,
      exclude: [/node_modules/],
    }),
    new HappyPack({
      id: 'babel',
      loaders: [
        'cache-loader',
        {
          loader: 'babel-loader',
          options: babelOptions,
        },
      ],
      threadPool: happyThreadPool,
    }),
    !isProd &&
      new ReactRefreshWebpackPlugin({
        overlay: false,
      }),
    // new BundleAnalyzerPlugin({
    //   analyzerPort: 8889
    // }),
  ].filter(Boolean);
  if (isProd) {
    plugins.push(new CleanWebpackPlugin());
  }
  return {
    externals: isProd
      ? [
          /^react$/,
          /^react\/lib.*/,
          /^react-dom$/,
          /.*react-dom.*/,
          /^single-spa$/,
          /^single-spa-react$/,
          /^moment$/,
          /^antd$/,
          /^lodash$/,
          /^react-router$/,
          /^react-router-dom$/,
        ]
      : [],
    resolve: {
      symlinks: false,
      extensions: ['.web.jsx', '.web.js', '.ts', '.tsx', '.js', '.jsx', '.json'],
      alias: {
        // '@pkgs': path.resolve(cwd, 'src/packages'),
        '@pkgs': path.resolve(cwd, './node_modules/@didi/d1-packages'),
        '@cpts': path.resolve(cwd, 'src/components'),
        '@interface': path.resolve(cwd, 'src/interface'),
        '@apis': path.resolve(cwd, 'src/api'),
        react: path.resolve('./node_modules/react'),
        actions: path.resolve(cwd, 'src/actions'),
        lib: path.resolve(cwd, 'src/lib'),
        constants: path.resolve(cwd, 'src/constants'),
        components: path.resolve(cwd, 'src/components'),
        container: path.resolve(cwd, 'src/container'),
        api: path.resolve(cwd, 'src/api'),
        assets: path.resolve(cwd, 'src/assets'),
        mobxStore: path.resolve(cwd, 'src/mobxStore'),
      },
    },
    plugins,
    module: {
      rules: [
        {
          parser: { system: false },
        },
        {
          test: /\.(js|jsx|ts|tsx)$/,
          exclude: /node_modules\/(?!react-intl|@didi\/dcloud-design)/,
          use: [
            {
              loader: 'happypack/loader?id=babel',
            },
          ],
        },
        {
          test: /\.(png|svg|jpeg|jpg|gif|ttf|woff|woff2|eot|pdf)$/,
          use: [
            {
              loader: 'file-loader',
              options: {
                name: '[name].[ext]',
                outputPath: './assets/image/',
                esModule: false,
              },
            },
          ],
        },
        {
          test: /\.(css|less)$/,
          use: [
            {
              loader: MiniCssExtractPlugin.loader,
            },
            'css-loader',
            {
              loader: 'less-loader',
              options: {
                javascriptEnabled: true,
                modifyVars: theme,
              },
            },
          ],
        },
      ],
    },
    optimization: Object.assign(
      {
        splitChunks: {
          cacheGroups: {
            vendor: {
              test: /[\\/]node_modules[\\/]/,
              chunks: 'all',
              name: 'vendor',
              priority: 10,
              enforce: true,
              minChunks: 1,
              maxSize: 3500000,
            },
          },
        },
      },
      isProd
        ? {
            minimizer: [
              new TerserJSPlugin({
                cache: true,
                sourceMap: true,
              }),
              new OptimizeCSSAssetsPlugin({}),
            ],
          }
        : {}
    ),
    devtool: isProd ? 'cheap-module-source-map' : 'source-map',
    node: {
      fs: 'empty',
      net: 'empty',
      tls: 'empty',
    },
  };
};

km-console/packages/config-manager-fe/config/webpack.common.js (new file, 132 lines)
@@ -0,0 +1,132 @@
const path = require('path');
const webpack = require('webpack');
const HtmlWebpackPlugin = require('html-webpack-plugin');
const MiniCssExtractPlugin = require('mini-css-extract-plugin');
const ProgressBarPlugin = require('progress-bar-webpack-plugin');
const CaseSensitivePathsPlugin = require('case-sensitive-paths-webpack-plugin');
const StatsPlugin = require('stats-webpack-plugin');
const HappyPack = require('happypack');
const os = require('os');
const happyThreadPool = HappyPack.ThreadPool({ size: os.cpus().length });
const theme = require('./theme');
const pkgJson = require('../package');

const devMode = process.env.NODE_ENV === 'development';
const babelOptions = {
  cacheDirectory: true,
  babelrc: false,
  presets: [require.resolve('@babel/preset-env'), require.resolve('@babel/preset-typescript'), require.resolve('@babel/preset-react')],
  plugins: [
    [require.resolve('@babel/plugin-proposal-decorators'), { legacy: true }],
    [require.resolve('@babel/plugin-proposal-class-properties'), { loose: true }],
    [require.resolve('@babel/plugin-proposal-private-methods'), { loose: true }],
    [require.resolve('@babel/plugin-proposal-private-property-in-object'), { loose: true }],
    require.resolve('@babel/plugin-proposal-export-default-from'),
    require.resolve('@babel/plugin-proposal-export-namespace-from'),
    require.resolve('@babel/plugin-proposal-object-rest-spread'),
    require.resolve('@babel/plugin-transform-runtime'),
    require.resolve('@babel/plugin-proposal-optional-chaining'), //
    require.resolve('@babel/plugin-proposal-nullish-coalescing-operator'), // works around ?? not being transpiled
    require.resolve('@babel/plugin-proposal-numeric-separator'), // transpiles numeric separators like 1_000_000
    devMode && require.resolve('react-refresh/babel'),
  ].filter(Boolean),
};

module.exports = {
  entry: {
    [pkgJson.ident]: ['./src/index.tsx'],
  },
  resolve: {
    symlinks: false,
    extensions: ['.web.jsx', '.web.js', '.ts', '.tsx', '.js', '.jsx', '.json'],
    alias: {
      '@src': path.resolve(process.cwd(), 'src'),
    },
  },
  plugins: [
    new ProgressBarPlugin(),
    new CaseSensitivePathsPlugin(),
    new StatsPlugin('manifest.json', {
      chunkModules: false,
      source: true,
      chunks: false,
      modules: false,
      assets: true,
      children: false,
      exclude: [/node_modules/],
    }),
    new HappyPack({
      id: 'babel',
      loaders: [
        'cache-loader',
        {
          loader: 'babel-loader',
          options: babelOptions,
        },
      ],
      threadPool: happyThreadPool,
    }),
    new webpack.DefinePlugin({
      'process.env': {
        NODE_ENV: JSON.stringify(process.env.NODE_ENV),
        RUN_ENV: JSON.stringify(process.env.RUN_ENV),
      },
    }),
    new HtmlWebpackPlugin({
      meta: {
        manifest: 'manifest.json',
      },
      template: './src/index.html',
      inject: 'body',
    }),
  ].filter(Boolean),
  module: {
    rules: [
      {
        parser: { system: false },
      },
      {
        test: /\.(js|jsx|ts|tsx)$/,
        exclude: /node_modules\/(?!react-intl|@didi\/dcloud-design)/,
        use: [
          {
            loader: 'happypack/loader?id=babel',
          },
        ],
      },
      {
        test: /\.(png|svg|jpeg|jpg|gif|ttf|woff|woff2|eot|pdf)$/,
        use: [
          {
            loader: 'file-loader',
            options: {
              name: '[name].[ext]',
              outputPath: './assets/image/',
              esModule: false,
            },
          },
        ],
      },
      {
        test: /\.(css|less)$/,
        use: [
          MiniCssExtractPlugin.loader,
          'css-loader',
          {
            loader: 'less-loader',
            options: {
              javascriptEnabled: true,
              modifyVars: theme,
            },
          },
        ],
      },
    ],
  },
  node: {
    fs: 'empty',
    net: 'empty',
    tls: 'empty',
  },
  stats: 'errors-warnings',
};

km-console/packages/config-manager-fe/config/webpack.dev.js (new file, 35 lines)
@@ -0,0 +1,35 @@
const MiniCssExtractPlugin = require('mini-css-extract-plugin');
const ReactRefreshWebpackPlugin = require('@pmmmwh/react-refresh-webpack-plugin');
const pkgJson = require('../package');

module.exports = {
  mode: 'development',
  plugins: [
    new MiniCssExtractPlugin(),
    new ReactRefreshWebpackPlugin({
      overlay: false,
    }),
  ],
  devServer: {
    host: '127.0.0.1',
    port: pkgJson.port,
    hot: true,
    open: false,
    publicPath: `http://localhost:${pkgJson.port}/${pkgJson.ident}/`,
    inline: true,
    disableHostCheck: true,
    historyApiFallback: true,
    headers: {
      'Access-Control-Allow-Origin': '*',
    },
  },
  output: {
    path: '/',
    publicPath: `http://localhost:${pkgJson.port}/${pkgJson.ident}/`,
    library: pkgJson.ident,
    libraryTarget: 'amd',
    filename: '[name].js',
    chunkFilename: '[name].js',
  },
  devtool: 'cheap-module-eval-source-map',
};

km-console/packages/config-manager-fe/config/webpack.prod.js (new file, 59 lines)
@@ -0,0 +1,59 @@
const path = require('path');
const MiniCssExtractPlugin = require('mini-css-extract-plugin');
const { CleanWebpackPlugin } = require('clean-webpack-plugin');
const TerserJSPlugin = require('terser-webpack-plugin');
const OptimizeCSSAssetsPlugin = require('optimize-css-assets-webpack-plugin');
const pkgJson = require('../package');

module.exports = {
  mode: 'production',
  externals: [
    /^react$/,
    /^react\/lib.*/,
    /^react-dom$/,
    /.*react-dom.*/,
    /^single-spa$/,
    /^single-spa-react$/,
    /^moment$/,
    /^lodash$/,
    /^react-router$/,
    /^react-router-dom$/,
  ],
  plugins: [
    new CleanWebpackPlugin(),
    new MiniCssExtractPlugin({
      filename: '[name]-[chunkhash].css',
    }),
  ],
  output: {
    path: path.resolve(process.cwd(), `../../../km-rest/src/main/resources/templates/${pkgJson.ident}`),
    publicPath: `${process.env.PUBLIC_PATH}/${pkgJson.ident}/`,
    library: pkgJson.ident,
    libraryTarget: 'amd',
    filename: '[name]-[chunkhash].js',
    chunkFilename: '[name]-[chunkhash].js',
  },
  optimization: {
    splitChunks: {
      cacheGroups: {
        vendor: {
          test: /[\\/]node_modules[\\/]/,
          chunks: 'all',
          name: 'vendor',
          priority: 10,
          enforce: true,
          minChunks: 1,
          maxSize: 3500000,
        },
      },
    },
    minimizer: [
      new TerserJSPlugin({
        cache: true,
        sourceMap: true,
      }),
      new OptimizeCSSAssetsPlugin({}),
    ],
  },
  devtool: 'none',
};

km-console/packages/config-manager-fe/package-lock.json (generated, 13847 lines; diff suppressed)
@@ -18,12 +18,14 @@
  "scripts": {
    "test": "echo \"Error: run tests from root\" && exit 1",
    "start": "cross-env NODE_ENV=development webpack-dev-server",
    "build": "rm -rf ../../pub/layout & cross-env NODE_ENV=production webpack --max_old_space_size=8000"
    "build": "cross-env NODE_ENV=production webpack --max_old_space_size=8000"
  },
  "dependencies": {
    "@knowdesign/icons": "^1.0.0",
    "babel-preset-react-app": "^10.0.0",
    "classnames": "^2.2.6",
    "dotenv": "^16.0.1",
    "knowdesign": "1.3.7",
    "less": "^3.9.0",
    "lodash": "^4.17.11",
    "mobx": "4.15.7",
@@ -36,8 +38,7 @@
    "react-intl": "^3.2.1",
    "react-router-cache-route": "^1.11.1",
    "single-spa": "^5.8.0",
    "single-spa-react": "^2.14.0",
    "knowdesign": "1.3.7"
    "single-spa-react": "^2.14.0"
  },
  "devDependencies": {
    "@ant-design/icons": "^4.6.2",
@@ -58,6 +59,7 @@
    "@pmmmwh/react-refresh-webpack-plugin": "^0.5.1",
    "@types/lodash": "^4.14.138",
    "@types/react-dom": "^17.0.5",
    "@types/react-router": "5.1.18",
    "@types/react-router-dom": "^5.3.3",
    "@types/single-spa-react": "^2.12.0",
    "@typescript-eslint/eslint-plugin": "4.13.0",

@@ -2,8 +2,8 @@ import React from 'react';
import { BrowserRouter as Router, Redirect, Switch } from 'react-router-dom';
import _ from 'lodash';
import './constants/axiosConfig';
import dantdZhCN from 'knowdesign/lib/locale/zh_CN';
import dantdEnUS from 'knowdesign/lib/locale/en_US';
import dantdZhCN from 'knowdesign/es/locale/zh_CN';
import dantdEnUS from 'knowdesign/es/locale/en_US';
import intlZhCN from './locales/zh';
import intlEnUS from './locales/en';
import { AppContainer, RouteGuard, DProLayout } from 'knowdesign';

@@ -1,6 +1,6 @@
import { DownOutlined } from '@ant-design/icons';
import { Popover } from 'knowdesign';
import { TooltipPlacement } from 'knowdesign/lib/basic/tooltip';
import { TooltipPlacement } from 'knowdesign/es/basic/tooltip';
import React, { useState, useRef, useEffect } from 'react';
import './index.less';

@@ -90,8 +90,9 @@ export default (props: PropsType) => {
  return (
    <div
      key={i}
      className={`container-item ${curState.calculated ? (curState.isHideExpandNode ? 'show' : i >= curState.endI ? 'hide' : 'show') : ''
        }`}
      className={`container-item ${
        curState.calculated ? (curState.isHideExpandNode ? 'show' : i >= curState.endI ? 'hide' : 'show') : ''
      }`}
    >
      {item}
    </div>

@@ -22,6 +22,20 @@
    display: flex;
    justify-content: space-between;
    margin-bottom: 12px;
    .left,
    .right {
      display: flex;
      align-items: center;
    }
    .left .refresh-icon {
      font-size: 20px;
      color: #74788d;
      cursor: pointer;
    }
    .right .search-input {
      width: 248px;
      margin-right: 8px;
    }
  }
 }
}

@@ -35,11 +35,20 @@ serviceInstance.interceptors.request.use(
// Response interceptor
serviceInstance.interceptors.response.use(
  (config: any) => {
    return config.data;
    const res: { code: number; message: string; data: any } = config.data;
    if (res.code !== 0 && res.code !== 200) {
      const desc = res.message;
      notification.error({
        message: desc,
        duration: 3,
      });
      throw res;
    }
    return res;
  },
  (err: any) => {
    const config = err.config;
    if (!config || !config.retryTimes) return dealResponse(err, config.customNotification);
    const config = err?.config;
    if (!config || !config.retryTimes) return dealResponse(err);
    const { __retryCount = 0, retryDelay = 300, retryTimes } = config;
    config.__retryCount = __retryCount;
    if (__retryCount >= retryTimes) {

@@ -1,6 +1,6 @@
import React, { useLayoutEffect } from 'react';
import { Utils, AppContainer } from 'knowdesign';
import { goLogin } from 'constants/axiosConfig';
import { goLogin } from '@src/constants/axiosConfig';

// Permission mapping table
export enum ConfigPermissionMap {

@@ -15,6 +15,7 @@ import {
  AppContainer,
  Utils,
} from 'knowdesign';
import { IconFont } from '@knowdesign/icons';
import { PlusOutlined } from '@ant-design/icons';
import moment from 'moment';
// Import the code editor
@@ -26,8 +27,8 @@ import 'codemirror/addon/selection/active-line';
import 'codemirror/addon/edit/closebrackets';
require('codemirror/mode/xml/xml');
require('codemirror/mode/javascript/javascript');
import api from 'api';
import { defaultPagination } from 'constants/common';
import api from '@src/api';
import { defaultPagination } from '@src/constants/common';
import TypicalListCard from '../../components/TypicalListCard';
import { ConfigPermissionMap } from '../CommonConfig';
import { ConfigOperate, ConfigProps } from './config';
@@ -384,7 +385,7 @@ export default () => {
  const onDelete = (record: ConfigProps) => {
    confirm({
      title: '确定删除配置吗?',
      content: `配置⌈${record.valueName}⌋${record.status === 1 ? '为启用状态,无法删除' : ''}`,
      content: `配置 [${record.valueName}] ${record.status === 1 ? '为启用状态,无法删除' : ''}`,
      centered: true,
      okText: '删除',
      okType: 'primary',
@@ -398,9 +399,11 @@ export default () => {
      },
      maskClosable: true,
      onOk() {
        return request(api.editConfig, {
          method: 'POST',
          data: record.id,
        return request(api.delConfig, {
          method: 'DELETE',
          params: {
            id: record.id,
          },
        }).then((_) => {
          message.success('删除成功');
          getConfigList();
@@ -431,22 +434,28 @@ export default () => {
    <TypicalListCard title="配置管理">
      <div className="config-manage-page">
        <div className="operate-bar">
          <Form form={form} layout="inline" onFinish={() => getConfigList({ page: 1 })}>
            <Form.Item name="valueGroup">
              <Select style={{ width: 180 }} placeholder="请选择模块" options={configGroupList} />
            </Form.Item>
            <Form.Item name="valueName">
              <Input style={{ width: 180 }} placeholder="请输入配置键" />
            </Form.Item>
            <Form.Item name="memo">
              <Input style={{ width: 180 }} placeholder="请输入描述" />
            </Form.Item>
            <Form.Item>
              <Button type="primary" ghost htmlType="submit">
                查询
              </Button>
            </Form.Item>
          </Form>
          <div className="left">
            <div className="refresh-icon" onClick={() => getConfigList()}>
              <IconFont className="icon" type="icon-shuaxin1" />
            </div>
            <Divider type="vertical" style={{ height: 20, top: 0 }} />
            <Form form={form} layout="inline" onFinish={() => getConfigList({ page: 1 })}>
              <Form.Item name="valueGroup">
                <Select style={{ width: 180 }} placeholder="请选择模块" options={configGroupList} />
              </Form.Item>
              <Form.Item name="valueName">
                <Input style={{ width: 180 }} placeholder="请输入配置键" />
              </Form.Item>
              <Form.Item name="memo">
                <Input style={{ width: 180 }} placeholder="请输入描述" />
              </Form.Item>
              <Form.Item>
                <Button type="primary" ghost htmlType="submit">
                  查询
                </Button>
              </Form.Item>
            </Form>
          </div>
          {global.hasPermission && global.hasPermission(ConfigPermissionMap.CONFIG_ADD) ? (
            <Button
              type="primary"

@@ -1,7 +1,8 @@
import React, { useEffect, useState } from 'react';
import { Button, Form, Input, Select, ProTable, DatePicker, Utils, Tooltip } from 'knowdesign';
import api from 'api';
import { defaultPagination } from 'constants/common';
import { Button, Form, Input, Select, ProTable, DatePicker, Utils, Tooltip, Divider } from 'knowdesign';
import { IconFont } from '@knowdesign/icons';
import api from '@src/api';
import { defaultPagination } from '@src/constants/common';
import TypicalListCard from '../../components/TypicalListCard';
import './index.less';
import moment from 'moment';
@@ -119,25 +120,32 @@ export default () => {
|
||||
<>
|
||||
<TypicalListCard title="操作记录">
|
||||
<div className="operate-bar">
|
||||
<Form form={form} layout="inline" onFinish={() => getData({ page: 1 })}>
|
||||
<Form.Item name="targetType">
|
||||
<Select placeholder="请选择模块" options={configGroupList} style={{ width: 160 }} />
|
||||
</Form.Item>
|
||||
<Form.Item name="target">
|
||||
<Input placeholder="请输入操作对象" />
|
||||
</Form.Item>
|
||||
<Form.Item name="detail">
|
||||
<Input placeholder="请输入操作内容" />
|
||||
</Form.Item>
|
||||
<Form.Item name="time">
|
||||
<RangePicker showTime />
|
||||
</Form.Item>
|
||||
<Form.Item>
|
||||
<Button type="primary" ghost htmlType="submit">
|
||||
查询
|
||||
</Button>
|
||||
</Form.Item>
|
||||
</Form>
|
||||
<div className="left">
|
||||
<div className="refresh-icon" onClick={() => getData()}>
|
||||
<IconFont className="icon" type="icon-shuaxin1" />
|
||||
</div>
|
||||
<Divider type="vertical" style={{ height: 20, top: 0 }} />
|
||||
|
||||
<Form form={form} layout="inline" onFinish={() => getData({ page: 1 })}>
|
||||
<Form.Item name="targetType">
|
||||
<Select placeholder="请选择模块" options={configGroupList} style={{ width: 160 }} />
|
||||
</Form.Item>
|
||||
<Form.Item name="target">
|
||||
<Input placeholder="请输入操作对象" />
|
||||
</Form.Item>
|
||||
<Form.Item name="detail">
|
||||
<Input placeholder="请输入操作内容" />
|
||||
</Form.Item>
|
||||
<Form.Item name="time">
|
||||
<RangePicker showTime />
|
||||
</Form.Item>
|
||||
<Form.Item>
|
||||
<Button type="primary" ghost htmlType="submit">
|
||||
查询
|
||||
</Button>
|
||||
</Form.Item>
|
||||
</Form>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<ProTable
|
||||
|
||||
@@ -73,12 +73,12 @@ const CheckboxGroupContainer = (props: CheckboxGroupType) => {
|
||||
</Checkbox>
|
||||
</div>
|
||||
<Checkbox.Group disabled={disabled} style={{ width: '100%' }} value={checkedList} onChange={onCheckedChange}>
|
||||
<Row gutter={[34, 10]}>
|
||||
<Row gutter={[10, 10]}>
|
||||
{options.map((option) => {
|
||||
return (
|
||||
<Col span={8} key={option.value}>
|
||||
<Checkbox value={option.value} className="checkbox-content-ellipsis">
|
||||
{option.label}
|
||||
{option.label.replace('Cluster-Load', '')}
|
||||
</Checkbox>
|
||||
</Col>
|
||||
);
|
||||
|
||||
@@ -20,10 +20,10 @@ import {
   IconFont,
 } from 'knowdesign';
 import moment from 'moment';
-import { CloseOutlined, LoadingOutlined, PlusOutlined } from '@ant-design/icons';
-import { defaultPagination } from 'constants/common';
+import { LoadingOutlined, PlusOutlined } from '@ant-design/icons';
+import { defaultPagination } from '@src/constants/common';
 import { RoleProps, PermissionNode, AssignUser, RoleOperate, FormItemPermission } from './config';
-import api from 'api';
+import api from '@src/api';
 import CheckboxGroupContainer from './CheckboxGroupContainer';
 import { ConfigPermissionMap } from '../CommonConfig';

@@ -50,11 +50,21 @@ const RoleDetailAndUpdate = forwardRef((props, ref): JSX.Element => {
   useEffect(() => {
     const globalPermissions = global.permissions;
     if (globalPermissions && globalPermissions.length) {
-      const sysPermissions = globalPermissions.map((sys: PermissionNode) => ({
-        id: sys.id,
-        name: sys.permissionName,
-        options: sys.childList.map((node) => ({ label: node.permissionName, value: node.id })),
-      }));
+      const sysPermissions = globalPermissions.map((sys: PermissionNode) => {
+        const result = {
+          id: sys.id,
+          name: sys.permissionName,
+          essentialPermission: undefined,
+          options: [],
+        };
+        result.options = sys.childList.map((node) => {
+          if (node.permissionName === '多集群管理查看' || node.permissionName === '系统管理查看') {
+            result.essentialPermission = { label: node.permissionName, value: node.id };
+          }
+          return { label: node.permissionName, value: node.id };
+        });
+        return result;
+      });
       setPermissions(sysPermissions);
     }
   }, [global]);
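The rewritten mapping does two things in one pass: it still flattens each system's child nodes into `{ label, value }` options, and it additionally remembers the child whose name marks it as the system's view permission ('多集群管理查看' or '系统管理查看') as `essentialPermission`. A self-contained sketch of the same transformation, with types abridged from `./config`:

```
interface PermissionNode {
  id: number;
  permissionName: string;
  childList: PermissionNode[];
}

interface FormItemPermission {
  id: number;
  name: string;
  essentialPermission?: { label: string; value: number }; // filled while mapping children
  options: { label: string; value: number }[];
}

const ESSENTIAL_NAMES = ['多集群管理查看', '系统管理查看'];

function toFormItems(globalPermissions: PermissionNode[]): FormItemPermission[] {
  return globalPermissions.map((sys) => {
    const result: FormItemPermission = { id: sys.id, name: sys.permissionName, options: [] };
    result.options = sys.childList.map((node) => {
      if (ESSENTIAL_NAMES.includes(node.permissionName)) {
        result.essentialPermission = { label: node.permissionName, value: node.id };
      }
      return { label: node.permissionName, value: node.id };
    });
    return result;
  });
}
```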
@@ -79,7 +89,10 @@ const RoleDetailAndUpdate = forwardRef((props, ref): JSX.Element => {
     form.validateFields().then((formData) => {
       formData.permissionIdList.forEach((arr, i) => {
         // If child permissions under a system are assigned, automatically grant that system's own permission
-        if (arr !== null && arr.length) {
+        if (!Array.isArray(arr)) {
+          arr = [];
+        }
+        if (arr?.length) {
           arr.push(permissions[i].id);
         }
       });
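The new guard first normalizes a `null` or non-array entry to an empty array, then appends the parent system's own id only when at least one child permission was selected. A standalone sketch of that normalization:

```
// Append the parent system id whenever any of its child permissions are selected;
// entries for untouched systems may arrive as null.
function withParentGrants(permissionIdList: (number[] | null)[], parentIds: number[]): number[][] {
  return permissionIdList.map((arr, i) => {
    const ids = Array.isArray(arr) ? [...arr] : [];
    if (ids.length) ids.push(parentIds[i]);
    return ids;
  });
}

// withParentGrants([[101, 102], null], [1, 2]) -> [[101, 102, 1], []]
```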
@@ -209,10 +222,20 @@ const RoleDetailAndUpdate = forwardRef((props, ref): JSX.Element => {
         <Form.Item
           label="分配权限"
           name="permissionIdList"
+          required
           rules={[
             () => ({
               validator(_, value) {
-                if (Array.isArray(value) && value.some((item) => !!item.length)) {
+                if (Array.isArray(value) && value.some((item) => !!item?.length)) {
+                  const errs = [];
+                  value.forEach((arr, i) => {
+                    if (arr?.length && !arr.includes(permissions[i].essentialPermission.value)) {
+                      errs.push(`[${permissions[i].essentialPermission.label}]`);
+                    }
+                  });
+                  if (errs.length) {
+                    return Promise.reject(`您必须分配 ${errs.join(' 和 ')} 权限`);
+                  }
                   return Promise.resolve();
                 }
                 return Promise.reject(new Error('请为角色至少分配一项权限'));
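The strengthened validator keeps the old "at least one permission" rule and adds a second check: any system with child permissions selected must also include that system's essential view permission. Extracted as a standalone function for clarity (error strings kept from the source):

```
type Essential = { label: string; value: number };

function validatePermissionIdList(
  value: (number[] | null)[] | undefined,
  essentials: Essential[]
): Promise<void> {
  if (Array.isArray(value) && value.some((item) => !!item?.length)) {
    const errs: string[] = [];
    value.forEach((arr, i) => {
      // a non-empty selection must contain the system's essential permission id
      if (arr?.length && !arr.includes(essentials[i].value)) {
        errs.push(`[${essentials[i].label}]`);
      }
    });
    return errs.length ? Promise.reject(`您必须分配 ${errs.join(' 和 ')} 权限`) : Promise.resolve();
  }
  return Promise.reject(new Error('请为角色至少分配一项权限'));
}
```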
@@ -588,38 +611,45 @@ export default (props: { curTabKey: string }): JSX.Element => {

   return (
     <>
-      <div className="operate-bar-right">
-        <Input
-          className="search-input"
-          suffix={
-            <IconFont
-              type="icon-fangdajing"
-              onClick={(_) => {
-                setSearchKeywords(searchKeywordsInput);
-              }}
-              style={{ fontSize: '16px' }}
-            />
-          }
-          placeholder="请输入角色名称"
-          value={searchKeywordsInput}
-          onPressEnter={(_) => {
-            setSearchKeywords(searchKeywordsInput);
-          }}
-          onChange={(e) => {
-            setSearchKeywordsInput(e.target.value);
-          }}
-        />
-        {global.hasPermission && global.hasPermission(ConfigPermissionMap.ROLE_ADD) ? (
-          <Button
-            type="primary"
-            icon={<PlusOutlined />}
-            onClick={() => detailRef.current.onOpen(true, RoleOperate.Add, getRoleList, undefined)}
-          >
-            新增角色
-          </Button>
-        ) : (
-          <></>
-        )}
-      </div>
+      <div className="operate-bar">
+        <div className="left">
+          <div className="refresh-icon" onClick={() => getRoleList()}>
+            <IconFont className="icon" type="icon-shuaxin1" />
+          </div>
+        </div>
+        <div className="right">
+          <Input
+            className="search-input"
+            suffix={
+              <IconFont
+                type="icon-fangdajing"
+                onClick={(_) => {
+                  setSearchKeywords(searchKeywordsInput);
+                }}
+                style={{ fontSize: '16px' }}
+              />
+            }
+            placeholder="请输入角色名称"
+            value={searchKeywordsInput}
+            onPressEnter={(_) => {
+              setSearchKeywords(searchKeywordsInput);
+            }}
+            onChange={(e) => {
+              setSearchKeywordsInput(e.target.value);
+            }}
+          />
+          {global.hasPermission && global.hasPermission(ConfigPermissionMap.ROLE_ADD) ? (
+            <Button
+              type="primary"
+              icon={<PlusOutlined />}
+              onClick={() => detailRef.current.onOpen(true, RoleOperate.Add, getRoleList, undefined)}
+            >
+              新增角色
+            </Button>
+          ) : (
+            <></>
+          )}
+        </div>
+      </div>

       <ProTable
@@ -1,12 +1,13 @@
 import React, { forwardRef, useCallback, useEffect, useImperativeHandle, useRef, useState } from 'react';
 import { Form, ProTable, Select, Button, Input, Modal, message, Drawer, Space, Divider, AppContainer, Utils } from 'knowdesign';
+import { IconFont } from '@knowdesign/icons';
 import { PlusOutlined, QuestionCircleOutlined } from '@ant-design/icons';
 import moment from 'moment';
-import { defaultPagination } from 'constants/common';
+import { defaultPagination } from '@src/constants/common';
 import { UserProps, UserOperate } from './config';
 import CheckboxGroupContainer from './CheckboxGroupContainer';
 import TagsWithHide from '../../components/TagsWithHide/index';
-import api from 'api';
+import api from '@src/api';
 import { ConfigPermissionMap } from '../CommonConfig';

 const { confirm } = Modal;
@@ -341,22 +342,29 @@ export default (props: { curTabKey: string }) => {
   return (
     <>
       <div className="operate-bar">
-        <Form form={form} layout="inline" onFinish={() => getUserList({ page: 1 })}>
-          <Form.Item name="userName">
-            <Input placeholder="请输入用户账号" />
-          </Form.Item>
-          <Form.Item name="realName">
-            <Input placeholder="请输入用户实名" />
-          </Form.Item>
-          <Form.Item name="roleId">
-            <Select style={{ width: 190 }} placeholder="选择平台已创建的角色名" options={simpleRoleList} />
-          </Form.Item>
-          <Form.Item>
-            <Button type="primary" ghost htmlType="submit">
-              查询
-            </Button>
-          </Form.Item>
-        </Form>
+        <div className="left">
+          <div className="refresh-icon" onClick={() => getUserList()}>
+            <IconFont className="icon" type="icon-shuaxin1" />
+          </div>
+          <Divider type="vertical" style={{ height: 20, top: 0 }} />
+
+          <Form form={form} layout="inline" onFinish={() => getUserList({ page: 1 })}>
+            <Form.Item name="userName">
+              <Input placeholder="请输入用户账号" />
+            </Form.Item>
+            <Form.Item name="realName">
+              <Input placeholder="请输入用户实名" />
+            </Form.Item>
+            <Form.Item name="roleId">
+              <Select style={{ width: 190 }} placeholder="选择平台已创建的角色名" options={simpleRoleList} />
+            </Form.Item>
+            <Form.Item>
+              <Button type="primary" ghost htmlType="submit">
+                查询
+              </Button>
+            </Form.Item>
+          </Form>
+        </div>
       {global.hasPermission && global.hasPermission(ConfigPermissionMap.USER_ADD) ? (
         <Button
           type="primary"
@@ -59,5 +59,6 @@ export enum RoleOperate {
 export interface FormItemPermission {
   id: number;
   name: string;
+  essentialPermission: { label: string; value: number };
   options: { label: string; value: number }[];
 }
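For reference, a value satisfying the extended interface might look like this (the ids and the second option are made-up samples; the essential label follows the strings checked in the permission mapping earlier in this diff):

```
const example: FormItemPermission = {
  id: 1,
  name: '系统管理',
  essentialPermission: { label: '系统管理查看', value: 10 },
  options: [
    { label: '系统管理查看', value: 10 },
    { label: '用户管理', value: 11 }, // made-up sample option
  ],
};
```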
@@ -44,13 +44,3 @@
 .role-tab-assign-user .desc-row {
   margin-bottom: 24px;
 }
-
-.operate-bar-right {
-  display: flex;
-  justify-content: right;
-  margin-bottom: 12px;
-  .search-input {
-    width: 248px;
-    margin-right: 8px;
-  }
-}
@@ -1,56 +1,9 @@
 /* eslint-disable */
-const path = require('path');
-require('dotenv').config({ path: path.resolve(process.cwd(), '../../.env') });
-const isProd = process.env.NODE_ENV === 'production';
-const HtmlWebpackPlugin = require('html-webpack-plugin');
-const webpack = require('webpack');
 const merge = require('webpack-merge');
-const pkgJson = require('./package');
-const getWebpackCommonConfig = require('./config/d1-webpack.base');
-const outPath = path.resolve(__dirname, `../../../km-rest/src/main/resources/templates/${pkgJson.ident}`);
-const jsFileName = isProd ? '[name]-[chunkhash].js' : '[name].js';
+const devMode = process.env.NODE_ENV === 'development';
+const commonConfig = require('./config/webpack.common');
+const devConfig = require('./config/webpack.dev');
+const prodConfig = require('./config/webpack.prod');

-module.exports = merge(getWebpackCommonConfig(), {
-  mode: isProd ? 'production' : 'development',
-  entry: {
-    [pkgJson.ident]: ['./src/index.tsx'],
-  },
-  plugins: [
-    new webpack.DefinePlugin({
-      'process.env': {
-        NODE_ENV: JSON.stringify(process.env.NODE_ENV),
-        RUN_ENV: JSON.stringify(process.env.RUN_ENV),
-      },
-    }),
-    new HtmlWebpackPlugin({
-      meta: {
-        manifest: 'manifest.json',
-      },
-      template: './src/index.html',
-      inject: 'body',
-    }),
-  ],
-  output: {
-    path: outPath,
-    publicPath: isProd ? `${process.env.PUBLIC_PATH}/${pkgJson.ident}/` : `http://localhost:${pkgJson.port}/${pkgJson.ident}/`,
-    library: pkgJson.ident,
-    libraryTarget: 'amd',
-    filename: jsFileName,
-    chunkFilename: jsFileName,
-  },
-  devtool: isProd ? 'none' : 'cheap-module-eval-source-map',
-  devServer: {
-    host: '127.0.0.1',
-    port: pkgJson.port,
-    hot: true,
-    open: false,
-    publicPath: `http://localhost:${pkgJson.port}/${pkgJson.ident}/`,
-    inline: true,
-    disableHostCheck: true,
-    historyApiFallback: true,
-    headers: {
-      'Access-Control-Allow-Origin': '*',
-    },
-    proxy: {},
-  },
-});
+module.exports = merge(commonConfig, devMode ? devConfig : prodConfig);
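This rewrite replaces the single monolithic build file with the common/dev/prod split that `webpack-merge` makes idiomatic: the mode-specific files declare only what differs. As a reminder of how `merge` combines the pieces, a minimal sketch (toy values, not the real configs):

```
const merge = require('webpack-merge');

// webpack-merge concatenates arrays (plugins, rules) and deep-merges objects,
// so webpack.dev.js / webpack.prod.js only declare what differs from common.
const merged = merge(
  { plugins: ['common-plugin'], output: { filename: '[name].js' } },
  { plugins: ['dev-plugin'], output: { publicPath: '/' } }
);
// merged.plugins -> ['common-plugin', 'dev-plugin']
// merged.output  -> { filename: '[name].js', publicPath: '/' }
```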
@@ -9,6 +9,5 @@ build/
 coverage
 versions/
 debug.log
-package-lock.json
 yarn.lock
 .d1-workspace.json
@@ -1,17 +1,21 @@
 ## Usage

-### Installing dependencies:
+### Installing dependencies (this step can be skipped if you have already installed dependencies by running npm run i in the km-console directory):

 ```
 npm install
 ```

+Note that this approach only installs the dependencies of the current application. If you are not sure, it is recommended to run npm run i in the km-console directory to install dependencies.
+
 ### Starting:

 ```
 npm run start
 ```

+Once started, the app is available at http://localhost:8000
+
 ### Building:

 ```
@@ -86,12 +86,12 @@ class CoverHtmlWebpackPlugin {

     assetJson.reverse().forEach((item) => {
       if (/\.js$/.test(item)) {
-        // if (item.includes('vendor~')) {
-        //   vendors += `<script async src="${item}"></script>`;
-        // } else {
-        // TODO: there is only one entry
-        portalMap['@portal/layout'] = item;
-        // }
+        if (item.includes('vendor~')) {
+          vendors += `<script async src="${item}"></script>`;
+        } else {
+          // TODO: there is only one entry
+          portalMap['@portal/layout'] = item;
+        }
       } else if (/\.css$/.test(item)) {
         links += `<link href="${item}" rel="stylesheet">`;
       }
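With the commented-out branch re-enabled, the plugin now emits `vendor~` chunks as async `<script>` tags and keeps only the entry bundle in the AMD import map. The routing logic, extracted as a standalone sketch (variable names follow the plugin):

```
// Partition emitted assets: vendor chunks become async scripts, the single
// entry bundle feeds the '@portal/layout' import map, css becomes <link> tags.
function partitionAssets(assetJson: string[]) {
  let vendors = '';
  let links = '';
  const portalMap: Record<string, string> = {};
  assetJson.reverse().forEach((item) => {
    if (/\.js$/.test(item)) {
      if (item.includes('vendor~')) {
        vendors += `<script async src="${item}"></script>`;
      } else {
        portalMap['@portal/layout'] = item; // TODO in source: there is only one entry
      }
    } else if (/\.css$/.test(item)) {
      links += `<link href="${item}" rel="stylesheet">`;
    }
  });
  return { vendors, links, portalMap };
}
```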
@@ -1,187 +0,0 @@
/* eslint-disable */
const MiniCssExtractPlugin = require('mini-css-extract-plugin');
const ProgressBarPlugin = require('progress-bar-webpack-plugin');
const CaseSensitivePathsPlugin = require('case-sensitive-paths-webpack-plugin');
const { CleanWebpackPlugin } = require('clean-webpack-plugin');
const CoverHtmlWebpackPlugin = require('./CoverHtmlWebpackPlugin.js');
var webpackConfigResolveAlias = require('./webpackConfigResolveAlias');
const TerserJSPlugin = require('terser-webpack-plugin');
const OptimizeCSSAssetsPlugin = require('optimize-css-assets-webpack-plugin');
const theme = require('./theme');
const ReactRefreshWebpackPlugin = require('@pmmmwh/react-refresh-webpack-plugin');
const HardSourceWebpackPlugin = require('hard-source-webpack-plugin');

const isProd = process.env.NODE_ENV === 'production';
const babelOptions = {
  cacheDirectory: true,
  babelrc: false,
  presets: [require.resolve('@babel/preset-env'), require.resolve('@babel/preset-typescript'), require.resolve('@babel/preset-react')],
  plugins: [
    [require.resolve('@babel/plugin-proposal-decorators'), { legacy: true }],
    [require.resolve('@babel/plugin-proposal-class-properties'), { loose: true }],
    [require.resolve('@babel/plugin-proposal-private-property-in-object'), { loose: true }],
    [require.resolve('@babel/plugin-proposal-private-methods'), { loose: true }],
    require.resolve('@babel/plugin-proposal-export-default-from'),
    require.resolve('@babel/plugin-proposal-export-namespace-from'),
    require.resolve('@babel/plugin-proposal-object-rest-spread'),
    require.resolve('@babel/plugin-transform-runtime'),
    !isProd && require.resolve('react-refresh/babel'),
  ]
    .filter(Boolean)
    .concat([
      [
        'babel-plugin-import',
        {
          libraryName: 'antd',
          style: true,
        },
      ],
      '@babel/plugin-transform-object-assign',
    ]),
};

module.exports = () => {
  const cssFileName = isProd ? '[name]-[chunkhash].css' : '[name].css';
  const plugins = [
    // !isProd && new HardSourceWebpackPlugin(),
    new CoverHtmlWebpackPlugin(),
    new ProgressBarPlugin(),
    new CaseSensitivePathsPlugin(),
    new MiniCssExtractPlugin({
      filename: cssFileName,
    }),
    !isProd &&
      new ReactRefreshWebpackPlugin({
        overlay: false,
      }),
  ].filter(Boolean);
  const resolve = {
    symlinks: false,
    extensions: ['.web.jsx', '.web.js', '.ts', '.tsx', '.js', '.jsx', '.json'],
    alias: webpackConfigResolveAlias,
  };

  if (isProd) {
    plugins.push(new CleanWebpackPlugin());
  }

  if (!isProd) {
    resolve.mainFields = ['module', 'browser', 'main'];
  }

  return {
    externals: isProd
      ? [
          /^react$/,
          /^react\/lib.*/,
          /^react-dom$/,
          /.*react-dom.*/,
          /^single-spa$/,
          /^single-spa-react$/,
          /^moment$/,
          /^antd$/,
          /^lodash$/,
          /^echarts$/,
          /^react-router$/,
          /^react-router-dom$/,
        ]
      : [],
    resolve,
    plugins,
    module: {
      rules: [
        {
          parser: { system: false },
        },
        {
          test: /\.(js|jsx)$/,
          exclude: /node_modules/,
          use: [
            {
              loader: 'babel-loader',
              options: babelOptions,
            },
          ],
        },
        {
          test: /\.(ts|tsx)$/,
          use: [
            {
              loader: 'babel-loader',
              options: babelOptions,
            },
            {
              loader: 'ts-loader',
              options: {
                allowTsInNodeModules: true,
              },
            },
          ],
        },
        {
          test: /\.(png|svg|jpeg|jpg|gif|ttf|woff|woff2|eot|pdf|otf)$/,
          use: [
            {
              loader: 'file-loader',
              options: {
                name: '[name].[ext]',
                outputPath: './assets/image/',
                esModule: false,
              },
            },
          ],
        },
        {
          test: /\.(css|less)$/,
          use: [
            {
              loader: MiniCssExtractPlugin.loader,
            },
            'css-loader',
            {
              loader: 'less-loader',
              options: {
                javascriptEnabled: true,
                modifyVars: theme,
              },
            },
          ],
        },
      ],
    },
    optimization: Object.assign(
      // {
      //   splitChunks: {
      //     cacheGroups: {
      //       vendor: {
      //         test: /[\\/]node_modules[\\/]/,
      //         chunks: 'all',
      //         name: 'vendor',
      //         priority: 10,
      //         enforce: true,
      //         minChunks: 1,
      //         maxSize: 3500000,
      //       },
      //     },
      //   },
      // },
      isProd
        ? {
            minimizer: [
              new TerserJSPlugin({
                cache: true,
                sourceMap: true,
              }),
              new OptimizeCSSAssetsPlugin({}),
            ],
          }
        : {}
    ),
    devtool: isProd ? 'cheap-module-source-map' : '',
    node: {
      fs: 'empty',
      net: 'empty',
      tls: 'empty',
    },
  };
};
km-console/packages/layout-clusters-fe/config/webpack.common.js (new file, 123 lines)
@@ -0,0 +1,123 @@
const path = require('path');
const theme = require('./theme');
const webpack = require('webpack');
const HtmlWebpackPlugin = require('html-webpack-plugin');
const MiniCssExtractPlugin = require('mini-css-extract-plugin');
const ProgressBarPlugin = require('progress-bar-webpack-plugin');
const CoverHtmlWebpackPlugin = require('./CoverHtmlWebpackPlugin.js');
const CaseSensitivePathsPlugin = require('case-sensitive-paths-webpack-plugin');

const devMode = process.env.NODE_ENV === 'development';
const babelOptions = {
  cacheDirectory: true,
  babelrc: false,
  presets: [require.resolve('@babel/preset-env'), require.resolve('@babel/preset-typescript'), require.resolve('@babel/preset-react')],
  plugins: [
    [require.resolve('@babel/plugin-proposal-decorators'), { legacy: true }],
    [require.resolve('@babel/plugin-proposal-class-properties'), { loose: true }],
    [require.resolve('@babel/plugin-proposal-private-property-in-object'), { loose: true }],
    [require.resolve('@babel/plugin-proposal-private-methods'), { loose: true }],
    require.resolve('@babel/plugin-proposal-export-default-from'),
    require.resolve('@babel/plugin-proposal-export-namespace-from'),
    require.resolve('@babel/plugin-proposal-object-rest-spread'),
    require.resolve('@babel/plugin-transform-runtime'),
    devMode && require.resolve('react-refresh/babel'),
    devMode && [
      'babel-plugin-import',
      {
        libraryName: 'antd',
        style: true,
      },
    ],
  ].filter(Boolean),
};

module.exports = {
  entry: {
    layout: ['./src/index.tsx'],
  },
  resolve: {
    symlinks: false,
    extensions: ['.web.jsx', '.web.js', '.ts', '.tsx', '.js', '.jsx', '.json'],
    alias: {
      '@src': path.resolve('src'),
    },
  },
  plugins: [
    new CoverHtmlWebpackPlugin(),
    new ProgressBarPlugin(),
    new CaseSensitivePathsPlugin(),
    new webpack.DefinePlugin({
      'process.env': {
        NODE_ENV: JSON.stringify(process.env.NODE_ENV),
        RUN_ENV: JSON.stringify(process.env.RUN_ENV),
        BUSINESS_VERSION: process.env.BUSINESS_VERSION === 'true',
        PUBLIC_PATH: JSON.stringify(process.env.PUBLIC_PATH),
      },
    }),
    new HtmlWebpackPlugin({
      meta: {
        manifest: 'manifest.json',
      },
      template: './src/index.html',
      favicon: path.resolve('favicon.ico'),
      inject: 'body',
    }),
  ],
  module: {
    rules: [
      {
        parser: { system: false },
      },
      {
        test: /\.(js|jsx|ts|tsx)$/,
        exclude: /node_modules/,
        use: [
          {
            loader: 'babel-loader',
            options: babelOptions,
          },
          {
            loader: 'ts-loader',
            options: {
              allowTsInNodeModules: true,
            },
          },
        ],
      },
      {
        test: /\.(png|svg|jpeg|jpg|gif|ttf|woff|woff2|eot|pdf|otf)$/,
        use: [
          {
            loader: 'file-loader',
            options: {
              name: '[name].[ext]',
              outputPath: './assets/image/',
              esModule: false,
            },
          },
        ],
      },
      {
        test: /\.(css|less)$/,
        use: [
          MiniCssExtractPlugin.loader,
          'css-loader',
          {
            loader: 'less-loader',
            options: {
              javascriptEnabled: true,
              modifyVars: theme,
            },
          },
        ],
      },
    ],
  },
  node: {
    fs: 'empty',
    net: 'empty',
    tls: 'empty',
  },
  stats: 'errors-warnings',
};
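The `DefinePlugin` block above inlines four build-time values into the bundle, so application code can branch on them without any runtime configuration. A small sketch of how they might be consumed (the feature switch is an illustrative assumption):

```
// These expressions are replaced with literals at build time by DefinePlugin.
const isDev = process.env.NODE_ENV === 'development';
const assetBase = process.env.PUBLIC_PATH; // prefix for statically hosted assets

if (process.env.BUSINESS_VERSION) {
  // hypothetical: enable enterprise-only modules here
}
```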
km-console/packages/layout-clusters-fe/config/webpack.dev.js (new file, 45 lines)
@@ -0,0 +1,45 @@
const MiniCssExtractPlugin = require('mini-css-extract-plugin');
const ReactRefreshWebpackPlugin = require('@pmmmwh/react-refresh-webpack-plugin');

module.exports = {
  mode: 'development',
  plugins: [
    new MiniCssExtractPlugin(),
    new ReactRefreshWebpackPlugin({
      overlay: false,
    }),
  ],
  output: {
    path: '/',
    publicPath: '/',
    filename: '[name].js',
    chunkFilename: '[name].js',
    library: 'layout',
    libraryTarget: 'amd',
  },
  devServer: {
    host: 'localhost',
    port: 8000,
    hot: true,
    open: true,
    openPage: 'http://localhost:8000/',
    inline: true,
    historyApiFallback: true,
    publicPath: `http://localhost:8000/`,
    headers: {
      'cache-control': 'no-cache',
      pragma: 'no-cache',
      'Access-Control-Allow-Origin': '*',
    },
    proxy: {
      '/ks-km/api/v3': {
        changeOrigin: true,
        target: 'http://localhost:8080/',
      },
      '/logi-security/api/v1': {
        changeOrigin: true,
        target: 'http://localhost:8080/',
      },
    },
  },
};
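With this `devServer.proxy` block, any request the page makes to `/ks-km/api/v3/...` or `/logi-security/api/v1/...` is transparently forwarded to the backend on port 8080, so no CORS setup is needed during development. For example (the concrete endpoint path is an assumption):

```
// Served from http://localhost:8000, this request is proxied to http://localhost:8080.
fetch('/ks-km/api/v3/clusters') // endpoint path assumed for illustration
  .then((res) => res.json())
  .then((data) => console.log(data));
```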
@@ -0,0 +1,79 @@
const path = require('path');
const { CleanWebpackPlugin } = require('clean-webpack-plugin');
const CountPlugin = require('./CountComponentWebpackPlugin');
const MiniCssExtractPlugin = require('mini-css-extract-plugin');
const CopyWebpackPlugin = require('copy-webpack-plugin');
const TerserJSPlugin = require('terser-webpack-plugin');
const OptimizeCSSAssetsPlugin = require('optimize-css-assets-webpack-plugin');

const outputPath = path.resolve(process.cwd(), `../../../km-rest/src/main/resources/templates/layout`);

module.exports = {
  mode: 'production',
  plugins: [
    new CleanWebpackPlugin(),
    new CountPlugin({
      pathname: 'knowdesign',
      startCount: true,
      isExportExcel: false,
    }),
    new MiniCssExtractPlugin({
      filename: '[name]-[chunkhash].css',
    }),
    new CopyWebpackPlugin([
      {
        from: path.resolve(process.cwd(), 'static'),
        to: path.resolve(outputPath, '../static'),
      },
      {
        from: path.resolve(process.cwd(), 'favicon.ico'),
        to: path.resolve(outputPath, '../favicon.ico'),
      },
    ]),
  ],
  externals: [
    /^react$/,
    /^react\/lib.*/,
    /^react-dom$/,
    /.*react-dom.*/,
    /^single-spa$/,
    /^single-spa-react$/,
    /^moment$/,
    /^antd$/,
    /^lodash$/,
    /^echarts$/,
    /^react-router$/,
    /^react-router-dom$/,
  ],
  output: {
    path: outputPath,
    publicPath: process.env.PUBLIC_PATH + '/layout/',
    filename: '[name]-[chunkhash].js',
    chunkFilename: '[name]-[chunkhash].js',
    library: 'layout',
    libraryTarget: 'amd',
  },
  optimization: {
    splitChunks: {
      cacheGroups: {
        vendor: {
          test: /[\\/]node_modules[\\/]/,
          chunks: 'all',
          name: 'vendor',
          priority: 10,
          enforce: true,
          minChunks: 1,
          maxSize: 3000000,
        },
      },
    },
    minimizer: [
      new TerserJSPlugin({
        cache: true,
        sourceMap: true,
      }),
      new OptimizeCSSAssetsPlugin({}),
    ],
  },
  devtool: 'none',
};
@@ -1,5 +0,0 @@
var path = require('path');

module.exports = {
  react: path.resolve('./node_modules/react'),
};
km-console/packages/layout-clusters-fe/package-lock.json (generated, new file, 14905 lines; diff suppressed because it is too large)
Some files were not shown because too many files have changed in this diff.