mirror of
https://github.com/didi/KnowStreaming.git
synced 2025-12-24 11:52:08 +08:00
Merge branch 'dev' of https://github.com/didi/LogiKM into didi-dev
@@ -16,10 +16,11 @@
<properties>
<java_source_version>1.8</java_source_version>
<java_target_version>1.8</java_target_version>
<springframework.boot.version>2.1.1.RELEASE</springframework.boot.version>
<spring-version>5.1.3.RELEASE</spring-version>
<springframework.boot.version>2.1.18.RELEASE</springframework.boot.version>
<spring-version>5.1.19.RELEASE</spring-version>
<failOnMissingWebXml>false</failOnMissingWebXml>
<tomcat.version>8.5.66</tomcat.version>
<tomcat.version>8.5.72</tomcat.version>
<log4j2.version>2.16.0</log4j2.version>
</properties>

<dependencies>
@@ -109,8 +110,10 @@
</dependencies>

<build>
<finalName>kafka-manager</finalName>
<plugins>
<plugin>

<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
<version>${springframework.boot.version}</version>
@@ -121,6 +124,7 @@
</goals>
</execution>
</executions>

</plugin>
</plugins>
</build>
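Context for the property changes above: if the build inherits Spring Boot's dependency management (for example via spring-boot-starter-parent), the log4j2.version property overrides the managed Log4j2 version, which is how the 2.16.0 pin takes effect. A minimal, hypothetical check (not part of this commit) for which log4j-api actually ends up on the classpath:

// Hypothetical sketch, not LogiKM code: prints the Implementation-Version
// from the log4j-api jar manifest (may print null if the manifest lacks the entry).
import org.apache.logging.log4j.LogManager;

public class Log4jVersionCheck {
    public static void main(String[] args) {
        String version = LogManager.class.getPackage().getImplementationVersion();
        System.out.println("log4j-api on classpath: " + version);
    }
}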
@@ -32,7 +32,7 @@ import java.util.stream.Collectors;
*/
@Api(tags = "开放接口-Broker相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V1_THIRD_PART_OP_PREFIX)
@RequestMapping(ApiPrefix.API_V1_THIRD_PART_PREFIX)
public class ThirdPartBrokerController {
@Autowired
private BrokerService brokerService;
@@ -44,7 +44,7 @@ public class ThirdPartBrokerController {
private ClusterService clusterService;

@ApiOperation(value = "Broker信息概览", notes = "")
@RequestMapping(value = "{clusterId}/brokers/{brokerId}/overview", method = RequestMethod.GET)
@GetMapping(value = "{clusterId}/brokers/{brokerId}/overview")
@ResponseBody
public Result<ThirdPartBrokerOverviewVO> getBrokerOverview(@PathVariable Long clusterId,
@PathVariable Integer brokerId) {
@@ -70,7 +70,7 @@ public class ThirdPartBrokerController {
}

@ApiOperation(value = "BrokerRegion信息", notes = "所有集群的")
@RequestMapping(value = "broker-regions", method = RequestMethod.GET)
@GetMapping(value = "broker-regions")
@ResponseBody
public Result<List<BrokerRegionVO>> getBrokerRegions() {
List<ClusterDO> clusterDOList = clusterService.list();
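Context for the mapping changes above: @GetMapping is Spring MVC's composed shorthand for @RequestMapping(method = RequestMethod.GET), so the routes themselves stay the same. A minimal illustrative controller (class name and paths are made up, not from LogiKM):

import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;

@RestController
@RequestMapping("demo")
public class MappingEquivalenceDemo {

    // old style, as removed above
    @RequestMapping(value = "ping-old", method = RequestMethod.GET)
    public String pingOld() {
        return "pong";
    }

    // new style, as added above -- same behavior for GET requests
    @GetMapping(value = "ping-new")
    public String pingNew() {
        return "pong";
    }
}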
@@ -1,5 +1,7 @@
package com.xiaojukeji.kafka.manager.web.config;

import com.xiaojukeji.kafka.manager.service.utils.ConfigUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.config.annotation.*;
@@ -20,6 +22,9 @@ import springfox.documentation.swagger2.annotations.EnableSwagger2;
@EnableWebMvc
@EnableSwagger2
public class SwaggerConfig implements WebMvcConfigurer {
@Autowired
private ConfigUtils configUtils;

@Override
public void addResourceHandlers(ResourceHandlerRegistry registry) {
registry.addResourceHandler("swagger-ui.html").addResourceLocations("classpath:/META-INF/resources/");
@@ -39,10 +44,9 @@ public class SwaggerConfig implements WebMvcConfigurer {

private ApiInfo apiInfo() {
return new ApiInfoBuilder()
.title("Logi-KafkaManager 接口文档")
.description("欢迎使用滴滴Logi-KafkaManager")
.contact("huangyiminghappy@163.com")
.version("2.2.0")
.title("LogiKM接口文档")
.description("欢迎使用滴滴LogiKM")
.version(configUtils.getApplicationVersion())
.build();
}
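The apiInfo() change above swaps the hard-coded "2.2.0" for configUtils.getApplicationVersion(). The ConfigUtils implementation is not shown in this diff; one plausible sketch, assuming it reads the spring.application.version property added to application.yml further below (the real class may differ):

import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

// Hypothetical sketch only; the actual ConfigUtils in the service module may differ.
@Component
public class ConfigUtils {
    // spring.application.version is set to @project.version@ in application.yml
    @Value("${spring.application.version:unknown}")
    private String applicationVersion;

    public String getApplicationVersion() {
        return applicationVersion;
    }
}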
@@ -1,15 +1,16 @@
package com.xiaojukeji.kafka.manager.web.converters;

import com.xiaojukeji.kafka.manager.common.entity.ao.account.Account;
import com.xiaojukeji.kafka.manager.bpm.common.OrderResult;
import com.xiaojukeji.kafka.manager.bpm.common.OrderStatusEnum;
import com.xiaojukeji.kafka.manager.bpm.common.entry.BaseOrderDetailData;
import com.xiaojukeji.kafka.manager.common.entity.ao.account.Account;
import com.xiaojukeji.kafka.manager.common.entity.pojo.OrderDO;
import com.xiaojukeji.kafka.manager.common.entity.vo.common.AccountVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.normal.order.OrderResultVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.normal.order.OrderVO;
import com.xiaojukeji.kafka.manager.common.entity.vo.normal.order.detail.OrderDetailBaseVO;
import com.xiaojukeji.kafka.manager.common.utils.CopyUtils;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.common.entity.pojo.OrderDO;

import java.util.ArrayList;
import java.util.Collections;
@@ -41,7 +42,9 @@ public class OrderConverter {
}
OrderVO orderVO = new OrderVO();
CopyUtils.copyProperties(orderVO, orderDO);
orderVO.setGmtTime(orderDO.getGmtCreate());
if (OrderStatusEnum.WAIT_DEAL.getCode().equals(orderDO.getStatus())) {
orderVO.setGmtHandle(null);
}
return orderVO;
}
@@ -95,12 +95,21 @@ public class ReassignModelConverter {
vo.setBeginTime(0L);
vo.setEndTime(0L);

StringBuilder clusterAndTopicName = new StringBuilder();

Integer completedTopicNum = 0;
Set<Integer> statusSet = new HashSet<>();
for (ReassignTaskDO elem: doList) {
vo.setGmtCreate(elem.getGmtCreate().getTime());
vo.setOperator(elem.getOperator());
vo.setDescription(elem.getDescription());

if (clusterAndTopicName.length() == 0) {
clusterAndTopicName.append("-").append(elem.getClusterId()).append("-").append(elem.getTopicName());
} else {
clusterAndTopicName.append("等");
}

if (TaskStatusReassignEnum.isFinished(elem.getStatus())) {
completedTopicNum += 1;
statusSet.add(elem.getStatus());
@@ -114,6 +123,9 @@ public class ReassignModelConverter {
vo.setBeginTime(elem.getBeginTime().getTime());
}

// In the task name, also show the cluster ID and Topic name; when there are multiple, only the first one is shown. PR from Hongten
vo.setTaskName(String.format("%s 数据迁移任务%s", DateUtils.getFormattedDate(taskId), clusterAndTopicName.toString()));

// Overall status of the task
if (statusSet.contains(TaskStatusReassignEnum.RUNNING.getCode())) {
vo.setStatus(TaskStatusReassignEnum.RUNNING.getCode());
@@ -29,6 +29,7 @@ public class TopicMineConverter {
vo.setClusterName(data.getLogicalClusterName());
vo.setBytesIn(data.getBytesIn());
vo.setBytesOut(data.getBytesOut());
vo.setDescription(data.getDescription());
voList.add(vo);
}
return voList;
@@ -9,6 +9,9 @@ server:
spring:
application:
name: kafkamanager
version: @project.version@
profiles:
active: dev
datasource:
kafka-manager:
jdbc-url: jdbc:mysql://localhost:3306/logi_kafka_manager?characterEncoding=UTF-8&useSSL=false&serverTimezone=GMT%2B8
@@ -18,8 +21,6 @@ spring:
main:
allow-bean-definition-overriding: true

profiles:
active: dev
servlet:
multipart:
max-file-size: 100MB
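About the version: @project.version@ line added above: with spring-boot-starter-parent, Maven resource filtering substitutes the project version into application.yml at build time, so at runtime it behaves like any other property. A hypothetical reader, not LogiKM code:

import org.springframework.boot.CommandLineRunner;
import org.springframework.core.env.Environment;
import org.springframework.stereotype.Component;

// Hypothetical sketch: logs the filtered value of spring.application.version at startup.
@Component
public class VersionLogger implements CommandLineRunner {

    private final Environment environment;

    public VersionLogger(Environment environment) {
        this.environment = environment;
    }

    @Override
    public void run(String... args) {
        System.out.println("application version: "
                + environment.getProperty("spring.application.version", "unknown"));
    }
}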
@@ -30,27 +31,57 @@ logging:

custom:
idc: cn
jmx:
max-conn: 10 # as of version 2.3, this setting no longer takes effect here
store-metrics-task:
community:
broker-metrics-enabled: true
topic-metrics-enabled: true
didi:
didi: # metrics specific to DiDi Kafka
app-topic-metrics-enabled: false
topic-request-time-metrics-enabled: false
topic-throttled-metrics: false
save-days: 7
topic-throttled-metrics-enabled: false

# task-related switches
# task-related configuration
task:
op:
sync-topic-enabled: false # periodically sync not-yet-persisted Topics into the DB
order-auto-exec: # switches for the automatic order-approval thread
topic-enabled: false # automatic approval of Topic orders, false: disabled, true: enabled
app-enabled: false # automatic approval of App orders, false: disabled, true: enabled
sync-topic-enabled: false # periodically sync not-yet-persisted Topics into the DB
order-auto-exec: # switches for the automatic order-approval thread
topic-enabled: false # automatic approval of Topic orders, false: disabled, true: enabled
app-enabled: false # automatic approval of App orders, false: disabled, true: enabled
metrics:
collect: # metrics collection
broker-metrics-enabled: true # collect Broker metrics
sink: # metrics reporting
cluster-metrics: # report cluster metrics
sink-db-enabled: true # report to the DB
broker-metrics: # report broker metrics
sink-db-enabled: true # report to the DB
delete: # metrics deletion
delete-limit-size: 1000 # batch size per delete
cluster-metrics-save-days: 14 # retention days for cluster metrics
broker-metrics-save-days: 14 # retention days for Broker metrics
topic-metrics-save-days: 7 # retention days for Topic metrics
topic-request-time-metrics-save-days: 7 # retention days for Topic request-time metrics
topic-throttled-metrics-save-days: 7 # retention days for Topic throttling metrics
app-topic-metrics-save-days: 7 # retention days for App+Topic metrics

thread-pool:
collect-metrics:
thread-num: 256 # size of the metrics-collection thread pool
queue-size: 5000 # queue size of the metrics-collection thread pool
api-call:
thread-num: 16 # size of the API-service thread pool
queue-size: 5000 # queue size of the API-service thread pool

client-pool:
kafka-consumer:
min-idle-client-num: 24 # minimum number of idle clients
max-idle-client-num: 24 # maximum number of idle clients
max-total-client-num: 24 # maximum total number of clients
borrow-timeout-unit-ms: 3000 # borrow timeout, in milliseconds

account:
jump-login:
gateway-api: false # gateway APIs
third-part-api: false # third-party APIs
ldap:
enabled: false
url: ldap://127.0.0.1:389/
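The thread-pool and client-pool blocks above only declare sizes; the code that consumes them is not part of this diff. A hedged sketch of how such settings are commonly mapped onto a JDK thread pool and an Apache commons-pool2 pool (illustrative assumption, not LogiKM's actual wiring):

import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

import org.apache.commons.pool2.impl.GenericObjectPoolConfig;

public class PoolSettingsSketch {
    public static void main(String[] args) {
        // thread-pool.collect-metrics: thread-num=256, queue-size=5000
        ThreadPoolExecutor collectMetricsPool = new ThreadPoolExecutor(
                256, 256, 0L, TimeUnit.MILLISECONDS,
                new LinkedBlockingQueue<>(5000));

        // client-pool.kafka-consumer: 24 idle/total clients, 3000 ms borrow timeout
        GenericObjectPoolConfig<Object> consumerPoolConfig = new GenericObjectPoolConfig<>();
        consumerPoolConfig.setMinIdle(24);
        consumerPoolConfig.setMaxIdle(24);
        consumerPoolConfig.setMaxTotal(24);
        consumerPoolConfig.setMaxWaitMillis(3000);

        collectMetricsPool.shutdown();
    }
}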
@@ -64,19 +95,20 @@ account:
auth-user-registration: true
auth-user-registration-role: normal

kcm:
enabled: false
s3:
kcm: # cluster installation and deployment, installs brokers only
enabled: false # whether to enable
s3: # S3 storage service
endpoint: s3.didiyunapi.com
access-key: 1234567890
secret-key: 0987654321
bucket: logi-kafka
n9e:
base-url: http://127.0.0.1:8004
user-token: 12345678
timeout: 300
account: root
script-file: kcm_script.sh
n9e: # Nightingale (n9e)
base-url: http://127.0.0.1:8004 # address of the Nightingale job service
user-token: 12345678 # the user's token
timeout: 300 # timeout for operating on a single host
account: root # account used when performing operations
script-file: kcm_script.sh # script, already bundled in the kcm module of the source code; no need to change this
logikm-url: http://127.0.0.1:8080 # LogiKM deployment address; during deployment, kcm_script.sh calls LogiKM to check deployment status

monitor:
enabled: false