Merge pull request #473 from didi/dev

Dev
EricZeng
2022-03-07 14:49:52 +08:00
committed by GitHub
147 changed files with 1828 additions and 924 deletions

View File: pom.xml

@@ -16,10 +16,9 @@
<properties>
<java_source_version>1.8</java_source_version>
<java_target_version>1.8</java_target_version>
<springframework.boot.version>2.1.1.RELEASE</springframework.boot.version>
<spring-version>5.1.3.RELEASE</spring-version>
<failOnMissingWebXml>false</failOnMissingWebXml>
<tomcat.version>8.5.72</tomcat.version>
<log4j2.version>2.16.0</log4j2.version>
</properties>
<dependencies>
@@ -72,22 +71,22 @@
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
<version>${springframework.boot.version}</version>
<version>${spring.boot.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-aop</artifactId>
<version>${springframework.boot.version}</version>
<version>${spring.boot.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-logging</artifactId>
<version>${springframework.boot.version}</version>
<version>${spring.boot.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-thymeleaf</artifactId>
<version>${springframework.boot.version}</version>
<version>${spring.boot.version}</version>
</dependency>
<dependency>
<groupId>junit</groupId>
@@ -104,7 +103,6 @@
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-context-support</artifactId>
<version>${spring-version}</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
@@ -116,11 +114,11 @@
<build>
<finalName>kafka-manager</finalName>
<plugins>
<plugin>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
<version>${springframework.boot.version}</version>
<version>${spring.boot.version}</version>
<executions>
<execution>
<goals>

View File: MainApplication.java

@@ -3,7 +3,6 @@ package com.xiaojukeji.kafka.manager.web;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.web.servlet.ServletComponentScan;
import org.springframework.scheduling.annotation.EnableAsync;
@@ -17,7 +16,6 @@ import org.springframework.scheduling.annotation.EnableScheduling;
@EnableAsync
@EnableScheduling
@ServletComponentScan
@EnableAutoConfiguration
@SpringBootApplication(scanBasePackages = {"com.xiaojukeji.kafka.manager"})
public class MainApplication {
private static final Logger LOGGER = LoggerFactory.getLogger(MainApplication.class);
@@ -28,7 +26,8 @@ public class MainApplication {
sa.run(args);
LOGGER.info("MainApplication started");
} catch (Exception e) {
e.printStackTrace();
LOGGER.error("start failed and application exit", e);
System.exit(1);
}
}
}
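
Two things change here: @EnableAutoConfiguration is dropped because @SpringBootApplication is already meta-annotated with it, so the explicit annotation was redundant; and the bare e.printStackTrace() is replaced by a structured error log plus a non-zero exit code, so a failed bootstrap is both captured by the logging pipeline and visible to process supervisors. A minimal sketch of the resulting startup class, assuming no further SpringApplication customization:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

@SpringBootApplication(scanBasePackages = {"com.xiaojukeji.kafka.manager"})
public class MainApplication {
    private static final Logger LOGGER = LoggerFactory.getLogger(MainApplication.class);

    public static void main(String[] args) {
        try {
            // @SpringBootApplication already implies @EnableAutoConfiguration
            new SpringApplication(MainApplication.class).run(args);
            LOGGER.info("MainApplication started");
        } catch (Exception e) {
            // Log the full cause and exit non-zero so supervisors detect the failure
            LOGGER.error("start failed and application exit", e);
            System.exit(1);
        }
    }
}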

View File: ThirdPartBrokerController.java

@@ -32,7 +32,7 @@ import java.util.stream.Collectors;
*/
@Api(tags = "开放接口-Broker相关接口(REST)")
@RestController
@RequestMapping(ApiPrefix.API_V1_THIRD_PART_OP_PREFIX)
@RequestMapping(ApiPrefix.API_V1_THIRD_PART_PREFIX)
public class ThirdPartBrokerController {
@Autowired
private BrokerService brokerService;
@@ -44,7 +44,7 @@ public class ThirdPartBrokerController {
private ClusterService clusterService;
@ApiOperation(value = "Broker信息概览", notes = "")
@RequestMapping(value = "{clusterId}/brokers/{brokerId}/overview", method = RequestMethod.GET)
@GetMapping(value = "{clusterId}/brokers/{brokerId}/overview")
@ResponseBody
public Result<ThirdPartBrokerOverviewVO> getBrokerOverview(@PathVariable Long clusterId,
@PathVariable Integer brokerId) {
@@ -70,7 +70,7 @@ public class ThirdPartBrokerController {
}
@ApiOperation(value = "BrokerRegion信息", notes = "所有集群的")
@RequestMapping(value = "broker-regions", method = RequestMethod.GET)
@GetMapping(value = "broker-regions")
@ResponseBody
public Result<List<BrokerRegionVO>> getBrokerRegions() {
List<ClusterDO> clusterDOList = clusterService.list();

View File: SwaggerConfig.java

@@ -1,5 +1,7 @@
package com.xiaojukeji.kafka.manager.web.config;
import com.xiaojukeji.kafka.manager.service.utils.ConfigUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.config.annotation.*;
@@ -20,6 +22,9 @@ import springfox.documentation.swagger2.annotations.EnableSwagger2;
@EnableWebMvc
@EnableSwagger2
public class SwaggerConfig implements WebMvcConfigurer {
@Autowired
private ConfigUtils configUtils;
@Override
public void addResourceHandlers(ResourceHandlerRegistry registry) {
registry.addResourceHandler("swagger-ui.html").addResourceLocations("classpath:/META-INF/resources/");
@@ -39,10 +44,9 @@ public class SwaggerConfig implements WebMvcConfigurer {
private ApiInfo apiInfo() {
return new ApiInfoBuilder()
.title("Logi-KafkaManager 接口文档")
.description("欢迎使用滴滴Logi-KafkaManager")
.contact("huangyiminghappy@163.com")
.version("2.2.0")
.title("LogiKM接口文档")
.description("欢迎使用滴滴LogiKM")
.version(configUtils.getApplicationVersion())
.build();
}
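
Rather than hardcoding "2.2.0", the Swagger page now reports whatever version the injected ConfigUtils exposes, which this PR sets via spring.application.version: 2.6.0 in application.yml. The diff does not show ConfigUtils itself; the sketch below is one plausible backing for getApplicationVersion(), assuming a plain @Value binding (the property path and fallback are assumptions):

import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

@Component
public class ConfigUtils {
    // Assumed binding: pulls spring.application.version from application.yml,
    // defaulting to "unknown" when the property is missing
    @Value("${spring.application.version:unknown}")
    private String applicationVersion;

    public String getApplicationVersion() {
        return applicationVersion;
    }
}

This keeps the documented version in one place: bumping application.yml updates the Swagger title block without touching code.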

View File: AccountConverter.java

@@ -13,11 +13,19 @@ import java.util.List;
* @date 19/5/3
*/
public class AccountConverter {
private AccountConverter() {
}
public static AccountDO convert2AccountDO(AccountDTO dto) {
AccountDO accountDO = new AccountDO();
accountDO.setUsername(dto.getUsername());
accountDO.setPassword(dto.getPassword());
accountDO.setRole(dto.getRole());
// Tolerate requests where the frontend did not supply these fields
accountDO.setDepartment(dto.getDepartment() == null ? "" : dto.getDepartment());
accountDO.setMail(dto.getMail() == null ? "" : dto.getMail());
accountDO.setDisplayName(dto.getDisplayName() == null ? "" : dto.getDisplayName());
return accountDO;
}

View File: ReassignModelConverter.java

@@ -95,12 +95,21 @@ public class ReassignModelConverter {
vo.setBeginTime(0L);
vo.setEndTime(0L);
StringBuilder clusterAndTopicName = new StringBuilder();
Integer completedTopicNum = 0;
Set<Integer> statusSet = new HashSet<>();
for (ReassignTaskDO elem: doList) {
vo.setGmtCreate(elem.getGmtCreate().getTime());
vo.setOperator(elem.getOperator());
vo.setDescription(elem.getDescription());
if (clusterAndTopicName.length() == 0) {
clusterAndTopicName.append("-").append(elem.getClusterId()).append("-").append(elem.getTopicName());
} else {
clusterAndTopicName.append("");
}
if (TaskStatusReassignEnum.isFinished(elem.getStatus())) {
completedTopicNum += 1;
statusSet.add(elem.getStatus());
@@ -114,6 +123,9 @@ public class ReassignModelConverter {
vo.setBeginTime(elem.getBeginTime().getTime());
}
// Show the cluster ID and topic name in the task name; when there are multiple, only the first one is shown. PR from Hongten
vo.setTaskName(String.format("%s data migration task%s", DateUtils.getFormattedDate(taskId), clusterAndTopicName.toString()));
// Overall task status
if (statusSet.contains(TaskStatusReassignEnum.RUNNING.getCode())) {
vo.setStatus(TaskStatusReassignEnum.RUNNING.getCode());
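
Per the comment above, only the first cluster/topic pair contributes to the task name; the else branch intentionally appends nothing for subsequent entries. A worked example of the resulting name, with hypothetical values for the formatted task ID, cluster ID, and topic name:

// Assume DateUtils.getFormattedDate(taskId) yields "2022-03-07 14:49:52"
// and the first ReassignTaskDO has clusterId=1, topicName="order-events".
String taskName = String.format("%s data migration task%s",
        "2022-03-07 14:49:52", "-1-order-events");
// taskName -> "2022-03-07 14:49:52 data migration task-1-order-events"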

View File: MetricsRegistry.java

@@ -1,7 +1,6 @@
package com.xiaojukeji.kafka.manager.web.metrics;
import com.codahale.metrics.*;
import com.xiaojukeji.kafka.manager.common.constant.LogConstant;
import com.xiaojukeji.kafka.manager.common.utils.factory.DefaultThreadFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -21,7 +20,7 @@ import java.util.concurrent.TimeUnit;
*/
@Component
public class MetricsRegistry {
private static final Logger LOGGER = LoggerFactory.getLogger(LogConstant.API_METRICS_LOGGER);
private static final Logger LOGGER = LoggerFactory.getLogger(MetricsRegistry.class);
private static final DecimalFormat DECIMAL_FORMAT = new DecimalFormat("#");
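
Replacing the shared named logger (LogConstant.API_METRICS_LOGGER) with a per-class logger works together with the logback change later in this PR, where the named API_METRICS_LOGGER logger declaration is replaced by one scoped to the com.xiaojukeji.kafka.manager.web.metrics package. Logback resolves loggers by dotted-name hierarchy, so the class logger still lands in the API_METRICS_LOGGER appender; a small sketch with a hypothetical message:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class MetricsLoggingDemo {
    // Logger name "com.xiaojukeji.kafka.manager.web.metrics.MetricsRegistry" is a
    // child of the configured "com.xiaojukeji.kafka.manager.web.metrics" logger,
    // so its output is routed to that logger's appender.
    private static final Logger LOGGER =
            LoggerFactory.getLogger("com.xiaojukeji.kafka.manager.web.metrics.MetricsRegistry");

    public static void main(String[] args) {
        LOGGER.info("api metrics sample");  // hypothetical record
    }
}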

View File: application.yml

@@ -9,6 +9,7 @@ server:
spring:
application:
name: kafkamanager
version: 2.6.0
profiles:
active: dev
datasource:
@@ -30,27 +31,57 @@ logging:
custom:
idc: cn
jmx:
max-conn: 10 # Since v2.3 this setting no longer takes effect here
store-metrics-task:
community:
broker-metrics-enabled: true
topic-metrics-enabled: true
didi:
didi: # Metrics specific to Didi Kafka
app-topic-metrics-enabled: false
topic-request-time-metrics-enabled: false
topic-throttled-metrics: false
save-days: 7
topic-throttled-metrics-enabled: false
# Task-related switches
# Task-related configuration
task:
op:
sync-topic-enabled: false # Periodically sync topics that have not been persisted into the DB
order-auto-exec: # Switches for the automatic order-approval thread
topic-enabled: false # Automatic approval of topic orders; false: disabled, true: enabled
app-enabled: false # Automatic approval of app orders; false: disabled, true: enabled
sync-topic-enabled: false # Periodically sync topics that have not been persisted into the DB
order-auto-exec: # Switches for the automatic order-approval thread
topic-enabled: false # Automatic approval of topic orders; false: disabled, true: enabled
app-enabled: false # Automatic approval of app orders; false: disabled, true: enabled
metrics:
collect: # Collect metrics
broker-metrics-enabled: true # Collect broker metrics
sink: # Report (sink) metrics
cluster-metrics: # Sink cluster metrics
sink-db-enabled: true # Sink to the DB
broker-metrics: # Sink broker metrics
sink-db-enabled: true # Sink to the DB
delete: # Delete expired metrics
delete-limit-size: 1000 # Batch size per delete
cluster-metrics-save-days: 14 # Days to retain cluster metrics
broker-metrics-save-days: 14 # Days to retain broker metrics
topic-metrics-save-days: 7 # Days to retain topic metrics
topic-request-time-metrics-save-days: 7 # Days to retain topic request-time metrics
topic-throttled-metrics-save-days: 7 # Days to retain topic throttling metrics
app-topic-metrics-save-days: 7 # Days to retain app+topic metrics
thread-pool:
collect-metrics:
thread-num: 256 # Metrics-collection thread pool size
queue-size: 5000 # Metrics-collection thread pool queue size
api-call:
thread-num: 16 # API thread pool size
queue-size: 5000 # API thread pool queue size
client-pool:
kafka-consumer:
min-idle-client-num: 24 # Minimum number of idle clients
max-idle-client-num: 24 # Maximum number of idle clients
max-total-client-num: 24 # Maximum total number of clients
borrow-timeout-unit-ms: 3000 # Borrow timeout in milliseconds
account:
jump-login:
gateway-api: false # Skip login for gateway APIs
third-part-api: false # Skip login for third-party APIs
ldap:
enabled: false
url: ldap://127.0.0.1:389/
@@ -64,19 +95,20 @@ account:
auth-user-registration: true
auth-user-registration-role: normal
kcm:
enabled: false
s3:
kcm: # Cluster installation and deployment; only brokers are installed
enabled: false # Whether to enable
s3: # S3 storage service
endpoint: s3.didiyunapi.com
access-key: 1234567890
secret-key: 0987654321
bucket: logi-kafka
n9e:
base-url: http://127.0.0.1:8004
user-token: 12345678
timeout: 300
account: root
script-file: kcm_script.sh
n9e: # Nightingale
base-url: http://127.0.0.1:8004 # Nightingale job service address
user-token: 12345678 # User token
timeout: 300 # Per-host operation timeout
account: root # Account used for operations
script-file: kcm_script.sh # The script ships with the kcm module in the source tree; no need to change this
logikm-url: http://127.0.0.1:8080 # LogiKM deployment address; during deployment, kcm_script.sh calls LogiKM to check deployment status
monitor:
enabled: false
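
The new client-pool kafka-consumer block describes an object pool of reusable Kafka consumers. The consuming code is not part of this diff; the sketch below shows one way such values could be bound and turned into an Apache commons-pool2 configuration. The class name, the exact property prefix (the rendered diff flattens YAML nesting), and the wiring are all assumptions:

import org.apache.commons.pool2.impl.GenericObjectPoolConfig;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;

@Component
@ConfigurationProperties(prefix = "client-pool.kafka-consumer")
public class KafkaConsumerPoolProperties {
    private int minIdleClientNum;     // min-idle-client-num
    private int maxIdleClientNum;     // max-idle-client-num
    private int maxTotalClientNum;    // max-total-client-num
    private long borrowTimeoutUnitMs; // borrow-timeout-unit-ms

    // Map the YAML values onto a commons-pool2 config (commons-pool2 2.5+)
    public GenericObjectPoolConfig<Object> toPoolConfig() {
        GenericObjectPoolConfig<Object> cfg = new GenericObjectPoolConfig<>();
        cfg.setMinIdle(minIdleClientNum);
        cfg.setMaxIdle(maxIdleClientNum);
        cfg.setMaxTotal(maxTotalClientNum);
        cfg.setMaxWaitMillis(borrowTimeoutUnitMs); // fail borrow() after the timeout
        return cfg;
    }

    // Setters are required for relaxed binding of the kebab-case keys
    public void setMinIdleClientNum(int v) { this.minIdleClientNum = v; }
    public void setMaxIdleClientNum(int v) { this.maxIdleClientNum = v; }
    public void setMaxTotalClientNum(int v) { this.maxTotalClientNum = v; }
    public void setBorrowTimeoutUnitMs(long v) { this.borrowTimeoutUnitMs = v; }
}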

View File: logback configuration (XML)

@@ -131,15 +131,15 @@
</filter>
</appender>
<!-- Metrics信息收集日志 -->
<appender name="COLLECTOR_METRICS_LOGGER" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/metrics/collector_metrics.log</file>
<!-- Task模块相关日志 -->
<appender name="TASK_LOGGER" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/log_task.log</file>
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
<charset>UTF-8</charset>
</encoder>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${log.path}/metrics/collector_metrics_%d{yyyy-MM-dd}.%i.log</fileNamePattern>
<fileNamePattern>${log.path}/log_task_%d{yyyy-MM-dd}.%i.log</fileNamePattern>
<timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
<maxFileSize>100MB</maxFileSize>
</timeBasedFileNamingAndTriggeringPolicy>
@@ -147,15 +147,15 @@
</rollingPolicy>
</appender>
<!-- Metrics信息收集日志 -->
<!-- Api-Metrics信息相关日志 -->
<appender name="API_METRICS_LOGGER" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/metrics/api_metrics.log</file>
<file>${log.path}/api_metrics.log</file>
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
<charset>UTF-8</charset>
</encoder>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${log.path}/metrics/api_metrics_%d{yyyy-MM-dd}.%i.log</fileNamePattern>
<fileNamePattern>${log.path}/api_metrics_%d{yyyy-MM-dd}.%i.log</fileNamePattern>
<timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
<maxFileSize>100MB</maxFileSize>
</timeBasedFileNamingAndTriggeringPolicy>
@@ -163,31 +163,13 @@
</rollingPolicy>
</appender>
<!-- Metrics信息收集日志 -->
<appender name="SCHEDULED_TASK_LOGGER" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/metrics/scheduled_tasks.log</file>
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
<charset>UTF-8</charset>
</encoder>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${log.path}/metrics/scheduled_tasks_%d{yyyy-MM-dd}.%i.log</fileNamePattern>
<timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
<maxFileSize>100MB</maxFileSize>
</timeBasedFileNamingAndTriggeringPolicy>
<maxHistory>5</maxHistory>
</rollingPolicy>
</appender>
<logger name="com.xiaojukeji.kafka.manager.task" level="INFO" additivity="false">
<appender-ref ref="TASK_LOGGER"/>
</logger>
<logger name="COLLECTOR_METRICS_LOGGER" level="DEBUG" additivity="false">
<appender-ref ref="COLLECTOR_METRICS_LOGGER"/>
</logger>
<logger name="API_METRICS_LOGGER" level="DEBUG" additivity="false">
<logger name="com.xiaojukeji.kafka.manager.web.metrics" level="INFO" additivity="false">
<appender-ref ref="API_METRICS_LOGGER"/>
</logger>
<logger name="SCHEDULED_TASK_LOGGER" level="DEBUG" additivity="false">
<appender-ref ref="SCHEDULED_TASK_LOGGER"/>
</logger>
<logger name="org.apache.ibatis" level="INFO" additivity="false" />
<logger name="org.mybatis.spring" level="INFO" additivity="false" />
@@ -199,17 +181,6 @@
<appender-ref ref="INFO_FILE" />
<appender-ref ref="WARN_FILE" />
<appender-ref ref="ERROR_FILE" />
<!--<appender-ref ref="METRICS_LOG" />-->
<!-- <appender-ref ref="TASK_LOGGER" />-->
</root>
<!--生产环境:输出到文件-->
<!--<springProfile name="pro">-->
<!--<root level="info">-->
<!--<appender-ref ref="CONSOLE" />-->
<!--<appender-ref ref="DEBUG_FILE" />-->
<!--<appender-ref ref="INFO_FILE" />-->
<!--<appender-ref ref="ERROR_FILE" />-->
<!--<appender-ref ref="WARN_FILE" />-->
<!--</root>-->
<!--</springProfile>-->
</configuration>