v2.8.0_e initialization

1. Test code; open-source users are advised not to use it.
2. Contains the Kafka-HA (active/standby) related features.
3. The 2.8.0_e branch was not cut from v2.6.0; it was cut from the master branch at commit-id 462303fca0. The reason is that the v2.6.0 code is not the latest: the latest 2.x code is the code at commit 462303fca0.
Author: zengqiao
Date: 2023-02-13 16:35:43 +08:00
Parent: 462303fca0
Commit: e81c0f3040
178 changed files with 9938 additions and 1674 deletions

View File

@@ -33,8 +33,8 @@
         </dependency>
         <dependency>
-            <groupId>org.mybatis.spring.boot</groupId>
-            <artifactId>mybatis-spring-boot-starter</artifactId>
+            <groupId>com.baomidou</groupId>
+            <artifactId>mybatis-plus-boot-starter</artifactId>
         </dependency>
         <dependency>
             <groupId>mysql</groupId>

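The hunk above swaps mybatis-spring-boot-starter for mybatis-plus-boot-starter; mybatis-plus pulls in MyBatis and mybatis-spring transitively, so the existing SqlSession-backed DAOs keep working while the new HA DAOs added below can extend BaseMapper. A minimal sketch of the wiring this assumes (the bootstrap class is hypothetical and not part of this commit; the project may already scan these packages elsewhere):

// Hypothetical wiring sketch: one @MapperScan registers the new BaseMapper-based HA DAOs.
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

@SpringBootApplication
@MapperScan("com.xiaojukeji.kafka.manager.dao.ha") // package of the HA mappers added below
public class ExampleApplication {
    public static void main(String[] args) {
        SpringApplication.run(ExampleApplication.class, args);
    }
}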
View File

@@ -25,6 +25,7 @@ public interface AuthorityDao {
List<AuthorityDO> getAuthority(Long clusterId, String topicName, String appId);
List<AuthorityDO> getAuthorityByTopic(Long clusterId, String topicName);
List<AuthorityDO> getAuthorityByTopicFromCache(Long clusterId, String topicName);
List<AuthorityDO> getByAppId(String appId);

View File

@@ -49,6 +49,28 @@ public class AuthorityDaoImpl implements AuthorityDao {
return sqlSession.selectList("AuthorityDao.getAuthorityByTopic", params);
}
@Override
public List<AuthorityDO> getAuthorityByTopicFromCache(Long clusterId, String topicName) {
updateAuthorityCache();
List<AuthorityDO> doList = new ArrayList<>();
for (Map<Long, Map<String, AuthorityDO>> authMap: AUTHORITY_MAP.values()) {
Map<String, AuthorityDO> doMap = authMap.get(clusterId);
if (doMap == null) {
continue;
}
AuthorityDO authorityDO = doMap.get(topicName);
if (authorityDO == null) {
continue;
}
doList.add(authorityDO);
}
return doList;
}
@Override
public List<AuthorityDO> getByAppId(String appId) {
updateAuthorityCache();

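The getAuthorityByTopicFromCache hunk above depends on an AUTHORITY_MAP cache and an updateAuthorityCache() refresher that lie outside the shown lines. Judging from the loop, the cache is keyed appId → clusterId → topicName. A minimal sketch of that assumed shape, written as a fragment of AuthorityDaoImpl (the refresh statement id and the getter names on AuthorityDO are assumptions, not taken from this diff):

// Assumed cache shape inferred from the loop above: appId -> clusterId -> topicName -> AuthorityDO.
private static final Map<String, Map<Long, Map<String, AuthorityDO>>> AUTHORITY_MAP = new ConcurrentHashMap<>();

// Hypothetical refresher: reload all authorities and rebuild the nested map before each cache read.
private void updateAuthorityCache() {
    List<AuthorityDO> authorityList = sqlSession.selectList("AuthorityDao.listAll"); // statement id assumed
    Map<String, Map<Long, Map<String, AuthorityDO>>> newMap = new ConcurrentHashMap<>();
    for (AuthorityDO authorityDO : authorityList) {
        newMap.computeIfAbsent(authorityDO.getAppId(), appId -> new ConcurrentHashMap<>())
              .computeIfAbsent(authorityDO.getClusterId(), clusterId -> new ConcurrentHashMap<>())
              .put(authorityDO.getTopicName(), authorityDO);
    }
    AUTHORITY_MAP.clear();
    AUTHORITY_MAP.putAll(newMap);
}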
View File

@@ -0,0 +1,12 @@
package com.xiaojukeji.kafka.manager.dao.ha;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.xiaojukeji.kafka.manager.common.entity.pojo.ha.HaASRelationDO;
import org.springframework.stereotype.Repository;
/**
* Active/standby (HA) relation information
*/
@Repository
public interface HaASRelationDao extends BaseMapper<HaASRelationDO> {
}

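HaASRelationDao only extends BaseMapper, so callers get generic CRUD and conditional queries without any SQL statements in the XML (the HA relation mapper XML added further down declares only a result map). A minimal usage sketch; the service class is illustrative, and the column name comes from that result map:

// Illustrative only: list the HA relations of one active cluster via the inherited BaseMapper API.
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.xiaojukeji.kafka.manager.common.entity.pojo.ha.HaASRelationDO;
import com.xiaojukeji.kafka.manager.dao.ha.HaASRelationDao;
import java.util.List;

public class HaRelationQueryExample {                            // hypothetical caller
    private final HaASRelationDao haASRelationDao;

    public HaRelationQueryExample(HaASRelationDao haASRelationDao) {
        this.haASRelationDao = haASRelationDao;
    }

    public List<HaASRelationDO> listByActiveCluster(Long activeClusterPhyId) {
        QueryWrapper<HaASRelationDO> wrapper = new QueryWrapper<>();
        wrapper.eq("active_cluster_phy_id", activeClusterPhyId); // column from the result map below
        return haASRelationDao.selectList(wrapper);              // CRUD inherited from BaseMapper
    }
}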
View File

@@ -0,0 +1,17 @@
package com.xiaojukeji.kafka.manager.dao.ha;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.xiaojukeji.kafka.manager.common.entity.pojo.ha.HaASSwitchJobDO;
import org.springframework.stereotype.Repository;
import java.util.List;
/**
* Active/standby (HA) switchover job
*/
@Repository
public interface HaASSwitchJobDao extends BaseMapper<HaASSwitchJobDO> {
int addAndSetId(HaASSwitchJobDO jobDO);
List<HaASSwitchJobDO> listAllLatest();
}

View File

@@ -0,0 +1,12 @@
package com.xiaojukeji.kafka.manager.dao.ha;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.xiaojukeji.kafka.manager.common.entity.pojo.ha.HaASSwitchSubJobDO;
import org.springframework.stereotype.Repository;
/**
* Active/standby (HA) switchover sub-job
*/
@Repository
public interface HaASSwitchSubJobDao extends BaseMapper<HaASSwitchSubJobDO> {
}

View File

@@ -0,0 +1,12 @@
package com.xiaojukeji.kafka.manager.dao.ha;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.xiaojukeji.kafka.manager.common.entity.pojo.ha.JobLogDO;
import org.springframework.stereotype.Repository;
/**
* Job logs. Normally this should sit alongside TopicDao and the other DAOs, but since it uses mybatis-plus it is temporarily placed here.
*/
@Repository
public interface JobLogDao extends BaseMapper<JobLogDO> {
}

View File

@@ -23,7 +23,11 @@ public class ClusterDaoImpl implements ClusterDao {
     @Override
     public int insert(ClusterDO clusterDO) {
-        return sqlSession.insert("ClusterDao.insert", clusterDO);
+        if (clusterDO.getId() != null) {
+            return sqlSession.insert("ClusterDao.insertWithId", clusterDO);
+        } else {
+            return sqlSession.insert("ClusterDao.insert", clusterDO);
+        }
     }
     @Override

View File

@@ -15,6 +15,14 @@
<result column="jmx_properties" property="jmxProperties" />
</resultMap>
<insert id="insertWithId" parameterType="com.xiaojukeji.kafka.manager.common.entity.pojo.ClusterDO">
INSERT INTO cluster (
id, cluster_name, zookeeper, bootstrap_servers, security_properties, jmx_properties
) VALUES (
#{id}, #{clusterName}, #{zookeeper}, #{bootstrapServers}, #{securityProperties}, #{jmxProperties}
)
</insert>
<insert id="insert"
parameterType="com.xiaojukeji.kafka.manager.common.entity.pojo.ClusterDO"
useGeneratedKeys="true"

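The two hunks above work together: when a ClusterDO arrives with an id already set, ClusterDaoImpl routes it to the new ClusterDao.insertWithId statement, which writes that id verbatim instead of relying on auto-increment; with no id, the original auto-increment path is used. Presumably this lets the HA tooling create a cluster record with the same id on both the active and standby side, though the commit does not say so. An illustrative fragment, assuming an injected ClusterDao clusterDao (setter names on ClusterDO are assumed from the column mapping):

// Illustrative only: preset id -> insertWithId path; null id -> classic auto-increment insert.
ClusterDO standbyDO = new ClusterDO();
standbyDO.setId(100L);                         // preset id, so ClusterDao.insertWithId is chosen
standbyDO.setClusterName("example-standby");   // hypothetical values
standbyDO.setZookeeper("zk1:2181");
standbyDO.setBootstrapServers("kafka1:9092");
clusterDao.insert(standbyDO);

ClusterDO activeDO = new ClusterDO();          // id left null, so ClusterDao.insert is chosen
activeDO.setClusterName("example-active");
activeDO.setZookeeper("zk2:2181");
activeDO.setBootstrapServers("kafka2:9092");
clusterDao.insert(activeDO);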
View File

@@ -0,0 +1,18 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
"http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.xiaojukeji.kafka.manager.dao.ha.HaASRelationDao">
<resultMap id="HaASRelationMap" type="com.xiaojukeji.kafka.manager.common.entity.pojo.ha.HaASRelationDO">
<id column="id" property="id" />
<result column="create_time" property="createTime" />
<result column="modify_time" property="modifyTime" />
<result column="active_cluster_phy_id" property="activeClusterPhyId" />
<result column="active_res_name" property="activeResName" />
<result column="standby_cluster_phy_id" property="standbyClusterPhyId" />
<result column="standby_res_name" property="standbyResName" />
<result column="res_type" property="resType" />
<result column="status" property="status" />
<result column="unique_field" property="uniqueField" />
</resultMap>
</mapper>

View File

@@ -0,0 +1,32 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
"http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.xiaojukeji.kafka.manager.dao.ha.HaASSwitchJobDao">
<resultMap id="HaASSwitchJobMap" type="com.xiaojukeji.kafka.manager.common.entity.pojo.ha.HaASSwitchJobDO">
<id column="id" property="id" />
<result column="create_time" property="createTime" />
<result column="modify_time" property="modifyTime" />
<result column="active_cluster_phy_id" property="activeClusterPhyId" />
<result column="standby_cluster_phy_id" property="standbyClusterPhyId" />
<result column="job_status" property="jobStatus" />
<result column="operator" property="operator" />
</resultMap>
<insert id="addAndSetId"
parameterType="com.xiaojukeji.kafka.manager.common.entity.pojo.ha.HaASSwitchJobDO"
useGeneratedKeys="true"
keyProperty="id">
INSERT INTO ha_active_standby_switch_job
(active_cluster_phy_id, standby_cluster_phy_id, job_status, operator)
VALUES
(#{activeClusterPhyId}, #{standbyClusterPhyId}, #{jobStatus}, #{operator})
</insert>
<select id="listAllLatest" resultMap="HaASSwitchJobMap">
SELECT Table_B.* FROM
(SELECT max(`id`) as id FROM `ha_active_standby_switch_job` GROUP BY `active_cluster_phy_id`, `standby_cluster_phy_id`) AS Table_A
JOIN `ha_active_standby_switch_job` AS Table_B ON
Table_A.id=Table_B.id;
</select>
</mapper>

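The listAllLatest query above selects, for every (active_cluster_phy_id, standby_cluster_phy_id) pair, the row with the largest id in ha_active_standby_switch_job, i.e. the most recent switchover job per cluster pair, by grouping on the pair and joining max(id) back onto the table. A sketch of how a caller might index that result per pair (getter names are assumed from the result map; the key format is illustrative):

// Illustrative only: map each active/standby pair to its newest switchover job.
import com.xiaojukeji.kafka.manager.common.entity.pojo.ha.HaASSwitchJobDO;
import com.xiaojukeji.kafka.manager.dao.ha.HaASSwitchJobDao;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;

public class HaSwitchJobQueryExample {                     // hypothetical caller
    public Map<String, HaASSwitchJobDO> latestJobPerPair(HaASSwitchJobDao haASSwitchJobDao) {
        List<HaASSwitchJobDO> latestJobs = haASSwitchJobDao.listAllLatest();
        return latestJobs.stream().collect(Collectors.toMap(
                job -> job.getActiveClusterPhyId() + "->" + job.getStandbyClusterPhyId(), // illustrative key
                Function.identity()));
    }
}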
View File

@@ -0,0 +1,19 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
"http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.xiaojukeji.kafka.manager.dao.ha.HaASSwitchSubJobDao">
<resultMap id="HaASSwitchSubJobMap" type="com.xiaojukeji.kafka.manager.common.entity.pojo.ha.HaASSwitchSubJobDO">
<id column="id" property="id" />
<result column="create_time" property="createTime" />
<result column="modify_time" property="modifyTime" />
<result column="job_id" property="jobId" />
<result column="active_cluster_phy_id" property="activeClusterPhyId" />
<result column="active_res_name" property="activeResName" />
<result column="standby_cluster_phy_id" property="standbyClusterPhyId" />
<result column="standby_res_name" property="standbyResName" />
<result column="res_type" property="resType" />
<result column="job_status" property="jobStatus" />
<result column="extend_data" property="extendData" />
</resultMap>
</mapper>

View File

@@ -0,0 +1,15 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
"http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.xiaojukeji.kafka.manager.dao.ha.JobLogDao">
<resultMap id="JobLogMap" type="com.xiaojukeji.kafka.manager.common.entity.pojo.ha.JobLogDO">
<id column="id" property="id" />
<result column="create_time" property="createTime" />
<result column="modify_time" property="modifyTime" />
<result column="biz_type" property="bizType" />
<result column="biz_keyword" property="bizKeyword" />
<result column="print_time" property="printTime" />
<result column="content" property="content" />
</resultMap>
</mapper>

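JobLogDao likewise only inherits from BaseMapper, so writing a log line is a plain insert. A minimal sketch of appending one entry for a switchover job; the setter names follow the result map above, while the field types and the biz_type value are assumptions, since neither appears in this diff:

// Illustrative only: append one log line for an HA switchover job through BaseMapper.insert.
import com.xiaojukeji.kafka.manager.common.entity.pojo.ha.JobLogDO;
import com.xiaojukeji.kafka.manager.dao.ha.JobLogDao;
import java.util.Date;

public class JobLogExample {                        // hypothetical caller
    public void appendLog(JobLogDao jobLogDao, Long switchJobId, String content) {
        JobLogDO logDO = new JobLogDO();
        logDO.setBizType(1);                        // assumed type/meaning: 1 = HA switchover job
        logDO.setBizKeyword(String.valueOf(switchJobId));
        logDO.setPrintTime(new Date());             // field type assumed to be java.util.Date
        logDO.setContent(content);
        jobLogDao.insert(logDO);                    // CRUD inherited from BaseMapper<JobLogDO>
    }
}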
View File

@@ -16,7 +16,10 @@
<result column="description" property="description" />
</resultMap>
<insert id="insert" parameterType="com.xiaojukeji.kafka.manager.common.entity.pojo.RegionDO">
<insert id="insert"
parameterType="com.xiaojukeji.kafka.manager.common.entity.pojo.RegionDO"
useGeneratedKeys="true"
keyProperty="id">
INSERT INTO region
(name, cluster_id, broker_list, status, description)
VALUES