v2.8.1_e initialization

1. Test code; open-source users should avoid using it.
2. Includes the Kafka-HA related features; on top of v2.8.0_e, adds support for switching by clientId.
3. Branch cut from v2.8.0_e.
zengqiao
2023-02-13 16:48:59 +08:00
parent e81c0f3040
commit b16a7b9bff
44 changed files with 1759 additions and 611 deletions


@@ -16,4 +16,6 @@ public interface TopicConnectionDao {
     List<TopicConnectionDO> getByTopicName(Long clusterId, String topicName, Date startTime, Date endTime);
     List<TopicConnectionDO> getByAppId(String appId, Date startTime, Date endTime);
+
+    List<TopicConnectionDO> getByClusterAndAppId(Long clusterId, String appId, Date startTime, Date endTime);
 }


@@ -58,4 +58,14 @@ public class TopicConnectionDaoImpl implements TopicConnectionDao {
         params.put("endTime", endTime);
         return sqlSession.selectList("TopicConnectionDao.getByAppId", params);
     }
+
+    @Override
+    public List<TopicConnectionDO> getByClusterAndAppId(Long clusterId, String appId, Date startTime, Date endTime) {
+        Map<String, Object> params = new HashMap<>(4);
+        params.put("appId", appId);
+        params.put("clusterId", clusterId);
+        params.put("startTime", startTime);
+        params.put("endTime", endTime);
+        return sqlSession.selectList("TopicConnectionDao.getByClusterAndAppId", params);
+    }
 }
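Taken together with the interface change above, this adds a per-cluster variant of the existing per-app connection query. A minimal sketch of how a caller might use it — the service class, its name, and the Spring wiring here are invented for illustration and are not part of this commit; only getByClusterAndAppId itself is:

import java.util.Date;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

// Hypothetical caller, sketched for illustration only.
@Service
public class TopicConnectionQueryService {

    @Autowired
    private TopicConnectionDao topicConnectionDao;

    // List the connections an app held on one physical cluster inside a time
    // window, e.g. to check whether its clients have drained before an HA switch.
    public List<TopicConnectionDO> listAppConnections(Long clusterId, String appId, Date start, Date end) {
        return topicConnectionDao.getByClusterAndAppId(clusterId, appId, start, end);
    }
}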


@@ -10,6 +10,8 @@
         <result column="active_cluster_phy_id" property="activeClusterPhyId" />
         <result column="standby_cluster_phy_id" property="standbyClusterPhyId" />
         <result column="job_status" property="jobStatus" />
+        <result column="type" property="type" />
+        <result column="extend_data" property="extendData" />
         <result column="operator" property="operator" />
     </resultMap>
@@ -18,9 +20,9 @@
             useGeneratedKeys="true"
             keyProperty="id">
         INSERT INTO ks_km_physical_cluster
-        (active_cluster_phy_id, standby_cluster_phy_id, job_status, operator)
+        (active_cluster_phy_id, standby_cluster_phy_id, job_status, `type`, extend_data, operator)
         VALUES
-        (#{activeClusterPhyId}, #{standbyClusterPhyId}, #{jobStatus}, #{operator})
+        (#{activeClusterPhyId}, #{standbyClusterPhyId}, #{jobStatus}, #{type}, #{extendData}, #{operator})
     </insert>
     <select id="listAllLatest" resultMap="HaASSwitchJobMap">
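The two new columns give a switch job a type and a free-form extend_data payload (presumably where the clientId list for clientId-based switching lives, per the commit message). A rough sketch of populating them before insert — the HaASSwitchJobDO class name and its setters are inferred from the resultMap's property names, the DAO method name is assumed, and every value, including the JSON shape of extendData, is an assumption:

// Sketch under assumptions: DO/DAO names inferred, values illustrative only.
// Given an assumed HaASSwitchJobDao haASSwitchJobDao:
HaASSwitchJobDO jobDO = new HaASSwitchJobDO();
jobDO.setActiveClusterPhyId(1L);
jobDO.setStandbyClusterPhyId(2L);
jobDO.setJobStatus(0);               // status codes are not shown in this diff
jobDO.setType(1);                    // hypothetical: e.g. 1 = switch by clientId
jobDO.setExtendData("{\"clientIdList\":[\"client-a\",\"client-b\"]}"); // assumed JSON payload
jobDO.setOperator("admin");
haASSwitchJobDao.insert(jobDO);      // assumed method backing the <insert> above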


@@ -10,31 +10,27 @@
         <result property="appId" column="app_id"/>
         <result property="ip" column="ip"/>
         <result property="clientVersion" column="client_version"/>
+        <result property="clientId" column="client_id"/>
+        <result property="realConnectTime" column="real_connect_time"/>
         <result property="createTime" column="create_time"/>
     </resultMap>
     <insert id="batchReplace" parameterType="java.util.List">
-        REPLACE INTO topic_connections (
-            cluster_id,
-            topic_name,
-            `type`,
-            app_id,
-            ip,
-            client_version,
-            create_time
-        )
-        VALUES
-        <foreach collection="list" item="item" index="index" separator=",">
-            (
-                #{item.clusterId},
-                #{item.topicName},
-                #{item.type},
-                #{item.appId},
-                #{item.ip},
-                #{item.clientVersion},
-                #{item.createTime}
-            )
+        insert into topic_connections (cluster_id, topic_name, `type`, app_id, ip, client_version, client_id, real_connect_time, create_time)
+        values
+        <foreach collection="list" item="item" separator=",">
+            (#{item.clusterId}, #{item.topicName}, #{item.type}, #{item.appId}, #{item.ip}, #{item.clientVersion}, #{item.clientId}, #{item.realConnectTime}, #{item.createTime})
         </foreach>
+        on duplicate key update
+            real_connect_time = IF(real_connect_time > VALUES(real_connect_time), real_connect_time, VALUES(real_connect_time)),
+            cluster_id = VALUES(cluster_id),
+            topic_name = VALUES(topic_name),
+            `type` = VALUES(`type`),
+            app_id = VALUES(app_id),
+            ip = VALUES(ip),
+            client_version = VALUES(client_version),
+            client_id = VALUES(client_id),
+            create_time = VALUES(create_time)
     </insert>
     <select id="getByTopicName" parameterType="java.util.Map" resultMap="TopicConnectionMap">
@@ -53,4 +49,14 @@
             AND create_time >= #{startTime} AND #{endTime} >= create_time
             ]]>
     </select>
+
+    <select id="getByClusterAndAppId" parameterType="java.util.Map" resultMap="TopicConnectionMap">
+        <![CDATA[
+            SELECT * FROM topic_connections
+            WHERE app_id = #{appId}
+            AND cluster_id = #{clusterId}
+            AND create_time >= #{startTime}
+            AND #{endTime} >= create_time
+        ]]>
+    </select>
 </mapper>
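The switch from REPLACE INTO to insert ... on duplicate key update is the substantive change in this mapper: REPLACE deletes and re-inserts a conflicting row, whereas the upsert updates it in place, and the IF(...) expression keeps whichever real_connect_time is later, so a stale report can never move a connection's last-seen time backwards. This relies on a unique key over the connection's identity columns (presumably extended to cover client_id; the key definition is not part of this diff). A sketch of feeding the batch upsert, assuming the DAO exposes a batchReplace method matching the statement id — setters are inferred from the resultMap and all values are invented:

import java.util.ArrayList;
import java.util.Date;
import java.util.List;

// Given a TopicConnectionDao topicConnectionDao (e.g. injected):
List<TopicConnectionDO> batch = new ArrayList<>();

TopicConnectionDO conn = new TopicConnectionDO();
conn.setClusterId(1L);
conn.setTopicName("order-events");
conn.setType(0);                        // produce/consume flag; encoding not shown in this diff
conn.setAppId("app-001");
conn.setIp("10.0.0.12");
conn.setClientVersion("2.4.1");
conn.setClientId("order-producer-1");   // new field in this version
conn.setRealConnectTime(new Date());    // new field: when the client was last actually seen
conn.setCreateTime(new Date());
batch.add(conn);

// Replaying an older record will not move real_connect_time backwards,
// thanks to the IF(...) guard in the on duplicate key update clause.
topicConnectionDao.batchReplace(batch);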