[Optimize]优化Topic元信息更新策略(#806)

This commit is contained in:
zengqiao
2022-12-04 17:53:31 +08:00
committed by EricZeng
parent 2c82baf9fc
commit 4293d05fca
9 changed files with 99 additions and 53 deletions

View File

@@ -14,6 +14,11 @@ import java.io.Serializable;
@NoArgsConstructor
@AllArgsConstructor
public class TopicConfig implements Serializable {
/**
* 表主键ID
*/
private Long id;
/**
* 物理集群ID
*/

View File

@@ -5,6 +5,8 @@ import com.xiaojukeji.know.streaming.km.common.bean.po.BasePO;
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
import lombok.Data;
import java.util.Objects;
@Data
@TableName(Constant.MYSQL_TABLE_NAME_PREFIX + "topic")
public class TopicPO extends BasePO {
@@ -52,4 +54,35 @@ public class TopicPO extends BasePO {
* 备注信息
*/
private String description;
@Override
public boolean equals(Object o) {
    // Reflexive: the same reference is always equal.
    if (this == o) {
        return true;
    }
    // Exact-class comparison (not instanceof) keeps equals symmetric across
    // subclasses; super.equals must also agree before comparing own fields.
    if (o == null || getClass() != o.getClass() || !super.equals(o)) {
        return false;
    }
    TopicPO that = (TopicPO) o;
    // Field-by-field, null-safe comparison — must stay in sync with hashCode().
    return Objects.equals(clusterPhyId, that.clusterPhyId)
            && Objects.equals(topicName, that.topicName)
            && Objects.equals(replicaNum, that.replicaNum)
            && Objects.equals(partitionNum, that.partitionNum)
            && Objects.equals(brokerIds, that.brokerIds)
            && Objects.equals(partitionMap, that.partitionMap)
            && Objects.equals(retentionMs, that.retentionMs)
            && Objects.equals(type, that.type)
            && Objects.equals(description, that.description);
}
@Override
public int hashCode() {
    // Same participants as equals(): the superclass hash plus every compared
    // field. The accumulation below is exactly the algorithm used by
    // Objects.hash()/Arrays.hashCode(), so produced values are unchanged.
    Object[] parts = {
            super.hashCode(), clusterPhyId, topicName, replicaNum, partitionNum,
            brokerIds, partitionMap, retentionMs, type, description
    };
    int result = 1;
    for (Object part : parts) {
        result = 31 * result + (part == null ? 0 : part.hashCode());
    }
    return result;
}
}

View File

@@ -55,10 +55,6 @@ public class TopicConverter {
* 仅合并Topic的元信息部分业务信息和配置信息部分不合并
*/
public static TopicPO mergeAndOnlyMetadata2NewTopicPO(Topic newTopicData, TopicPO oldDBTopicPO) {
if (newTopicData == null) {
return null;
}
TopicPO newTopicPO = new TopicPO();
newTopicPO.setId(oldDBTopicPO != null? oldDBTopicPO.getId(): null);
@@ -68,6 +64,7 @@ public class TopicConverter {
newTopicPO.setReplicaNum(newTopicData.getReplicaNum());
newTopicPO.setBrokerIds(CommonUtils.intList2String(new ArrayList<>(newTopicData.getBrokerIdSet())));
newTopicPO.setType(newTopicData.getType());
newTopicPO.setPartitionMap(ConvertUtil.obj2Json(newTopicData.getPartitionMap()));
if (newTopicData.getCreateTime() != null) {
newTopicPO.setCreateTime(new Date(newTopicData.getCreateTime()));
@@ -77,8 +74,8 @@ public class TopicConverter {
newTopicPO.setUpdateTime(oldDBTopicPO != null? oldDBTopicPO.getUpdateTime(): new Date());
}
newTopicPO.setPartitionMap(ConvertUtil.obj2Json(newTopicData.getPartitionMap()));
newTopicPO.setDescription(oldDBTopicPO != null? oldDBTopicPO.getDescription(): null);
newTopicPO.setRetentionMs(oldDBTopicPO != null? oldDBTopicPO.getRetentionMs(): null);
return newTopicPO;
}

View File

@@ -22,6 +22,7 @@ public interface TopicService {
* 从DB获取数据
*/
List<Topic> listTopicsFromDB(Long clusterPhyId);
List<TopicPO> listTopicPOsFromDB(Long clusterPhyId);
Topic getTopic(Long clusterPhyId, String topicName);
List<String> listRecentUpdateTopicNamesFromDB(Long clusterPhyId, Integer time); // 获取集群最近新增Topic的topic名称time单位为秒
@@ -39,6 +40,6 @@ public interface TopicService {
int addNewTopic2DB(TopicPO po);
int deleteTopicInDB(Long clusterPhyId, String topicName);
void batchReplaceMetadata(Long clusterPhyId, List<Topic> presentTopicList);
int batchReplaceConfig(Long clusterPhyId, List<TopicConfig> topicConfigList);
int batchReplaceChangedConfig(Long clusterPhyId, List<TopicConfig> topicConfigList);
Result<Void> updatePartitionNum(Long clusterPhyId, String topicName, Integer partitionNum);
}

View File

@@ -10,7 +10,6 @@ import com.xiaojukeji.know.streaming.km.common.bean.entity.param.config.KafkaTop
import com.xiaojukeji.know.streaming.km.common.bean.entity.param.topic.TopicParam;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.ResultStatus;
import com.xiaojukeji.know.streaming.km.common.bean.entity.topic.Topic;
import com.xiaojukeji.know.streaming.km.common.constant.MsgConstant;
import com.xiaojukeji.know.streaming.km.common.constant.KafkaConstant;
import com.xiaojukeji.know.streaming.km.common.constant.kafka.*;
@@ -185,11 +184,9 @@ public class TopicConfigServiceImpl extends BaseVersionControlService implements
private Result<Properties> getTopicConfigByZKClient(Long clusterPhyId, String topicName) {
try {
Topic topic = topicService.getTopic(clusterPhyId, topicName);
KafkaZkClient kafkaZkClient = kafkaAdminZKClient.getClient(clusterPhyId);
Properties properties = kafkaZkClient.getEntityConfigs("topics", topic.getTopicName());
Properties properties = kafkaZkClient.getEntityConfigs("topics", topicName);
for (Object key: properties.keySet()) {
properties.getProperty((String) key);
}
@@ -209,12 +206,10 @@ public class TopicConfigServiceImpl extends BaseVersionControlService implements
try {
AdminClient adminClient = kafkaAdminClient.getClient(param.getClusterPhyId());
Topic metadata = topicService.getTopic(param.getClusterPhyId(), param.getTopicName());
ConfigResource configResource = new ConfigResource(ConfigResource.Type.TOPIC, metadata.getTopicName());
ConfigResource configResource = new ConfigResource(ConfigResource.Type.TOPIC, param.getTopicName());
DescribeConfigsResult describeConfigsResult = adminClient.describeConfigs(
Arrays.asList(configResource),
buildDescribeConfigsOptions()
Collections.singletonList(configResource),
buildDescribeConfigsOptions().timeoutMs(KafkaConstant.ADMIN_CLIENT_REQUEST_TIME_OUT_UNIT_MS)
);
Map<ConfigResource, Config> configMap = describeConfigsResult.all().get();

View File

@@ -101,7 +101,15 @@ public class TopicServiceImpl implements TopicService {
@Override
public List<Topic> listTopicsFromDB(Long clusterPhyId) {
    // Fix: the diff residue left both the old and the new return statement in
    // place, making the second one unreachable (a compile error in Java).
    // Keep only the post-commit line: load the PO rows through the shared
    // listTopicPOsFromDB query and convert them to the Topic entity.
    return TopicConverter.convert2TopicList(this.listTopicPOsFromDB(clusterPhyId));
}
@Override
public List<TopicPO> listTopicPOsFromDB(Long clusterPhyId) {
    // Fetch every topic row belonging to the given physical cluster.
    return topicDAO.selectList(
            new LambdaQueryWrapper<TopicPO>().eq(TopicPO::getClusterPhyId, clusterPhyId)
    );
}
@Override
@@ -182,39 +190,46 @@ public class TopicServiceImpl implements TopicService {
@Override
public void batchReplaceMetadata(Long clusterPhyId, List<Topic> presentTopicList) {
Map<String, Topic> presentTopicMap = presentTopicList.stream().collect(Collectors.toMap(Topic::getTopicName, Function.identity()));
List<TopicPO> dbTopicPOList = this.getTopicsFromDB(clusterPhyId);
Map<String, TopicPO> inDBMap = this.listTopicPOsFromDB(clusterPhyId).stream().collect(Collectors.toMap(TopicPO::getTopicName, Function.identity()));
// 新旧合并
for (TopicPO dbTopicPO: dbTopicPOList) {
Topic topic = presentTopicMap.remove(dbTopicPO.getTopicName());
if (topic == null) {
topicDAO.deleteById(dbTopicPO.getId());
continue;
}
topicDAO.updateById(TopicConverter.mergeAndOnlyMetadata2NewTopicPO(topic, dbTopicPO));
}
// DB中没有的则插入DB
for (Topic topic: presentTopicMap.values()) {
for (Topic presentTopic: presentTopicList) {
try {
topicDAO.insert(TopicConverter.mergeAndOnlyMetadata2NewTopicPO(topic, null));
TopicPO inDBTopicPO = inDBMap.remove(presentTopic.getTopicName());
TopicPO newTopicPO = TopicConverter.mergeAndOnlyMetadata2NewTopicPO(presentTopic, inDBTopicPO);
if (inDBTopicPO == null) {
topicDAO.insert(newTopicPO);
} else if (!newTopicPO.equals(inDBTopicPO)) {
// 有变化时,则进行更新
if (presentTopic.getUpdateTime() == null) {
// 如果原数据的更新时间为null则修改为当前时间
newTopicPO.setUpdateTime(new Date());
}
topicDAO.updateById(newTopicPO);
}
// 无变化时,直接忽略更新
} catch (DuplicateKeyException dke) {
// 忽略key冲突错误多台KM可能同时做insert所以可能出现key冲突
}
}
// DB中没有的则进行删除
inDBMap.values().forEach(elem -> topicDAO.deleteById(elem.getId()));
}
@Override
public int batchReplaceConfig(Long clusterPhyId, List<TopicConfig> topicConfigList) {
public int batchReplaceChangedConfig(Long clusterPhyId, List<TopicConfig> changedConfigList) {
int effectRow = 0;
for (TopicConfig config: topicConfigList) {
for (TopicConfig config: changedConfigList) {
try {
effectRow += topicDAO.updateConfig(ConvertUtil.obj2Obj(config, TopicPO.class));
effectRow += topicDAO.updateConfigById(ConvertUtil.obj2Obj(config, TopicPO.class));
} catch (Exception e) {
log.error("method=batchReplaceConfig||config={}||errMsg=exception!", config, e);
log.error(
"method=batchReplaceConfig||clusterPhyId={}||topicName={}||retentionMs={}||errMsg=exception!",
config.getClusterPhyId(), config.getTopicName(), config.getRetentionMs(), e
);
}
}
@@ -299,11 +314,4 @@ public class TopicServiceImpl implements TopicService {
return topicDAO.selectOne(lambdaQueryWrapper);
}
private List<TopicPO> getTopicsFromDB(Long clusterPhyId) {
LambdaQueryWrapper<TopicPO> lambdaQueryWrapper = new LambdaQueryWrapper<>();
lambdaQueryWrapper.eq(TopicPO::getClusterPhyId, clusterPhyId);
return topicDAO.selectList(lambdaQueryWrapper);
}
}

View File

@@ -8,5 +8,5 @@ import org.springframework.stereotype.Repository;
// Mapper for the ks_km_topic table (MyBatis-Plus BaseMapper plus custom statements).
public interface TopicDAO extends BaseMapper<TopicPO> {
// Full-row replace of a topic record (REPLACE-style upsert); returns affected rows.
int replaceAll(TopicPO topicPO);
// NOTE(review): appears superseded by updateConfigById in this commit — the
// mapper XML for this id updates by (cluster_phy_id, topic_name). Confirm no
// remaining callers before removing.
int updateConfig(TopicPO topicPO);
// Updates the config portion (retention_ms) of the row addressed by primary key id.
int updateConfigById(TopicPO topicPO);
}

View File

@@ -25,8 +25,8 @@
(#{clusterPhyId}, #{topicName}, #{replicaNum}, #{partitionNum}, #{brokerIds}, #{partitionMap}, #{retentionMs}, #{type}, #{description})
</insert>
<!-- Fix: diff residue left the old <update> element unclosed before the new one
     opened, producing malformed XML. Emit both statements properly closed so
     both DAO methods remain mapped. -->
<!-- Legacy statement: addresses the row by (cluster, topic-name) business key. -->
<update id="updateConfig" parameterType="com.xiaojukeji.know.streaming.km.common.bean.po.topic.TopicPO">
    UPDATE ks_km_topic SET retention_ms = #{retentionMs} WHERE cluster_phy_id = #{clusterPhyId} AND topic_name = #{topicName}
</update>
<!-- Preferred statement: addresses the row by primary key (TopicDAO#updateConfigById). -->
<update id="updateConfigById" parameterType="com.xiaojukeji.know.streaming.km.common.bean.po.topic.TopicPO">
    UPDATE ks_km_topic SET retention_ms = #{retentionMs} WHERE id=#{id}
</update>
</mapper>

View File

@@ -7,8 +7,8 @@ import com.didiglobal.logi.log.ILog;
import com.didiglobal.logi.log.LogFactory;
import com.xiaojukeji.know.streaming.km.common.bean.entity.cluster.ClusterPhy;
import com.xiaojukeji.know.streaming.km.common.bean.entity.result.Result;
import com.xiaojukeji.know.streaming.km.common.bean.entity.topic.Topic;
import com.xiaojukeji.know.streaming.km.common.bean.entity.topic.TopicConfig;
import com.xiaojukeji.know.streaming.km.common.bean.po.topic.TopicPO;
import com.xiaojukeji.know.streaming.km.common.utils.ConvertUtil;
import com.xiaojukeji.know.streaming.km.core.service.topic.TopicConfigService;
import com.xiaojukeji.know.streaming.km.core.service.topic.TopicService;
@@ -44,18 +44,25 @@ public class SyncTopicConfigTask extends AbstractAsyncMetadataDispatchTask {
public TaskResult processClusterTask(ClusterPhy clusterPhy, long triggerTimeUnitMs) {
boolean success = true;
List<TopicConfig> topicConfigList = new ArrayList<>();
for (Topic topic: topicService.listTopicsFromDB(clusterPhy.getId())) {
Result<TopicConfig> configResult = this.getTopicConfig(clusterPhy.getId(), topic.getTopicName());
List<TopicConfig> changedConfigList = new ArrayList<>();
for (TopicPO topicPO: topicService.listTopicPOsFromDB(clusterPhy.getId())) {
Result<TopicConfig> configResult = this.getTopicConfig(clusterPhy.getId(), topicPO.getTopicName());
if (configResult.failed()) {
success = false;
continue;
}
topicConfigList.add(configResult.getData());
TopicConfig config = configResult.getData();
if (topicPO.getRetentionMs().equals(config.getRetentionMs())) {
// 数据无变化,不需要加入待更新列表中
continue;
}
topicService.batchReplaceConfig(clusterPhy.getId(), topicConfigList);
config.setId(topicPO.getId());
changedConfigList.add(configResult.getData());
}
topicService.batchReplaceChangedConfig(clusterPhy.getId(), changedConfigList);
return success? TaskResult.SUCCESS: TaskResult.FAIL;
}