mirror of
https://github.com/didi/KnowStreaming.git
synced 2026-02-07 06:30:49 +08:00
[Optimize] Metric-collection performance optimization, part 1 (#726)
This commit is contained in:
@@ -0,0 +1,50 @@
|
||||
package com.xiaojukeji.know.streaming.km.common.bean.entity.offset;
|
||||
|
||||
import org.apache.kafka.clients.admin.OffsetSpec;
|
||||
|
||||
/**
 * Offset specification used when querying Kafka partition offsets,
 * mirroring the semantics of the Kafka AdminClient {@code OffsetSpec}.
 *
 * @see OffsetSpec
 */
public class KSOffsetSpec {

    /** Marker spec: resolve the earliest (log-start) offset of a partition. */
    public static class KSEarliestSpec extends KSOffsetSpec { }

    /** Marker spec: resolve the latest (log-end) offset of a partition. */
    public static class KSLatestSpec extends KSOffsetSpec { }

    /** Spec: resolve the earliest offset whose record timestamp is &gt;= a given timestamp. */
    public static class KSTimestampSpec extends KSOffsetSpec {
        /** Query timestamp in milliseconds since the epoch. */
        private final long timestamp;

        public KSTimestampSpec(long timestamp) {
            this.timestamp = timestamp;
        }

        /**
         * @return the query timestamp in milliseconds
         */
        public long timestamp() {
            return timestamp;
        }
    }

    // The earliest/latest specs carry no state, so one shared instance of each is
    // enough; this avoids an allocation per offset query on the metric-collection
    // hot path. Callers distinguish specs via instanceof, never identity.
    private static final KSOffsetSpec EARLIEST = new KSEarliestSpec();
    private static final KSOffsetSpec LATEST = new KSLatestSpec();

    /**
     * Used to retrieve the latest offset of a partition
     */
    public static KSOffsetSpec latest() {
        return LATEST;
    }

    /**
     * Used to retrieve the earliest offset of a partition
     */
    public static KSOffsetSpec earliest() {
        return EARLIEST;
    }

    /**
     * Used to retrieve the earliest offset whose timestamp is greater than
     * or equal to the given timestamp in the corresponding partition
     * @param timestamp in milliseconds
     */
    public static KSOffsetSpec forTimestamp(long timestamp) {
        return new KSTimestampSpec(timestamp);
    }

    // Not instantiable directly: use the static factories or the nested spec types.
    private KSOffsetSpec() {
    }
}
|
||||
@@ -1,23 +1,39 @@
|
||||
package com.xiaojukeji.know.streaming.km.common.bean.entity.param.partition;
|
||||
|
||||
import com.xiaojukeji.know.streaming.km.common.bean.entity.param.topic.TopicParam;
|
||||
import lombok.Data;
|
||||
import com.xiaojukeji.know.streaming.km.common.bean.entity.offset.KSOffsetSpec;
|
||||
import com.xiaojukeji.know.streaming.km.common.bean.entity.param.cluster.ClusterPhyParam;
|
||||
import com.xiaojukeji.know.streaming.km.common.utils.Triple;
|
||||
import lombok.Getter;
|
||||
import lombok.NoArgsConstructor;
|
||||
import org.apache.kafka.clients.admin.OffsetSpec;
|
||||
import org.apache.kafka.common.TopicPartition;
|
||||
|
||||
import java.util.Map;
|
||||
import java.util.*;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
@Data
|
||||
@Getter
|
||||
@NoArgsConstructor
|
||||
public class PartitionOffsetParam extends TopicParam {
|
||||
private Map<TopicPartition, OffsetSpec> topicPartitionOffsets;
|
||||
public class PartitionOffsetParam extends ClusterPhyParam {
|
||||
private List<Triple<String, KSOffsetSpec, List<TopicPartition>>> offsetSpecList;
|
||||
|
||||
private Long timestamp;
|
||||
public PartitionOffsetParam(Long clusterPhyId, String topicName, KSOffsetSpec ksOffsetSpec, List<TopicPartition> partitionList) {
|
||||
super(clusterPhyId);
|
||||
this.offsetSpecList = Collections.singletonList(new Triple<>(topicName, ksOffsetSpec, partitionList));
|
||||
}
|
||||
|
||||
public PartitionOffsetParam(Long clusterPhyId, String topicName, Map<TopicPartition, OffsetSpec> topicPartitionOffsets, Long timestamp) {
|
||||
super(clusterPhyId, topicName);
|
||||
this.topicPartitionOffsets = topicPartitionOffsets;
|
||||
this.timestamp = timestamp;
|
||||
public PartitionOffsetParam(Long clusterPhyId, String topicName, List<KSOffsetSpec> specList, List<TopicPartition> partitionList) {
|
||||
super(clusterPhyId);
|
||||
this.offsetSpecList = new ArrayList<>();
|
||||
specList.forEach(elem -> offsetSpecList.add(new Triple<>(topicName, elem, partitionList)));
|
||||
}
|
||||
|
||||
public PartitionOffsetParam(Long clusterPhyId, KSOffsetSpec offsetSpec, List<TopicPartition> partitionList) {
|
||||
super(clusterPhyId);
|
||||
Map<String, List<TopicPartition>> tpMap = new HashMap<>();
|
||||
partitionList.forEach(elem -> {
|
||||
tpMap.putIfAbsent(elem.topic(), new ArrayList<>());
|
||||
tpMap.get(elem.topic()).add(elem);
|
||||
});
|
||||
|
||||
this.offsetSpecList = tpMap.entrySet().stream().map(elem -> new Triple<>(elem.getKey(), offsetSpec, elem.getValue())).collect(Collectors.toList());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,6 +5,8 @@ import com.xiaojukeji.know.streaming.km.common.bean.po.BasePO;
|
||||
import com.xiaojukeji.know.streaming.km.common.constant.Constant;
|
||||
import lombok.Data;
|
||||
|
||||
import java.util.Objects;
|
||||
|
||||
@Data
|
||||
@TableName(Constant.MYSQL_TABLE_NAME_PREFIX + "partition")
|
||||
public class PartitionPO extends BasePO {
|
||||
@@ -37,4 +39,31 @@ public class PartitionPO extends BasePO {
|
||||
* AR
|
||||
*/
|
||||
private String assignReplicas;
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (o == null || getClass() != o.getClass()) {
|
||||
return false;
|
||||
}
|
||||
if (!super.equals(o)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
PartitionPO po = (PartitionPO) o;
|
||||
return Objects.equals(clusterPhyId, po.clusterPhyId)
|
||||
&& Objects.equals(topicName, po.topicName)
|
||||
&& Objects.equals(partitionId, po.partitionId)
|
||||
&& Objects.equals(leaderBrokerId, po.leaderBrokerId)
|
||||
&& Objects.equals(inSyncReplicas, po.inSyncReplicas)
|
||||
&& Objects.equals(assignReplicas, po.assignReplicas);
|
||||
}
|
||||
|
||||
// Hashes the same field set that equals() compares (plus super.hashCode()),
// preserving the equals/hashCode contract. Field order must not change, or
// existing hash-based lookups would see different values.
@Override
public int hashCode() {
    return Objects.hash(super.hashCode(), clusterPhyId, topicName, partitionId, leaderBrokerId, inSyncReplicas, assignReplicas);
}
|
||||
}
|
||||
|
||||
@@ -33,7 +33,7 @@ public class KafkaConstant {
|
||||
|
||||
public static final Integer DATA_VERSION_ONE = 1;
|
||||
|
||||
public static final Integer ADMIN_CLIENT_REQUEST_TIME_OUT_UNIT_MS = 5000;
|
||||
public static final Integer ADMIN_CLIENT_REQUEST_TIME_OUT_UNIT_MS = 10000;
|
||||
|
||||
public static final Integer KAFKA_SASL_SCRAM_ITERATIONS = 8192;
|
||||
|
||||
|
||||
Reference in New Issue
Block a user