Mirror of https://github.com/didi/KnowStreaming.git, synced 2025-12-24 03:42:07 +08:00
Sampling adjustment (采样调整)
@@ -142,12 +142,13 @@ public class TopicStateManagerImpl implements TopicStateManager {
        // Create the kafka-consumer
        kafkaConsumer = new KafkaConsumer<>(this.generateClientProperties(clusterPhy, dto.getMaxRecords()));

        kafkaConsumer.assign(endOffsetsMapResult.getData().keySet());
        for (Map.Entry<TopicPartition, Long> entry: endOffsetsMapResult.getData().entrySet()) {
            kafkaConsumer.seek(entry.getKey(), Math.max(0, entry.getValue() - dto.getMaxRecords()));
        }

        // KafkaConstant.POLL_ONCE_TIMEOUT_UNIT_MS is subtracted here because a single poll itself takes time; without the subtraction, the elapsed time after the poll could exceed the requested limit
        while (System.currentTimeMillis() - startTime + KafkaConstant.POLL_ONCE_TIMEOUT_UNIT_MS <= dto.getPullTimeoutUnitMs() && voList.size() < dto.getMaxRecords()) {
            for (Map.Entry<TopicPartition, Long> entry: endOffsetsMapResult.getData().entrySet()) {
                kafkaConsumer.assign(Arrays.asList(entry.getKey()));
                kafkaConsumer.seek(entry.getKey(), Math.max(0, entry.getValue() - dto.getMaxRecords()));

                ConsumerRecords<String, String> consumerRecords = kafkaConsumer.poll(Duration.ofMillis(KafkaConstant.POLL_ONCE_TIMEOUT_UNIT_MS));
                for (ConsumerRecord<String, String> consumerRecord : consumerRecords) {
                    if (this.checkIfIgnore(consumerRecord, dto.getFilterKey(), dto.getFilterValue())) {
@@ -165,7 +166,6 @@ public class TopicStateManagerImpl implements TopicStateManager {
                        || voList.size() > dto.getMaxRecords()) {
                    break;
                }
            }
        }

        return Result.buildSuc(voList.subList(0, Math.min(dto.getMaxRecords(), voList.size())));
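Taken together, the two hunks above sample the tail of each partition: every partition is sought to max(0, endOffset - maxRecords), then polled in short slices until either enough records are collected or the time budget runs out, with one poll timeout subtracted from the budget so a final poll cannot overshoot it. Below is a minimal standalone sketch of that pattern, not the project's code: the bootstrap servers, topic name, and limits are placeholder values, and the KnowStreaming-specific pieces (dto, voList, KafkaConstant, checkIfIgnore) are replaced with plain locals.

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.PartitionInfo;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.time.Duration;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Properties;

public class TailSampleSketch {

    private static final long POLL_ONCE_TIMEOUT_UNIT_MS = 1000L; // stands in for KafkaConstant.POLL_ONCE_TIMEOUT_UNIT_MS
    private static final int  MAX_RECORDS = 100;                 // stands in for dto.getMaxRecords()
    private static final long PULL_TIMEOUT_UNIT_MS = 8000L;      // stands in for dto.getPullTimeoutUnitMs()

    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092");         // placeholder cluster address
        props.put("key.deserializer", StringDeserializer.class.getName());
        props.put("value.deserializer", StringDeserializer.class.getName());
        props.put("enable.auto.commit", "false");                 // no group management here, so never commit
        props.put("max.poll.records", String.valueOf(MAX_RECORDS));

        List<ConsumerRecord<String, String>> sampled = new ArrayList<>();
        long startTime = System.currentTimeMillis();

        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            // Resolve the topic's partitions and their end offsets.
            List<TopicPartition> partitions = new ArrayList<>();
            for (PartitionInfo info : consumer.partitionsFor("sample-topic")) { // placeholder topic name
                partitions.add(new TopicPartition(info.topic(), info.partition()));
            }
            Map<TopicPartition, Long> endOffsets = consumer.endOffsets(partitions);

            // One poll timeout is subtracted from the budget so the last poll cannot overshoot it.
            while (System.currentTimeMillis() - startTime + POLL_ONCE_TIMEOUT_UNIT_MS <= PULL_TIMEOUT_UNIT_MS
                    && sampled.size() < MAX_RECORDS) {
                for (Map.Entry<TopicPartition, Long> entry : endOffsets.entrySet()) {
                    // Assign one partition at a time and rewind to its last MAX_RECORDS offsets.
                    consumer.assign(Arrays.asList(entry.getKey()));
                    consumer.seek(entry.getKey(), Math.max(0, entry.getValue() - MAX_RECORDS));

                    ConsumerRecords<String, String> records =
                            consumer.poll(Duration.ofMillis(POLL_ONCE_TIMEOUT_UNIT_MS));
                    for (ConsumerRecord<String, String> record : records) {
                        sampled.add(record);
                        if (sampled.size() >= MAX_RECORDS) {
                            break;
                        }
                    }
                    if (sampled.size() >= MAX_RECORDS) {
                        break;
                    }
                }
            }
        }

        sampled.forEach(r -> System.out.printf("%s-%d@%d key=%s%n", r.topic(), r.partition(), r.offset(), r.key()));
    }
}

One reading of assigning a single partition per loop iteration, as the diff does, is that it spreads the poll budget across partitions instead of letting one busy partition dominate a shared poll.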
@@ -10,5 +10,5 @@ import lombok.NoArgsConstructor;
public class RecordHeaderKS {
    private String key;

-   private byte[] value;
+   private String value;
}
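The hunk above switches the header value field from byte[] to String. For reference, here is a minimal sketch of what the value object might look like after the change; the @Data and @AllArgsConstructor annotations are assumptions inferred from the @NoArgsConstructor import in the hunk context and the two-argument constructor used in TopicVOConverter below, not confirmed by the diff.

import lombok.AllArgsConstructor;   // assumed: matches the new RecordHeaderKS(key, value) usage
import lombok.Data;                 // assumed: accessors are not shown in the diff
import lombok.NoArgsConstructor;    // confirmed by the hunk context above

@Data
@NoArgsConstructor
@AllArgsConstructor
public class RecordHeaderKS {
    private String key;

    // Kafka delivers header values as byte[]; after this commit the decoded text is stored instead.
    private String value;
}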
@@ -18,6 +18,7 @@ import io.swagger.annotations.ApiModelProperty;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.header.Header;

+import java.nio.charset.StandardCharsets;
import java.util.*;

public class TopicVOConverter {
@@ -51,7 +52,7 @@ public class TopicVOConverter {
        vo.setValue(consumerRecord.value());
        vo.setHeaderList(new ArrayList<>());
        for (Header header : consumerRecord.headers().toArray()) {
-           vo.getHeaderList().add(new RecordHeaderKS(header.key(), header.value()));
+           vo.getHeaderList().add(new RecordHeaderKS(header.key(), new String(header.value(), StandardCharsets.UTF_8)));
        }
        return vo;
    }
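The last hunk decodes each header value with an explicit charset instead of exposing the raw byte[]. Below is a minimal sketch of that conversion in isolation, assuming the RecordHeaderKS(String, String) constructor from the previous hunk; the class and method names here are illustrative and not part of the project.

import org.apache.kafka.common.header.Header;

import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;

public class HeaderDecodeSketch {

    // Convert Kafka record headers (key + raw bytes) into key/value string pairs.
    public static List<RecordHeaderKS> toHeaderList(Iterable<Header> headers) {
        List<RecordHeaderKS> list = new ArrayList<>();
        for (Header header : headers) {
            byte[] raw = header.value();
            // Decode with an explicit charset rather than the platform default;
            // header values may legally be null, so guard before decoding.
            String value = (raw == null) ? null : new String(raw, StandardCharsets.UTF_8);
            list.add(new RecordHeaderKS(header.key(), value));
        }
        return list;
    }
}

Because ConsumerRecord#headers() returns a Headers object that implements Iterable<Header>, it can be passed to such a method directly. Passing StandardCharsets.UTF_8 explicitly, as the diff does, keeps the decoded text independent of the JVM's default charset; headers carrying non-text payloads would still need a different representation (for example Base64), which is outside this change.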