Kafka Streams: how to get the Kafka headers
I have the following Kafka Streams code:
public class KafkaStreamHandler implements Processor<String, String> {

    private ProcessorContext context;

    @Override
    public void init(ProcessorContext context) {
        this.context = context;
    }

    public KeyValue<String, KafkaStatusRecordWrapper> process(String key, String value) {
        Headers headers = context.headers();
        headers.forEach(header -> System.out.println(header));
        return null; // TODO: wrap the parsed value in a KafkaStatusRecordWrapper
    }

    public void StartFailstreamHandler() {
        StreamsBuilder builder = new StreamsBuilder();
        KStream<String, String> userStream =
                builder.stream("usertopic", Consumed.with(Serdes.String(), Serdes.String()));

        Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "failed-streams-userstream");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "ALL my bootstrap servers");
        props.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG, 4);
        props.put("enable.auto.commit", "true");
        props.put("auto.commit.interval.ms", "500");
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
        // consumer_timeout_ms
        props.put(ConsumerConfig.MAX_POLL_INTERVAL_MS_CONFIG, 2000);
        props.put("state.dir", "/tmp/kafka/stat");

        userStream.peek((key, value) -> System.out.println("key :" + key + " value :" + value));

        /* take a few decisions based on the header */
        /* How to get the header? */
        userStream.map(this::process);

        KafkaStreams kafkaStreams = new KafkaStreams(builder.build(), props);
        kafkaStreams.setUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() {
            @Override
            public void uncaughtException(Thread t, Throwable e) {
                logger.error("Thread Name :" + t.getName() + " Error while processing:", e);
            }
        });
        kafkaStreams.cleanUp();
        kafkaStreams.start();
    }
}
Now one of our clients is sending version information in the Kafka headers, like this:
ProducerRecord<Integer, String> record = new ProducerRecord<Integer, String>("topic", 1, "message");
record.headers().add(new RecordHeader("version", "v1".getBytes()));
producer.send(record);
Based on this header I need to select a parser for my messages. How can I read this header with the KStream operators?
I have looked through all of the streams APIs, but none of them expose the headers.
I can't switch to a plain Kafka consumer because my application already depends on several KStream APIs.
A Processor does not let you chain further operators in the downstream DSL; you should use transformValues instead so you can keep using the Streams DSL:
- First, extract the Headers inside a ValueTransformerWithKey:
public class ExtractHeaderThenDoSomethingTransformer implements ValueTransformerWithKey<String, String, String> {

    private ProcessorContext context;

    @Override
    public void init(ProcessorContext context) {
        this.context = context;
    }

    @Override
    public String transform(String readOnlyKey, String value) {
        Headers headers = context.headers();
        /* make your decisions based on the headers here; if you want to filter on a header,
           just return null and chain a filter() operator after transformValues() */
        return value;
    }

    @Override
    public void close() {
    }
}
- Then add the ExtractHeaderThenDoSomethingTransformer to your topology like this:
userStream
    .transformValues(ExtractHeaderThenDoSomethingTransformer::new)
    .map(this::process);
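For the "version" header from the question, the transform() body could look roughly like the following. This is only a minimal sketch: VersionAwareTransformer, parseV1/parseV2 and the "v2" value are hypothetical, not part of the original answer.

import java.nio.charset.StandardCharsets;
import org.apache.kafka.common.header.Header;
import org.apache.kafka.streams.kstream.ValueTransformerWithKey;
import org.apache.kafka.streams.processor.ProcessorContext;

// Sketch: pick a parser per record based on the "version" header, drop unknown versions.
public class VersionAwareTransformer implements ValueTransformerWithKey<String, String, String> {

    private ProcessorContext context;

    @Override
    public void init(ProcessorContext context) {
        this.context = context;
    }

    @Override
    public String transform(String readOnlyKey, String value) {
        Header versionHeader = context.headers().lastHeader("version");
        String version = versionHeader == null
                ? ""
                : new String(versionHeader.value(), StandardCharsets.UTF_8);
        switch (version) {
            case "v1":
                return parseV1(value);   // hypothetical v1 parser
            case "v2":
                return parseV2(value);   // hypothetical v2 parser
            default:
                return null;             // unknown version, dropped by the filter below
        }
    }

    @Override
    public void close() {
    }

    // Hypothetical parsers, stand-ins for the application's real ones.
    private String parseV1(String value) { return value; }
    private String parseV2(String value) { return value; }
}

Records for which no parser exists come out as null and can be dropped with a filter() right after transformValues():

userStream
    .transformValues(VersionAwareTransformer::new)
    .filter((key, value) -> value != null)   // drop records whose version we could not handle
    .map(this::process);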
We can also get the headers from the record context:
userStream.to { key, value, recordContext ->
    recordContext.headers()
    destinationTopic
}
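In Java, the same idea uses the TopicNameExtractor overload of to(); the RecordContext it receives exposes the headers. A minimal sketch, assuming you want to route each record to a version-specific topic (the "usertopic-" naming is an assumption):

import java.nio.charset.StandardCharsets;
import org.apache.kafka.common.header.Header;

// Sketch: derive the destination topic from the "version" header of each record.
userStream.to((key, value, recordContext) -> {
    Header versionHeader = recordContext.headers().lastHeader("version");
    String version = versionHeader == null
            ? "unknown"
            : new String(versionHeader.value(), StandardCharsets.UTF_8);
    return "usertopic-" + version;   // e.g. usertopic-v1
});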