projects
/
demos
/
kafka
/
wordcount
/ blobdiff
commit
grep
author
committer
pickaxe
?
search:
re
summary
|
shortlog
|
log
|
commit
|
commitdiff
|
tree
raw
|
inline
| side by side
counter: 1.2.3 - Simplified topology, using default-serdes for all steps
[demos/kafka/wordcount]
/
src
/
main
/
java
/
de
/
juplo
/
kafka
/
wordcount
/
counter
/
CounterStreamProcessor.java
diff --git
a/src/main/java/de/juplo/kafka/wordcount/counter/CounterStreamProcessor.java
b/src/main/java/de/juplo/kafka/wordcount/counter/CounterStreamProcessor.java
index
4cc0c68
..
d64eb68
100644
(file)
--- a/
src/main/java/de/juplo/kafka/wordcount/counter/CounterStreamProcessor.java
+++ b/
src/main/java/de/juplo/kafka/wordcount/counter/CounterStreamProcessor.java
@@
-8,10
+8,8
@@
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.Topology;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.Topology;
import org.apache.kafka.streams.kstream.KStream;
-import org.apache.kafka.streams.kstream.Materialized;
import org.apache.kafka.streams.state.KeyValueBytesStoreSupplier;
import org.apache.kafka.streams.kstream.*;
import org.apache.kafka.streams.state.KeyValueBytesStoreSupplier;
import org.apache.kafka.streams.kstream.*;
-import org.springframework.kafka.support.serializer.JsonSerde;
import java.util.Properties;
import java.util.Properties;
@@
-48,30
+46,15
@@
public class CounterStreamProcessor
KStream<String, Word> source = builder.stream(
inputTopic,
KStream<String, Word> source = builder.stream(
inputTopic,
- Consumed.with(
- Serdes.String(),
- new JsonSerde<>(Word.class)
- .ignoreTypeHeaders()));
+ Consumed.with(Serdes.String(), null));
source
.map((key, word) -> new KeyValue<>(word, word))
source
.map((key, word) -> new KeyValue<>(word, word))
- .groupByKey(Grouped.with(
- new JsonSerde<>(Word.class)
- .forKeys()
- .noTypeInfo(),
- new JsonSerde<>(Word.class)
- .noTypeInfo()))
+ .groupByKey()
.count(Materialized.as(storeSupplier))
.toStream()
.map((word, count) -> new KeyValue<>(word, WordCount.of(word, count)))
.count(Materialized.as(storeSupplier))
.toStream()
.map((word, count) -> new KeyValue<>(word, WordCount.of(word, count)))
- .to(
- outputTopic,
- Produced.with(
- new JsonSerde<>(Word.class)
- .forKeys()
- .noTypeInfo(),
- new JsonSerde<>(WordCount.class)
- .noTypeInfo()));
+ .to(outputTopic);
Topology topology = builder.build();
log.info("\n\n{}", topology.describe());
Topology topology = builder.build();
log.info("\n\n{}", topology.describe());