From: Kai Moritz
Date: Sun, 5 Feb 2023 11:18:18 +0000 (+0100)
Subject: splitter: 1.1.2 - De-/Serialization are both using the default-serde
X-Git-Url: https://juplo.de/gitweb/?p=demos%2Fkafka%2Fwordcount;a=commitdiff_plain;h=f132033a6793fc566962a4361d7d0a0e852b0f82

splitter: 1.1.2 - De-/Serialization are both using the default-serde

- Configured the default-serde for the expected input and the intended output.
- The configuration has to be supplied explicitly in the `Properties` that are
  handed over during the instantiation of `KafkaStreams`.
- The `JsonDeserializer` needs the definition of a default type for the value
  and has to trust the class that should be instantiated.
- The `JsonSerializer` should be advised _not_ to add the header `__TypeId__`,
  because it is not needed if the topic only contains messages of a single type.
---

diff --git a/src/main/java/de/juplo/kafka/wordcount/splitter/SplitterStreamProcessor.java b/src/main/java/de/juplo/kafka/wordcount/splitter/SplitterStreamProcessor.java
index 5e4930d..86d5bbd 100644
--- a/src/main/java/de/juplo/kafka/wordcount/splitter/SplitterStreamProcessor.java
+++ b/src/main/java/de/juplo/kafka/wordcount/splitter/SplitterStreamProcessor.java
@@ -6,12 +6,12 @@ import org.apache.kafka.common.serialization.Serdes;
 import org.apache.kafka.streams.KafkaStreams;
 import org.apache.kafka.streams.StreamsBuilder;
 import org.apache.kafka.streams.StreamsConfig;
-import org.apache.kafka.streams.kstream.Consumed;
 import org.apache.kafka.streams.kstream.KStream;
-import org.apache.kafka.streams.kstream.Produced;
 import org.springframework.boot.SpringApplication;
 import org.springframework.context.ConfigurableApplicationContext;
+import org.springframework.kafka.support.serializer.JsonDeserializer;
 import org.springframework.kafka.support.serializer.JsonSerde;
+import org.springframework.kafka.support.serializer.JsonSerializer;
 import org.springframework.stereotype.Component;
 
 import jakarta.annotation.PostConstruct;
@@ -39,12 +39,7 @@ public class SplitterStreamProcessor
   {
     StreamsBuilder builder = new StreamsBuilder();
 
-    JsonSerde<Recording> recordSerde =
-        new JsonSerde<>(Recording.class).ignoreTypeHeaders();
-
-    KStream<String, Recording> source = builder.stream(
-        properties.getInputTopic(),
-        Consumed.with(Serdes.String(), recordSerde));
+    KStream<String, Recording> source = builder.stream(properties.getInputTopic());
 
     source
         .flatMapValues(recording -> Arrays
@@ -56,8 +51,11 @@ public class SplitterStreamProcessor
     Properties props = new Properties();
     props.put(StreamsConfig.APPLICATION_ID_CONFIG, properties.getApplicationId());
     props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, properties.getBootstrapServer());
-    props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
+    props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.StringSerde.class.getName());
     props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, JsonSerde.class.getName());
+    props.put(JsonDeserializer.TRUSTED_PACKAGES, Recording.class.getName());
+    props.put(JsonDeserializer.VALUE_DEFAULT_TYPE, Recording.class.getName());
+    props.put(JsonSerializer.ADD_TYPE_INFO_HEADERS, false);
     props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
 
     streams = new KafkaStreams(builder.build(), props);
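
For reference, here is a minimal, self-contained sketch of the same default-serde setup, intended only to illustrate the configuration described in the commit message. It is not part of the commit: the class names, topic names, application id, bootstrap server, the Recording stub and the splitting logic are all assumptions made up for this illustration.

// Minimal sketch -- NOT part of the commit above. Everything in this class
// (names, topics, the Recording stub, the splitting logic) is made up for
// illustration; only the serde-related configuration mirrors the patch.
package example;

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.KStream;
import org.springframework.kafka.support.serializer.JsonDeserializer;
import org.springframework.kafka.support.serializer.JsonSerde;
import org.springframework.kafka.support.serializer.JsonSerializer;

import java.util.Arrays;
import java.util.Properties;

public class DefaultSerdeExample
{
  // Stand-in for the Recording class of the splitter (assumption for this sketch)
  public static class Recording
  {
    private String user;
    private String sentence;

    public String getUser() { return user; }
    public void setUser(String user) { this.user = user; }
    public String getSentence() { return sentence; }
    public void setSentence(String sentence) { this.sentence = sentence; }
  }

  public static void main(String[] args)
  {
    Properties props = new Properties();
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, "default-serde-example");
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    // Default serdes: plain strings for the keys, JSON for the values
    props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.StringSerde.class.getName());
    props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, JsonSerde.class.getName());
    // The JsonDeserializer behind the JsonSerde has to know which class to
    // instantiate for incoming values and has to trust its package
    props.put(JsonDeserializer.TRUSTED_PACKAGES, Recording.class.getPackageName());
    props.put(JsonDeserializer.VALUE_DEFAULT_TYPE, Recording.class.getName());
    // The topics only carry one type each, so the __TypeId__ header is not needed
    props.put(JsonSerializer.ADD_TYPE_INFO_HEADERS, false);

    StreamsBuilder builder = new StreamsBuilder();
    // No Consumed.with(...) needed: the default serdes from the Properties apply
    KStream<String, Recording> source = builder.stream("recordings");
    source
        // Split each recorded sentence into words (illustrative splitting only)
        .flatMapValues(recording -> Arrays.asList(recording.getSentence().split("\\W+")))
        // No Produced.with(...) needed either: the words are written as JSON strings
        .to("words");

    KafkaStreams streams = new KafkaStreams(builder.build(), props);
    streams.start();
    Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
  }
}

The point of this setup is that Kafka Streams instantiates the default serdes from the supplied `Properties` and calls their `configure(...)` method with that same configuration, so the `JsonDeserializer`/`JsonSerializer` settings reach the `JsonSerde` without any `Consumed.with(...)` or `Produced.with(...)` on the individual operators.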