counter: 1.2.13 - The tests print out the type-mapping headers
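Spring Kafka's JSON serialization records the Java class of the serialized value (and, when keys are JSON-serialized, of the key) in type-mapping headers such as __TypeId__. The integration test's listener is now backed by a JsonDeserializer, receives the results as typed Word/WordCounter pairs, and logs these type-mapping headers via the TestData.parseHeader helper.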
[demos/kafka/wordcount] src/test/java/de/juplo/kafka/wordcount/counter/CounterApplicationIT.java
index 559b171..78d103c 100644
@@ -2,8 +2,10 @@ package de.juplo.kafka.wordcount.counter;
 
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
+import org.apache.kafka.clients.consumer.ConsumerConfig;
 import org.apache.kafka.clients.consumer.ConsumerRecord;
 import org.apache.kafka.clients.producer.ProducerConfig;
+import org.apache.kafka.streams.KeyValue;
 import org.apache.kafka.streams.state.KeyValueBytesStoreSupplier;
 import org.apache.kafka.streams.state.Stores;
 import org.junit.jupiter.api.BeforeEach;
@@ -14,9 +16,9 @@ import org.springframework.boot.test.context.TestConfiguration;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Primary;
 import org.springframework.kafka.annotation.KafkaListener;
-import org.springframework.kafka.core.DefaultKafkaProducerFactory;
-import org.springframework.kafka.core.KafkaTemplate;
-import org.springframework.kafka.core.ProducerFactory;
+import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
+import org.springframework.kafka.core.*;
+import org.springframework.kafka.support.serializer.JsonDeserializer;
 import org.springframework.kafka.support.serializer.JsonSerializer;
 import org.springframework.kafka.test.context.EmbeddedKafka;
 
@@ -25,10 +27,13 @@ import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
-import java.util.stream.Collectors;
 
 import static de.juplo.kafka.wordcount.counter.CounterApplicationIT.*;
-import static org.awaitility.Awaitility.*;
+import static de.juplo.kafka.wordcount.counter.TestData.convertToMap;
+import static de.juplo.kafka.wordcount.counter.TestData.parseHeader;
+import static org.awaitility.Awaitility.await;
+import static org.springframework.kafka.support.mapping.AbstractJavaTypeMapper.*;
+import static org.springframework.kafka.support.mapping.AbstractJavaTypeMapper.DEFAULT_CLASSID_FIELD_NAME;
 
 
 @SpringBootTest(
@@ -74,16 +79,21 @@ public class CounterApplicationIT
        @RequiredArgsConstructor
        static class Consumer
        {
-               private final List<Message> received = new LinkedList<>();
+               private final List<KeyValue<Word, WordCounter>> received = new LinkedList<>();
 
                @KafkaListener(groupId = "TEST", topics = TOPIC_OUT)
-               public synchronized void receive(ConsumerRecord<String, String> record)
+               public synchronized void receive(ConsumerRecord<Word, WordCounter> record)
                {
-                       log.debug("Received message: {}", record);
-                       received.add(Message.of(record.key(),record.value()));
+                       log.debug(
+                                       "Received message: {} -> {}, key-type header: {}, value-type header: {}",
+                                       record.key(),
+                                       record.value(),
+                                       parseHeader(record.headers(), KEY_DEFAULT_CLASSID_FIELD_NAME),
+                                       parseHeader(record.headers(), DEFAULT_CLASSID_FIELD_NAME));
+                       received.add(KeyValue.pair(record.key(),record.value()));
                }
 
-               synchronized List<Message> getReceivedMessages()
+               synchronized List<KeyValue<Word, WordCounter>> getReceivedMessages()
                {
                        return received;
                }
@@ -95,14 +105,7 @@ public class CounterApplicationIT
                @Bean
                ProducerFactory<?, ?> producerFactory(Properties streamProcessorProperties)
                {
-                       Map<String, Object> propertyMap = streamProcessorProperties
-                                       .entrySet()
-                                       .stream()
-                                       .collect(
-                                                       Collectors.toMap(
-                                                                       entry -> (String)entry.getKey(),
-                                                                       entry -> entry.getValue()
-                                                       ));
+                       Map<String, Object> propertyMap = convertToMap(streamProcessorProperties);
 
                        propertyMap.put(
                                        ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
@@ -114,6 +117,30 @@ public class CounterApplicationIT
                        return new DefaultKafkaProducerFactory<>(propertyMap);
                }
 
+               @Bean
+               ConcurrentKafkaListenerContainerFactory<?, ?> kafkaListenerContainerFactory(
+                               Properties streamProcessorProperties)
+               {
+                       Map<String, Object> propertyMap = convertToMap(streamProcessorProperties);
+
+                       propertyMap.put(
+                                       ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
+                                       JsonDeserializer.class.getName());
+                       propertyMap.put(
+                                       ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
+                                       JsonDeserializer.class.getName());
+
+                       ConsumerFactory<? super Object, ? super Object> consumerFactory =
+                                       new DefaultKafkaConsumerFactory<>(propertyMap);
+
+                       ConcurrentKafkaListenerContainerFactory<Object, Object> kafkaListenerContainerFactory =
+                                       new ConcurrentKafkaListenerContainerFactory<>();
+
+                       kafkaListenerContainerFactory.setConsumerFactory(consumerFactory);
+
+                       return kafkaListenerContainerFactory;
+               }
+
                @Bean
                Consumer consumer()
                {
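
The TestData helpers imported above, convertToMap and parseHeader, are not part of this diff. A minimal sketch of what they might look like, inferred only from their usage here (convertToMap turns the stream-processor Properties into a Map<String, Object>; parseHeader renders a named record header as a loggable String):

// Sketch only: the real TestData class in the repository may differ.
package de.juplo.kafka.wordcount.counter;

import org.apache.kafka.common.header.Header;
import org.apache.kafka.common.header.Headers;

import java.util.Map;
import java.util.Properties;
import java.util.stream.Collectors;

class TestData
{
	// Copies a Properties instance into a Map<String, Object>, replacing the
	// conversion that was previously inlined in producerFactory().
	static Map<String, Object> convertToMap(Properties properties)
	{
		return properties
				.entrySet()
				.stream()
				.collect(
						Collectors.toMap(
								entry -> (String) entry.getKey(),
								entry -> entry.getValue()));
	}

	// Looks up the last header stored under the given key and renders its
	// value as a String, so the type-mapping headers can be logged.
	static String parseHeader(Headers headers, String key)
	{
		Header header = headers.lastHeader(key);
		return header == null
				? key + " not found"
				: key + "=" + new String(header.value());
	}
}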