counter: 1.2.15 - `TestData` only holds and asserts the test-data
demos/kafka/wordcount/src/test/java/de/juplo/kafka/wordcount/counter/CounterApplicationIT.java
index c6eb0a6..ad4faf2 100644
@@ -1,8 +1,6 @@
 package de.juplo.kafka.wordcount.counter;
 
-import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
-import org.apache.kafka.clients.consumer.ConsumerRecord;
 import org.apache.kafka.streams.state.KeyValueBytesStoreSupplier;
 import org.apache.kafka.streams.state.Stores;
 import org.junit.jupiter.api.BeforeEach;
@@ -14,34 +12,47 @@ import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Primary;
 import org.springframework.kafka.annotation.KafkaListener;
 import org.springframework.kafka.core.KafkaTemplate;
+import org.springframework.kafka.support.KafkaHeaders;
 import org.springframework.kafka.test.context.EmbeddedKafka;
+import org.springframework.messaging.handler.annotation.Header;
+import org.springframework.messaging.handler.annotation.Payload;
+import org.springframework.util.LinkedMultiValueMap;
+import org.springframework.util.MultiValueMap;
 
 import java.time.Duration;
-import java.util.LinkedList;
-import java.util.List;
+import java.util.stream.Stream;
 
-import static de.juplo.kafka.wordcount.counter.CounterApplicationIT.*;
-import static org.awaitility.Awaitility.*;
+import static de.juplo.kafka.wordcount.counter.CounterApplicationIT.TOPIC_IN;
+import static de.juplo.kafka.wordcount.counter.CounterApplicationIT.TOPIC_OUT;
+import static org.awaitility.Awaitility.await;
 
 
 @SpringBootTest(
                properties = {
-                               "spring.kafka.bootstrap-servers=${spring.embedded.kafka.brokers}",
+                               "spring.kafka.producer.value-serializer=org.springframework.kafka.support.serializer.JsonSerializer",
+                               "spring.kafka.producer.properties.spring.json.add.type.headers=false",
+                               "spring.kafka.consumer.auto-offset-reset=earliest",
+                               "spring.kafka.consumer.key-deserializer=org.springframework.kafka.support.serializer.JsonDeserializer",
+                               "spring.kafka.consumer.value-deserializer=org.springframework.kafka.support.serializer.JsonDeserializer",
+                               "spring.kafka.consumer.properties.spring.json.use.type.headers=false",
+                               "spring.kafka.consumer.properties.spring.json.key.default.type=de.juplo.kafka.wordcount.counter.Word",
+                               "spring.kafka.consumer.properties.spring.json.value.default.type=de.juplo.kafka.wordcount.counter.WordCounter",
+                               "logging.level.root=WARN",
+                               "logging.level.de.juplo=DEBUG",
                                "juplo.wordcount.counter.bootstrap-server=${spring.embedded.kafka.brokers}",
                                "juplo.wordcount.counter.commit-interval=0",
                                "juplo.wordcount.counter.cacheMaxBytes=0",
                                "juplo.wordcount.counter.input-topic=" + TOPIC_IN,
                                "juplo.wordcount.counter.output-topic=" + TOPIC_OUT })
-@EmbeddedKafka(topics = { TOPIC_IN, TOPIC_OUT }, partitions = PARTITIONS)
+@EmbeddedKafka(topics = { TOPIC_IN, TOPIC_OUT })
 @Slf4j
 public class CounterApplicationIT
 {
-       public final static String TOPIC_IN = "in";
-       public final static String TOPIC_OUT = "out";
-       static final int PARTITIONS = 2;
+       public static final String TOPIC_IN = "in";
+       public static final String TOPIC_OUT = "out";
 
        @Autowired
-       KafkaTemplate<String, String> kafkaTemplate;
+       KafkaTemplate<String, Word> kafkaTemplate;
        @Autowired
        Consumer consumer;
 
@@ -56,24 +67,32 @@ public class CounterApplicationIT
        @Test
        void testSendMessage()
        {
-               TestData.writeInputData((key, value) -> kafkaTemplate.send(TOPIC_IN, key, value));
+               Stream
+                               .of(TestData.INPUT_MESSAGES)
+                               .forEach(word -> kafkaTemplate.send(TOPIC_IN, word.getUser(), word));
 
-               await("Expexted converted data")
+               await("Expected messages")
                                .atMost(Duration.ofSeconds(10))
-                               .untilAsserted(() -> TestData.assertExpectedResult(consumer.received));
+                               .untilAsserted(() -> TestData.assertExpectedMessages(consumer.getReceivedMessages()));
        }
 
 
-       @RequiredArgsConstructor
        static class Consumer
        {
-               private final List<Message> received = new LinkedList<>();
+               private final MultiValueMap<Word, WordCounter> received = new LinkedMultiValueMap<>();
 
                @KafkaListener(groupId = "TEST", topics = TOPIC_OUT)
-               public void receive(ConsumerRecord<String, String> record)
+               public synchronized void receive(
+                               @Header(KafkaHeaders.RECEIVED_KEY) Word word,
+                               @Payload WordCounter counter)
                {
-                       log.debug("Received message: {}", record);
-                       received.add(Message.of(record.key(),record.value()));
+                       log.debug("Received message: {} -> {}", word, counter);
+                       received.add(word, counter);
+               }
+
+               synchronized MultiValueMap<Word, WordCounter> getReceivedMessages()
+               {
+                       return received;
                }
        }
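
For context, here is a minimal sketch of the Word key type (plus a stub of the WordCounter value type) that the new JSON (de)serializer properties and the typed @KafkaListener above refer to. The actual classes are not part of this diff, so apart from getUser(), which the test calls, all field names, constructors and accessors shown here are assumptions; the real classes in the project may well use Lombok instead.

import java.util.Objects;

// Hypothetical shape of the key DTO referenced by the test above.
// Only getUser() is actually used in the diff; the rest is assumed.
public class Word
{
	private String user;
	private String word;

	public Word() {} // no-args constructor needed by the JsonDeserializer

	public Word(String user, String word)
	{
		this.user = user;
		this.word = word;
	}

	public String getUser() { return user; }
	public String getWord() { return word; }
	public void setUser(String user) { this.user = user; }
	public void setWord(String word) { this.word = word; }

	// equals()/hashCode() matter here, because Word is used as the key
	// of the LinkedMultiValueMap that collects the received messages
	@Override
	public boolean equals(Object o)
	{
		if (this == o) return true;
		if (!(o instanceof Word)) return false;
		Word other = (Word) o;
		return Objects.equals(user, other.user) && Objects.equals(word, other.word);
	}

	@Override
	public int hashCode()
	{
		return Objects.hash(user, word);
	}
}

// Hypothetical value type; the real class is likewise not shown in this
// diff, so the fields are guesses. Getters, setters, equals() and
// hashCode() are omitted for brevity.
public class WordCounter
{
	private String user;
	private String word;
	private long counter;
}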
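
The commit subject says that TestData now only holds and asserts the test data. A structural sketch of what such a helper could look like, derived from the two calls the rewritten test makes (TestData.INPUT_MESSAGES and TestData.assertExpectedMessages(...)) and assuming AssertJ is on the test classpath; the concrete words are invented, and the expected counts are left empty because WordCounter's fields are not part of this diff.

import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;

import static org.assertj.core.api.Assertions.assertThat;

// Sketch of the TestData helper used by the rewritten test. Only the
// two members referenced in the diff above are shown.
class TestData
{
	static final Word[] INPUT_MESSAGES = new Word[]
	{
		new Word("peter", "Hallo"),
		new Word("klaus", "Welt"),
		new Word("peter", "Hallo"),
	};

	// The real helper would fill this with the WordCounter sequence
	// that is expected per key for the INPUT_MESSAGES above
	static final MultiValueMap<Word, WordCounter> EXPECTED_MESSAGES =
			new LinkedMultiValueMap<>();

	static void assertExpectedMessages(MultiValueMap<Word, WordCounter> receivedMessages)
	{
		// For every key, the received values must match the expected
		// sequence exactly and in order
		EXPECTED_MESSAGES.forEach((word, counters) ->
				assertThat(receivedMessages.get(word))
						.describedAs("Messages received for word %s", word)
						.containsExactlyElementsOf(counters));
	}
}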