splitter: 1.2.0 - A domain class (``User``) is used as the key
[demos/kafka/wordcount] src/test/java/de/juplo/kafka/wordcount/splitter/SplitterApplicationIT.java
index a702e1d..e945b31 100644
@@ -1,9 +1,11 @@
 package de.juplo.kafka.wordcount.splitter;
 
-import de.juplo.kafka.wordcount.counter.TestWord;
-import de.juplo.kafka.wordcount.recorder.TestRecording;
+import de.juplo.kafka.wordcount.counter.TestOutputUser;
+import de.juplo.kafka.wordcount.counter.TestOutputWord;
+import de.juplo.kafka.wordcount.recorder.TestInputRecording;
+import de.juplo.kafka.wordcount.recorder.TestInputUser;
 import lombok.extern.slf4j.Slf4j;
-import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.Test;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.boot.test.context.SpringBootTest;
@@ -12,6 +14,7 @@ import org.springframework.context.annotation.Bean;
 import org.springframework.kafka.annotation.KafkaListener;
 import org.springframework.kafka.core.KafkaTemplate;
 import org.springframework.kafka.support.KafkaHeaders;
+import org.springframework.kafka.support.SendResult;
 import org.springframework.kafka.test.context.EmbeddedKafka;
 import org.springframework.messaging.handler.annotation.Header;
 import org.springframework.messaging.handler.annotation.Payload;
@@ -20,18 +23,21 @@ import org.springframework.util.MultiValueMap;
 
 import java.time.Duration;
 
-import static de.juplo.kafka.wordcount.splitter.SplitterApplicationIT.*;
+import static de.juplo.kafka.wordcount.splitter.SplitterApplicationIT.TOPIC_IN;
+import static de.juplo.kafka.wordcount.splitter.SplitterApplicationIT.TOPIC_OUT;
 import static org.awaitility.Awaitility.await;
 
 
 @SpringBootTest(
                properties = {
+                               "spring.kafka.producer.key-serializer=org.springframework.kafka.support.serializer.JsonSerializer",
                                "spring.kafka.producer.value-serializer=org.springframework.kafka.support.serializer.JsonSerializer",
                                "spring.kafka.producer.properties.spring.json.add.type.headers=false",
                                "spring.kafka.consumer.auto-offset-reset=earliest",
+                               "spring.kafka.consumer.key-deserializer=org.springframework.kafka.support.serializer.JsonDeserializer",
                                "spring.kafka.consumer.value-deserializer=org.springframework.kafka.support.serializer.JsonDeserializer",
-                               "spring.kafka.consumer.properties.spring.json.value.default.type=de.juplo.kafka.wordcount.counter.TestWord",
-                               "spring.kafka.consumer.properties.spring.json.trusted.packages=de.juplo.kafka.wordcount.splitter",
+                               "spring.kafka.consumer.properties.spring.json.key.default.type=de.juplo.kafka.wordcount.counter.TestOutputUser",
+                               "spring.kafka.consumer.properties.spring.json.value.default.type=de.juplo.kafka.wordcount.counter.TestOutputWord",
                                "logging.level.root=WARN",
                                "logging.level.de.juplo=DEBUG",
                                "juplo.wordcount.splitter.bootstrap-server=${spring.embedded.kafka.brokers}",
@@ -44,24 +50,39 @@ public class SplitterApplicationIT
        public final static String TOPIC_IN = "in";
        public final static String TOPIC_OUT = "out";
 
-       @Autowired
-       KafkaTemplate<String, TestRecording> kafkaTemplate;
        @Autowired
        Consumer consumer;
 
-
-       @BeforeEach
-       public void clear()
+       @BeforeAll
+       public static void testSendMessage(
+                       @Autowired KafkaTemplate<TestInputUser, TestInputRecording> kafkaTemplate)
        {
-               consumer.received.clear();
+               TestData
+                               .getInputMessages()
+                               .forEach(kv ->
+                               {
+                                       try
+                                       {
+                                               SendResult<TestInputUser, TestInputRecording> result = kafkaTemplate.send(TOPIC_IN, kv.key, kv.value).get();
+                                               log.info(
+                                                               "Sent: {}={}, partition={}, offset={}",
+                                                               result.getProducerRecord().key(),
+                                                               result.getProducerRecord().value(),
+                                                               result.getRecordMetadata().partition(),
+                                                               result.getRecordMetadata().offset());
+                                       }
+                                       catch (Exception e)
+                                       {
+                                               throw new RuntimeException(e);
+                                       }
+                               });
        }
 
+
        @Test
        void testSendMessage() throws Exception
        {
-               TestData.writeInputData((user, recording) -> kafkaTemplate.send(TOPIC_IN, user, recording));
-
-               await("Expexted converted data")
+               await("Expected converted data")
                                .atMost(Duration.ofSeconds(5))
                                .untilAsserted(() ->
                                                TestData.assertExpectedMessages(consumer.getReceivedMessages()));
@@ -70,18 +91,18 @@ public class SplitterApplicationIT
 
        static class Consumer
        {
-               private final MultiValueMap<String, TestWord> received = new LinkedMultiValueMap<>();
+               private final MultiValueMap<TestOutputUser, TestOutputWord> received = new LinkedMultiValueMap<>();
 
                @KafkaListener(groupId = "TEST", topics = TOPIC_OUT)
                public synchronized void receive(
-                               @Header(KafkaHeaders.RECEIVED_KEY) String key,
-                               @Payload TestWord value)
+                               @Header(KafkaHeaders.RECEIVED_KEY) TestOutputUser key,
+                               @Payload TestOutputWord value)
                {
                        log.debug("Received message: {}={}", key, value);
                        received.add(key, value);
                }
 
-               synchronized MultiValueMap<String, TestWord> getReceivedMessages()
+               synchronized MultiValueMap<TestOutputUser, TestOutputWord> getReceivedMessages()
                {
                        return received;
                }