X-Git-Url: https://juplo.de/gitweb/?a=blobdiff_plain;f=src%2Ftest%2Fjava%2Fde%2Fjuplo%2Fkafka%2Fwordcount%2Fcounter%2FCounterApplicationIT.java;h=992164c3a040418838800b0c06a0c84bdbbed282;hb=a5146f975d5383dd2ec046478f20937d821dfa51;hp=fd920006ed9811b60d5f868be2a90d1def1a1d6d;hpb=f8f9b6397ba0096bffa463e09a2db93277a3f9cf;p=demos%2Fkafka%2Fwordcount

diff --git a/src/test/java/de/juplo/kafka/wordcount/counter/CounterApplicationIT.java b/src/test/java/de/juplo/kafka/wordcount/counter/CounterApplicationIT.java
index fd92000..992164c 100644
--- a/src/test/java/de/juplo/kafka/wordcount/counter/CounterApplicationIT.java
+++ b/src/test/java/de/juplo/kafka/wordcount/counter/CounterApplicationIT.java
@@ -1,11 +1,8 @@
 package de.juplo.kafka.wordcount.counter;
 
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import lombok.RequiredArgsConstructor;
-import lombok.Value;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.streams.KeyValue;
 import org.apache.kafka.streams.state.KeyValueBytesStoreSupplier;
 import org.apache.kafka.streams.state.Stores;
 import org.junit.jupiter.api.BeforeEach;
@@ -18,36 +15,45 @@ import org.springframework.context.annotation.Primary;
 import org.springframework.kafka.annotation.KafkaListener;
 import org.springframework.kafka.core.KafkaTemplate;
 import org.springframework.kafka.test.context.EmbeddedKafka;
-import org.springframework.util.LinkedMultiValueMap;
-import org.springframework.util.MultiValueMap;
 
 import java.time.Duration;
+import java.util.LinkedList;
+import java.util.List;
 
-import static de.juplo.kafka.wordcount.counter.CounterApplicationIT.*;
-import static org.assertj.core.api.Assertions.assertThat;
-import static org.awaitility.Awaitility.*;
+import static de.juplo.kafka.wordcount.counter.CounterApplicationIT.TOPIC_IN;
+import static de.juplo.kafka.wordcount.counter.CounterApplicationIT.TOPIC_OUT;
+import static de.juplo.kafka.wordcount.counter.TestData.parseHeader;
+import static org.awaitility.Awaitility.await;
+import static org.springframework.kafka.support.mapping.AbstractJavaTypeMapper.DEFAULT_CLASSID_FIELD_NAME;
+import static org.springframework.kafka.support.mapping.AbstractJavaTypeMapper.KEY_DEFAULT_CLASSID_FIELD_NAME;
 
 
 @SpringBootTest(
 		properties = {
-				"spring.kafka.bootstrap-servers=${spring.embedded.kafka.brokers}",
+				"spring.kafka.producer.value-serializer=org.springframework.kafka.support.serializer.JsonSerializer",
+				"spring.kafka.producer.properties.spring.json.add.type.headers=false",
+				"spring.kafka.consumer.auto-offset-reset=earliest",
+				"spring.kafka.consumer.key-deserializer=org.springframework.kafka.support.serializer.JsonDeserializer",
+				"spring.kafka.consumer.value-deserializer=org.springframework.kafka.support.serializer.JsonDeserializer",
+				"spring.kafka.consumer.properties.spring.json.use.type.headers=false",
+				"spring.kafka.consumer.properties.spring.json.key.default.type=de.juplo.kafka.wordcount.counter.Word",
+				"spring.kafka.consumer.properties.spring.json.value.default.type=de.juplo.kafka.wordcount.counter.WordCounter",
+				"logging.level.root=WARN",
+				"logging.level.de.juplo=DEBUG",
 				"juplo.wordcount.counter.bootstrap-server=${spring.embedded.kafka.brokers}",
 				"juplo.wordcount.counter.commit-interval=0",
 				"juplo.wordcount.counter.cacheMaxBytes=0",
 				"juplo.wordcount.counter.input-topic=" + TOPIC_IN,
 				"juplo.wordcount.counter.output-topic=" + TOPIC_OUT })
-@EmbeddedKafka(topics = { TOPIC_IN, TOPIC_OUT }, partitions = PARTITIONS)
+@EmbeddedKafka(topics = { TOPIC_IN, TOPIC_OUT })
 @Slf4j
 public class CounterApplicationIT
 {
-	public final static String TOPIC_IN = "in";
-	public final static String TOPIC_OUT = "out";
-	static final int PARTITIONS = 2;
+	public static final String TOPIC_IN = "in";
+	public static final String TOPIC_OUT = "out";
 
 	@Autowired
-	KafkaTemplate<String, String> kafkaTemplate;
-	@Autowired
-	ObjectMapper mapper;
+	KafkaTemplate<String, Word> kafkaTemplate;
 	@Autowired
 	Consumer consumer;
 
@@ -60,95 +66,45 @@ public class CounterApplicationIT
 
 
 	@Test
-	void testSendMessage() throws Exception
+	void testSendMessage()
 	{
-		kafkaTemplate.send(TOPIC_IN, "peter", "Hallo");
-		kafkaTemplate.send(TOPIC_IN, "klaus", "Müsch");
-		kafkaTemplate.send(TOPIC_IN, "peter", "Welt");
-		kafkaTemplate.send(TOPIC_IN, "klaus", "Müsch");
-		kafkaTemplate.send(TOPIC_IN, "klaus", "s");
-		kafkaTemplate.send(TOPIC_IN, "peter", "Boäh");
-		kafkaTemplate.send(TOPIC_IN, "peter", "Welt");
-		kafkaTemplate.send(TOPIC_IN, "peter", "Boäh");
-		kafkaTemplate.send(TOPIC_IN, "klaus", "s");
-		kafkaTemplate.send(TOPIC_IN, "peter", "Boäh");
-		kafkaTemplate.send(TOPIC_IN, "klaus", "s");
-
-		Message peter1 = Message.of(
-				"{\"username\":\"peter\",\"word\":\"Hallo\"}",
-				"1");
-		Message peter2 = Message.of(
-				"{\"username\":\"peter\",\"word\":\"Welt\"}",
-				"1");
-		Message peter3 = Message.of(
-				"{\"username\":\"peter\",\"word\":\"Boäh\"}",
-				"1");
-		Message peter4 = Message.of(
-				"{\"username\":\"peter\",\"word\":\"Welt\"}",
-				"2");
-		Message peter5 = Message.of(
-				"{\"username\":\"peter\",\"word\":\"Boäh\"}",
-				"2");
-		Message peter6 = Message.of(
-				"{\"username\":\"peter\",\"word\":\"Boäh\"}",
-				"3");
-
-		Message klaus1 = Message.of(
-				"{\"username\":\"klaus\",\"word\":\"Müsch\"}",
-				"1");
-		Message klaus2 = Message.of(
-				"{\"username\":\"klaus\",\"word\":\"Müsch\"}",
-				"2");
-		Message klaus3 = Message.of(
-				"{\"username\":\"klaus\",\"word\":\"s\"}",
-				"1");
-		Message klaus4 = Message.of(
-				"{\"username\":\"klaus\",\"word\":\"s\"}",
-				"2");
-		Message klaus5 = Message.of(
-				"{\"username\":\"klaus\",\"word\":\"s\"}",
-				"3");
+		TestData.writeInputData((key, value) -> kafkaTemplate.send(TOPIC_IN, key, value));
 
 		await("Expexted converted data")
 				.atMost(Duration.ofSeconds(10))
-				.untilAsserted(() ->
-				{
-					assertThat(consumer.received).hasSize(2);
-					assertThat(consumer.received.get("klaus")).containsExactly(klaus1, klaus2, klaus3, klaus4, klaus5);
-					assertThat(consumer.received.get("peter")).containsExactly(peter1, peter2, peter3, peter4, peter5, peter6);
-				});
+				.untilAsserted(() -> TestData.assertExpectedResult(consumer.getReceivedMessages()));
 	}
 
 
-	@RequiredArgsConstructor
 	static class Consumer
 	{
-		private final MultiValueMap<String, Message> received = new LinkedMultiValueMap<>();
-		private final ObjectMapper mapper;
+		private final List<KeyValue<Word, WordCounter>> received = new LinkedList<>();
 
 		@KafkaListener(groupId = "TEST", topics = TOPIC_OUT)
-		public void receive(ConsumerRecord<String, String> record) throws JsonProcessingException
+		public synchronized void receive(ConsumerRecord<Word, WordCounter> record)
 		{
-			log.debug("Received message: {}", record);
-			Key key = mapper.readValue(record.key(), Key.class);
-			received.add(key.getUsername(), Message.of(record.key(),record.value()));
+			log.debug(
+					"Received message: {} -> {}, key: {}, value: {}",
+					record.key(),
+					record.value(),
+					parseHeader(record.headers(), KEY_DEFAULT_CLASSID_FIELD_NAME),
+					parseHeader(record.headers(), DEFAULT_CLASSID_FIELD_NAME));
+			received.add(KeyValue.pair(record.key(),record.value()));
 		}
-	}
 
-	@Value(staticConstructor = "of")
-	static class Message
-	{
-		String key;
-		String value;
+		synchronized List<KeyValue<Word, WordCounter>> getReceivedMessages()
+		{
+			return received;
+		}
 	}
 
 	@TestConfiguration
 	static class Configuration
 	{
 		@Bean
-		Consumer consumer(ObjectMapper mapper)
+		Consumer consumer()
 		{
-			return new Consumer(mapper);
+			return new Consumer();
 		}
 
 		@Primary