X-Git-Url: https://juplo.de/gitweb/?a=blobdiff_plain;f=src%2Ftest%2Fjava%2Fde%2Fjuplo%2Fkafka%2Fwordcount%2Fcounter%2FCounterApplicationIT.java;h=2f1e0c3a2b2b9a02577264c7dc43c311e904dba4;hb=44f1ad5dcd50851ef5d93b1be759481d5a38f63a;hp=5a3507ac0611cef4a87e34288b4a2cca7b9af675;hpb=35943a021ac4b25fad29861ecc083b6974ea2732;p=demos%2Fkafka%2Fwordcount

diff --git a/src/test/java/de/juplo/kafka/wordcount/counter/CounterApplicationIT.java b/src/test/java/de/juplo/kafka/wordcount/counter/CounterApplicationIT.java
index 5a3507a..2f1e0c3 100644
--- a/src/test/java/de/juplo/kafka/wordcount/counter/CounterApplicationIT.java
+++ b/src/test/java/de/juplo/kafka/wordcount/counter/CounterApplicationIT.java
@@ -1,11 +1,9 @@
 package de.juplo.kafka.wordcount.counter;
 
-import lombok.RequiredArgsConstructor;
+import de.juplo.kafka.wordcount.splitter.TestInputWord;
+import de.juplo.kafka.wordcount.top10.TestOutputWord;
+import de.juplo.kafka.wordcount.top10.TestOutputWordCounter;
 import lombok.extern.slf4j.Slf4j;
-import org.apache.kafka.clients.consumer.ConsumerConfig;
-import org.apache.kafka.clients.consumer.ConsumerRecord;
-import org.apache.kafka.clients.producer.ProducerConfig;
-import org.apache.kafka.streams.KeyValue;
 import org.apache.kafka.streams.state.KeyValueBytesStoreSupplier;
 import org.apache.kafka.streams.state.Stores;
 import org.junit.jupiter.api.BeforeEach;
@@ -16,41 +14,45 @@ import org.springframework.boot.test.context.TestConfiguration;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Primary;
 import org.springframework.kafka.annotation.KafkaListener;
-import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
-import org.springframework.kafka.core.*;
-import org.springframework.kafka.support.serializer.JsonDeserializer;
-import org.springframework.kafka.support.serializer.JsonSerializer;
+import org.springframework.kafka.core.KafkaTemplate;
+import org.springframework.kafka.support.KafkaHeaders;
 import org.springframework.kafka.test.context.EmbeddedKafka;
+import org.springframework.messaging.handler.annotation.Header;
+import org.springframework.messaging.handler.annotation.Payload;
+import org.springframework.util.LinkedMultiValueMap;
+import org.springframework.util.MultiValueMap;
 
 import java.time.Duration;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
 
-import static de.juplo.kafka.wordcount.counter.CounterApplicationIT.*;
-import static de.juplo.kafka.wordcount.counter.TestData.convertToMap;
-import static org.awaitility.Awaitility.*;
+import static de.juplo.kafka.wordcount.counter.CounterApplicationIT.TOPIC_IN;
+import static de.juplo.kafka.wordcount.counter.CounterApplicationIT.TOPIC_OUT;
+import static org.awaitility.Awaitility.await;
 
 
 @SpringBootTest(
 		properties = {
-				"spring.kafka.bootstrap-servers=${spring.embedded.kafka.brokers}",
+				"spring.kafka.producer.value-serializer=org.springframework.kafka.support.serializer.JsonSerializer",
+				"spring.kafka.producer.properties.spring.json.add.type.headers=false",
+				"spring.kafka.consumer.auto-offset-reset=earliest",
+				"spring.kafka.consumer.key-deserializer=org.springframework.kafka.support.serializer.JsonDeserializer",
+				"spring.kafka.consumer.value-deserializer=org.springframework.kafka.support.serializer.JsonDeserializer",
+				"spring.kafka.consumer.properties.spring.json.type.mapping=word:de.juplo.kafka.wordcount.top10.TestOutputWord,counter:de.juplo.kafka.wordcount.top10.TestOutputWordCounter",
+				"logging.level.root=WARN",
+				"logging.level.de.juplo=DEBUG",
 				"juplo.wordcount.counter.bootstrap-server=${spring.embedded.kafka.brokers}",
 				"juplo.wordcount.counter.commit-interval=0",
 				"juplo.wordcount.counter.cacheMaxBytes=0",
 				"juplo.wordcount.counter.input-topic=" + TOPIC_IN,
 				"juplo.wordcount.counter.output-topic=" + TOPIC_OUT })
-@EmbeddedKafka(topics = { TOPIC_IN, TOPIC_OUT }, partitions = PARTITIONS)
+@EmbeddedKafka(topics = { TOPIC_IN, TOPIC_OUT })
 @Slf4j
 public class CounterApplicationIT
 {
-	public final static String TOPIC_IN = "in";
-	public final static String TOPIC_OUT = "out";
-	static final int PARTITIONS = 2;
+	public static final String TOPIC_IN = "in";
+	public static final String TOPIC_OUT = "out";
 
 	@Autowired
-	KafkaTemplate kafkaTemplate;
+	KafkaTemplate kafkaTemplate;
 	@Autowired
 	Consumer consumer;
 
@@ -65,27 +67,30 @@ public class CounterApplicationIT
 	@Test
 	void testSendMessage()
 	{
-		TestData.writeInputData((key, value) -> kafkaTemplate.send(TOPIC_IN, key, value));
+		TestData
+				.getInputMessages()
+				.forEach(kv -> kafkaTemplate.send(TOPIC_IN, kv.key, kv.value));
 
-		await("Expexted converted data")
+		await("Expected messages")
 				.atMost(Duration.ofSeconds(10))
-				.untilAsserted(() -> TestData.assertExpectedResult(consumer.getReceivedMessages()));
+				.untilAsserted(() -> TestData.assertExpectedMessages(consumer.getReceivedMessages()));
 	}
 
 
-	@RequiredArgsConstructor
 	static class Consumer
 	{
-		private final List> received = new LinkedList<>();
+		private final MultiValueMap received = new LinkedMultiValueMap<>();
 
 		@KafkaListener(groupId = "TEST", topics = TOPIC_OUT)
-		public synchronized void receive(ConsumerRecord record)
+		public synchronized void receive(
+				@Header(KafkaHeaders.RECEIVED_KEY) TestOutputWord word,
+				@Payload TestOutputWordCounter counter)
 		{
-			log.debug("Received message: {}", record);
-			received.add(KeyValue.pair(record.key(),record.value()));
+			log.debug("Received message: {} -> {}", word, counter);
+			received.add(word, counter);
 		}
 
-		synchronized List> getReceivedMessages()
+		synchronized MultiValueMap getReceivedMessages()
 		{
 			return received;
 		}
@@ -94,45 +99,6 @@ public class CounterApplicationIT
 	@TestConfiguration
 	static class Configuration
 	{
-		@Bean
-		ProducerFactory producerFactory(Properties streamProcessorProperties)
-		{
-			Map propertyMap = convertToMap(streamProcessorProperties);
-
-			propertyMap.put(
-					ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
-					JsonSerializer.class.getName());
-			propertyMap.put(
-					ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
-					JsonSerializer.class.getName());
-
-			return new DefaultKafkaProducerFactory<>(propertyMap);
-		}
-
-		@Bean
-		ConcurrentKafkaListenerContainerFactory kafkaListenerContainerFactory(
-				Properties streamProcessorProperties)
-		{
-			Map propertyMap = convertToMap(streamProcessorProperties);
-
-			propertyMap.put(
-					ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
-					JsonDeserializer.class.getName());
-			propertyMap.put(
-					ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
-					JsonDeserializer.class.getName());
-
-			ConsumerFactory consumerFactory =
-					new DefaultKafkaConsumerFactory<>(propertyMap);
-
-			ConcurrentKafkaListenerContainerFactory kafkaListenerContainerFactory =
-					new ConcurrentKafkaListenerContainerFactory<>();
-
-			kafkaListenerContainerFactory.setConsumerFactory(consumerFactory);
-
-			return kafkaListenerContainerFactory;
-		}
-
 		@Bean
 		Consumer consumer()
 		{