X-Git-Url: https://juplo.de/gitweb/?a=blobdiff_plain;f=src%2Ftest%2Fjava%2Fde%2Fjuplo%2Fkafka%2FApplicationTests.java;h=51d579eabab4ae0c6765b80f852bc2827a11a88e;hb=c6a33a3c27563018d99a56fe4069f20de64e9f4c;hp=5e439d6947bddf94c80bb3fdf6b034d0e709b888;hpb=786482f3457e9ef966b7847c605d176fc6915884;p=demos%2Fkafka%2Ftraining

diff --git a/src/test/java/de/juplo/kafka/ApplicationTests.java b/src/test/java/de/juplo/kafka/ApplicationTests.java
index 5e439d6..51d579e 100644
--- a/src/test/java/de/juplo/kafka/ApplicationTests.java
+++ b/src/test/java/de/juplo/kafka/ApplicationTests.java
@@ -1,203 +1,84 @@
 package de.juplo.kafka;
 
-import lombok.extern.slf4j.Slf4j;
 import org.apache.kafka.clients.consumer.ConsumerRecord;
-import org.apache.kafka.clients.consumer.KafkaConsumer;
-import org.apache.kafka.clients.producer.KafkaProducer;
 import org.apache.kafka.clients.producer.ProducerRecord;
-import org.apache.kafka.common.TopicPartition;
-import org.apache.kafka.common.errors.WakeupException;
-import org.apache.kafka.common.serialization.BytesSerializer;
 import org.apache.kafka.common.serialization.LongSerializer;
 import org.apache.kafka.common.serialization.StringSerializer;
 import org.apache.kafka.common.utils.Bytes;
-import org.junit.jupiter.api.MethodOrderer;
-import org.junit.jupiter.api.Order;
-import org.junit.jupiter.api.Test;
-import org.junit.jupiter.api.TestMethodOrder;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.test.context.ConfigDataApplicationContextInitializer;
 import org.springframework.boot.test.context.TestConfiguration;
 import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Import;
-import org.springframework.kafka.test.context.EmbeddedKafka;
-import org.springframework.test.context.TestPropertySource;
-import org.springframework.test.context.junit.jupiter.SpringJUnitConfig;
+import org.springframework.context.annotation.Primary;
+import org.springframework.test.context.ContextConfiguration;
 
-import java.util.*;
-import java.util.concurrent.ExecutorService;
-import java.util.function.BiConsumer;
+import java.util.Set;
 import java.util.function.Consumer;
-import java.util.function.Function;
-import java.util.stream.Collectors;
-import java.util.stream.IntStream;
-
-import static de.juplo.kafka.ApplicationTests.PARTITIONS;
-import static de.juplo.kafka.ApplicationTests.TOPIC;
-import static org.assertj.core.api.Assertions.assertThat;
-
 
-@SpringJUnitConfig(initializers = ConfigDataApplicationContextInitializer.class)
-@TestMethodOrder(MethodOrderer.OrderAnnotation.class)
-@TestPropertySource(
-    properties = {
-        "consumer.bootstrap-server=${spring.embedded.kafka.brokers}",
-        "consumer.topic=" + TOPIC })
-@EmbeddedKafka(topics = TOPIC, partitions = PARTITIONS)
-@Slf4j
-class ApplicationTests
+@ContextConfiguration(classes = ApplicationTests.Configuration.class)
+public class ApplicationTests extends GenericApplicationTests<String, Long>
 {
-  public static final String TOPIC = "FOO";
-  public static final int PARTITIONS = 10;
-
-
-  StringSerializer stringSerializer = new StringSerializer();
-  LongSerializer longSerializer = new LongSerializer();
-
-  @Autowired
-  KafkaProducer<String, Bytes> kafkaProducer;
-  @Autowired
-  KafkaConsumer<String, Long> kafkaConsumer;
-  @Autowired
-  ApplicationProperties properties;
-  @Autowired
-  ExecutorService executor;
-
-
-  @Test
-  @Order(1) // << The poison pill is not skipped. Hence, this test must run first
-  void commitsCurrentOffsetsOnSuccess()
-  {
-    send100Messages(i -> new Bytes(longSerializer.serialize(TOPIC, i)));
-
-    Set<ConsumerRecord<String, Long>> received = new HashSet<>();
-    Map<Integer, Long> offsets = runEndlessConsumer(record ->
-    {
-      received.add(record);
-      if (received.size() == 100)
-        throw new WakeupException();
-    });
-
-    check(offsets);
-  }
-
-  @Test
-  @Order(2)
-  void commitsNoOffsetsOnError()
-  {
-    send100Messages(counter ->
-        counter == 77
-            ? new Bytes(stringSerializer.serialize(TOPIC, "BOOM!"))
-            : new Bytes(longSerializer.serialize(TOPIC, counter)));
-
-    Map<Integer, Long> oldOffsets = new HashMap<>();
-    doForCurrentOffsets((tp, offset) -> oldOffsets.put(tp.partition(), offset - 1));
-    Map<Integer, Long> newOffsets = runEndlessConsumer((record) -> {});
-
-    check(oldOffsets);
-  }
-
-
-  void send100Messages(Function<Long, Bytes> messageGenerator)
-  {
-    long i = 0;
-
-    for (int partition = 0; partition < 10; partition++)
-    {
-      for (int key = 0; key < 10; key++)
-      {
-        Bytes value = messageGenerator.apply(++i);
-
-        ProducerRecord<String, Bytes> record =
-            new ProducerRecord<>(
-                TOPIC,
-                partition,
-                Integer.toString(key%2),
-                value);
-
-        kafkaProducer.send(record, (metadata, e) ->
-        {
-          if (metadata != null)
-          {
-            log.debug(
-                "{}|{} - {}={}",
-                metadata.partition(),
-                metadata.offset(),
-                record.key(),
-                record.value());
-          }
-          else
-          {
-            log.warn(
-                "Exception for {}={}: {}",
-                record.key(),
-                record.value(),
-                e.toString());
-          }
-        });
-      }
-    }
-  }
-
-  Map<Integer, Long> runEndlessConsumer(Consumer<ConsumerRecord<String, Long>> consumer)
-  {
-    Map<Integer, Long> offsets = new HashMap<>();
-    doForCurrentOffsets((tp, offset) -> offsets.put(tp.partition(), offset - 1));
-    Consumer<ConsumerRecord<String, Long>> captureOffset =
-        record -> offsets.put(record.partition(), record.offset());
-    EndlessConsumer<String, Long> endlessConsumer =
-        new EndlessConsumer<>(
-            executor,
-            properties.getClientId(),
-            properties.getTopic(),
-            kafkaConsumer,
-            captureOffset.andThen(consumer));
-
-    endlessConsumer.run();
-
-    return offsets;
-  }
-
-  List<TopicPartition> partitions()
-  {
-    return
-        IntStream
-            .range(0, PARTITIONS)
-            .mapToObj(partition -> new TopicPartition(TOPIC, partition))
-            .collect(Collectors.toList());
-  }
-
-  void doForCurrentOffsets(BiConsumer<TopicPartition, Long> consumer)
-  {
-    kafkaConsumer.assign(partitions());
-    partitions().forEach(tp -> consumer.accept(tp, kafkaConsumer.position(tp)));
-    kafkaConsumer.unsubscribe();
-  }
-
-  void check(Map<Integer, Long> offsets)
-  {
-    doForCurrentOffsets((tp, offset) ->
-    {
-      Long expected = offsets.get(tp.partition()) + 1;
-      log.debug("Checking if the offset for {} is {}", tp, expected);
-      assertThat(offset).isEqualTo(expected);
-    });
-  }
-
-
-  @TestConfiguration
-  @Import(ApplicationConfiguration.class)
-  public static class Configuration
-  {
-    @Bean
-    KafkaProducer<String, Bytes> kafkaProducer(ApplicationProperties properties)
-    {
-      Properties props = new Properties();
-      props.put("bootstrap.servers", properties.getBootstrapServer());
-      props.put("linger.ms", 100);
-      props.put("key.serializer", StringSerializer.class.getName());
-      props.put("value.serializer", BytesSerializer.class.getName());
-
-      return new KafkaProducer<>(props);
-    }
-  }
+  public ApplicationTests()
+  {
+    super(
+        new RecordGenerator()
+        {
+          final StringSerializer stringSerializer = new StringSerializer();
+          final LongSerializer longSerializer = new LongSerializer();
+
+
+          @Override
+          public void generate(
+              int numberOfMessagesToGenerate,
+              Set<Integer> poisonPills,
+              Set<Integer> logicErrors,
+              Consumer<ProducerRecord<Bytes, Bytes>> messageSender)
+          {
+            int i = 0;
+
+            for (int partition = 0; partition < 10; partition++)
+            {
+              for (int key = 0; key < 10; key++)
+              {
+                if (++i > numberOfMessagesToGenerate)
+                  return;
+
+                Bytes value = new Bytes(longSerializer.serialize(TOPIC, (long)i));
+                if (logicErrors.contains(i))
+                {
+                  value = new Bytes(longSerializer.serialize(TOPIC, Long.MIN_VALUE));
+                }
+                if (poisonPills.contains(i))
+                {
+                  value = new Bytes(stringSerializer.serialize(TOPIC, "BOOM (Poison-Pill)!"));
+                }
+
+                ProducerRecord<Bytes, Bytes> record =
+                    new ProducerRecord<>(
+                        TOPIC,
+                        partition,
+                        new Bytes(stringSerializer.serialize(TOPIC, Integer.toString(partition*10+key%2))),
+                        value);
+
+                messageSender.accept(record);
+              }
+            }
+          }
+        });
+  }
+
+
+  @TestConfiguration
+  public static class Configuration
+  {
+    @Primary
+    @Bean
+    public Consumer<ConsumerRecord<String, Long>> consumer()
+    {
+      return (record) ->
+      {
+        if (record.value() == Long.MIN_VALUE)
+          throw new RuntimeException("BOOM (Logic-Error)!");
+      };
+    }
+  }
 }
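
Note: GenericApplicationTests itself is not part of this diff. For orientation, the following is a minimal sketch of the base-class contract that the calls above imply. Everything in it is inferred from the super(...) invocation, the @Override signature, and the constants the subclass references (TOPIC, the ten-partition loop), so the names, visibility, and field layout are assumptions, not the repository's actual code:

package de.juplo.kafka;

import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.utils.Bytes;

import java.util.Set;
import java.util.function.Consumer;

// Hypothetical sketch, inferred from the subclass in the diff above.
// K/V are the key and value types of the consumed records; the test
// above binds them as GenericApplicationTests<String, Long>.
public abstract class GenericApplicationTests<K, V>
{
  public static final String TOPIC = "FOO";  // referenced as TOPIC by the generator (value taken from the removed code)
  public static final int PARTITIONS = 10;   // the generator loops over 10 partitions (value taken from the removed code)

  private final RecordGenerator recordGenerator;

  protected GenericApplicationTests(RecordGenerator recordGenerator)
  {
    this.recordGenerator = recordGenerator;
  }

  public interface RecordGenerator
  {
    void generate(
        int numberOfMessagesToGenerate,
        Set<Integer> poisonPills,  // ordinals to send as undeserializable String values
        Set<Integer> logicErrors,  // ordinals to send as Long.MIN_VALUE marker values
        Consumer<ProducerRecord<Bytes, Bytes>> messageSender);
  }
}

The division of labor the diff establishes: the anonymous RecordGenerator decides what goes onto the topic (a String value, "BOOM (Poison-Pill)!", that cannot be deserialized as Long, versus a deserializable Long.MIN_VALUE marker), while the @Primary consumer bean decides how the logic error manifests at processing time, by throwing a RuntimeException. The generic test logic in the base class can thus exercise both failure modes without knowing the concrete serialization.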