package de.juplo.kafka;

import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.errors.WakeupException;
import org.apache.kafka.common.serialization.BytesSerializer;
import org.apache.kafka.common.serialization.LongSerializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.common.utils.Bytes;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.ConfigDataApplicationContextInitializer;
import org.springframework.boot.test.context.TestConfiguration;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Import;
import org.springframework.kafka.test.context.EmbeddedKafka;
import org.springframework.test.context.TestPropertySource;
import org.springframework.test.context.junit.jupiter.SpringJUnitConfig;

import java.util.*;
import java.util.concurrent.ExecutorService;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import static de.juplo.kafka.ApplicationTests.PARTITIONS;
import static de.juplo.kafka.ApplicationTests.TOPIC;
import static org.assertj.core.api.Assertions.assertThat;

@SpringJUnitConfig(initializers = ConfigDataApplicationContextInitializer.class)
@TestPropertySource(
    properties = {
        "consumer.bootstrap-server=${spring.embedded.kafka.brokers}",
        "consumer.topic=" + TOPIC })
@EmbeddedKafka(topics = TOPIC, partitions = PARTITIONS)
@Slf4j
class ApplicationTests
{
  public static final String TOPIC = "FOO";
  public static final int PARTITIONS = 10;

  StringSerializer stringSerializer = new StringSerializer();
  LongSerializer longSerializer = new LongSerializer();

  @Autowired
  KafkaProducer<String, Bytes> kafkaProducer;
  @Autowired
  KafkaConsumer<String, Long> kafkaConsumer;
  @Autowired
  ApplicationProperties properties;
  @Autowired
  ExecutorService executor;
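
  /**
   * Happy path: all 100 messages can be consumed successfully, so the
   * offsets seen afterwards must point directly behind the last record
   * consumed from each partition.
   */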
  @Test
  void commitsCurrentOffsetsOnSuccess()
  {
    send100Messages(i -> new Bytes(longSerializer.serialize(TOPIC, i)));

    Set<ConsumerRecord<String, Long>> received = new HashSet<>();
    Map<Integer, Long> offsets = runEndlessConsumer(record ->
    {
      received.add(record);
      if (received.size() == 100)
        throw new WakeupException();
    });

    check(offsets);
  }
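
  /**
   * Error path: one message cannot be deserialized as a Long, the consumer
   * dies without committing, so the offsets must still match the positions
   * from before the test run.
   */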
  @Test
  void commitsNoOffsetsOnError()
  {
    // One message (which one is arbitrary; no. 77 is used here) is sent as
    // the String "BOOM!", which cannot be deserialized as a Long
    send100Messages(counter ->
        counter == 77
            ? new Bytes(stringSerializer.serialize(TOPIC, "BOOM!"))
            : new Bytes(longSerializer.serialize(TOPIC, counter)));

    Map<Integer, Long> oldOffsets = new HashMap<>();
    doForCurrentOffsets((tp, offset) -> oldOffsets.put(tp.partition(), offset - 1));
    Map<Integer, Long> newOffsets = runEndlessConsumer((record) -> {});

    check(oldOffsets);
  }
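
  /**
   * Sends 100 messages: 10 to each of the 10 partitions, alternating
   * between the keys "0" and "1". The payload is produced by the given
   * generator, so that tests can inject unreadable messages.
   */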
  void send100Messages(Function<Long, Bytes> messageGenerator)
  {
    long i = 0;

    for (int partition = 0; partition < 10; partition++)
    {
      for (int key = 0; key < 10; key++)
      {
        Bytes value = messageGenerator.apply(++i);

        ProducerRecord<String, Bytes> record =
            new ProducerRecord<>(
                TOPIC,
                partition,
                Integer.toString(key % 2),
                value);

        kafkaProducer.send(record, (metadata, e) ->
        {
          if (metadata != null)
          {
            // Send succeeded: log where the message ended up
            log.debug(
                "{}|{} - {}={}",
                metadata.partition(),
                metadata.offset(),
                record.key(),
                record.value());
          }
          else
          {
            log.warn(
                "Exception for {}={}: {}",
                record.key(),
                record.value(),
                e.toString());
          }
        });
      }
    }
  }
  Map<Integer, Long> runEndlessConsumer(Consumer<ConsumerRecord<String, Long>> consumer)
  {
    Map<Integer, Long> offsets = new HashMap<>();
    doForCurrentOffsets((tp, offset) -> offsets.put(tp.partition(), offset - 1));
    Consumer<ConsumerRecord<String, Long>> captureOffset =
        record -> offsets.put(record.partition(), record.offset());
    EndlessConsumer<String, Long> endlessConsumer =
        new EndlessConsumer<>(
            executor,
            properties.getClientId(),
            properties.getTopic(),
            kafkaConsumer,
            captureOffset.andThen(consumer));

    endlessConsumer.run();

    return offsets;
  }
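
  /** Returns a TopicPartition instance for each partition of the test topic. */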
  List<TopicPartition> partitions()
  {
    return
        IntStream
            .range(0, PARTITIONS)
            .mapToObj(partition -> new TopicPartition(TOPIC, partition))
            .collect(Collectors.toList());
  }
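
  /**
   * Assigns the test consumer to all partitions, hands the current position
   * of each partition to the given callback, and unsubscribes again.
   */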
  void doForCurrentOffsets(BiConsumer<TopicPartition, Long> consumer)
  {
    kafkaConsumer.assign(partitions());
    partitions().forEach(tp -> consumer.accept(tp, kafkaConsumer.position(tp)));
    kafkaConsumer.unsubscribe();
  }
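
  /**
   * Asserts that the current position of each partition is exactly one
   * ahead of the offset recorded for it in the given map.
   */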
  void check(Map<Integer, Long> offsets)
  {
    doForCurrentOffsets((tp, offset) ->
    {
      Long expected = offsets.get(tp.partition()) + 1;
      log.debug("Checking if the offset for {} is {}", tp, expected);
      assertThat(offset).isEqualTo(expected);
    });
  }
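
  /**
   * Imports the application's configuration and adds a producer for
   * writing the test messages.
   */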
  @TestConfiguration
  @Import(ApplicationConfiguration.class)
  public static class Configuration
  {
    @Bean
    KafkaProducer<String, Bytes> kafkaProducer(ApplicationProperties properties)
    {
      Properties props = new Properties();
      props.put("bootstrap.servers", properties.getBootstrapServer());
      props.put("linger.ms", 100);
      props.put("key.serializer", StringSerializer.class.getName());
      props.put("value.serializer", BytesSerializer.class.getName());

      return new KafkaProducer<>(props);
    }
  }
}