import org.junit.jupiter.api.*;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.kafka.KafkaAutoConfiguration;
+import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.boot.test.context.ConfigDataApplicationContextInitializer;
import org.springframework.boot.test.context.TestConfiguration;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.Primary;
+import org.springframework.kafka.support.serializer.JsonSerializer;
import org.springframework.kafka.test.context.EmbeddedKafka;
import org.springframework.test.context.TestPropertySource;
import org.springframework.test.context.junit.jupiter.SpringJUnitConfig;
import java.util.*;
import java.util.concurrent.ExecutionException;
import java.util.function.BiConsumer;
+import java.util.function.BiFunction;
import java.util.function.Consumer;
-import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.*;
import org.apache.kafka.common.utils.Bytes;
import lombok.extern.slf4j.Slf4j;
import java.time.Duration;

import static org.awaitility.Awaitility.await;
@SpringJUnitConfig(
initializers = ConfigDataApplicationContextInitializer.class,
- classes = {
+ classes = {
EndlessConsumer.class,
KafkaAutoConfiguration.class,
ApplicationTests.Configuration.class })
@TestMethodOrder(MethodOrderer.OrderAnnotation.class)
@TestPropertySource(
properties = {
- "consumer.bootstrap-server=${spring.embedded.kafka.brokers}",
+ "spring.kafka.consumer.bootstrap-servers=${spring.embedded.kafka.brokers}",
"consumer.topic=" + TOPIC })
@EmbeddedKafka(topics = TOPIC, partitions = PARTITIONS)
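// @EmbeddedKafka starts an in-memory broker for the test and exposes its
// address through the spring.embedded.kafka.brokers property used above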
@Slf4j
public class ApplicationTests
{
  @Autowired
KafkaProducer<String, Bytes> kafkaProducer;
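+  // the functional interface java.util.function.Consumer is imported above, so
+  // the Kafka Consumer interface is referenced by its fully qualified name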
@Autowired
+ org.apache.kafka.clients.consumer.Consumer<String, ClientMessage> kafkaConsumer;
+ @Autowired
KafkaConsumer<Bytes, Bytes> offsetConsumer;
@Autowired
- ApplicationProperties properties;
+ ApplicationProperties applicationProperties;
+ @Autowired
+ KafkaProperties kafkaProperties;
@Autowired
EndlessConsumer endlessConsumer;
  Map<TopicPartition, Long> oldOffsets;
Map<TopicPartition, Long> newOffsets;
- Set<ConsumerRecord<String, Long>> receivedRecords;
+ Set<ConsumerRecord<String, ClientMessage>> receivedRecords;
/** Test methods */
@Test
@Order(1) // << The poison pill sent by the error test is not skipped; hence, this test must run first
void commitsCurrentOffsetsOnSuccess() throws ExecutionException, InterruptedException
{
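  // happy path: all 100 messages are valid JSON, so the consumer should
  // process them all and commit the current offsets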
- send100Messages(i -> new Bytes(valueSerializer.serialize(TOPIC, i)));
+ send100Messages((key, counter) -> serialize(key, counter));
await("100 records received")
.atMost(Duration.ofSeconds(30))
@Test
@Order(2)
void commitsOffsetOfErrorForReprocessingOnError()
{
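  // message no. 77 is sent as a plain string instead of JSON: a poison pill
  // the value deserializer cannot parse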
- send100Messages(counter ->
+ send100Messages((key, counter) ->
counter == 77
? new Bytes(stringSerializer.serialize(TOPIC, "BOOM!"))
- : new Bytes(valueSerializer.serialize(TOPIC, counter)));
+ : serialize(key, counter));
await("Consumer failed")
.atMost(Duration.ofSeconds(30))
}
- void send100Messages(Function<Long, Bytes> messageGenerator)
+ void send100Messages(BiFunction<Integer, Long, Bytes> messageGenerator)
{
long i = 0;

for (int partition = 0; partition < 10; partition++)
{
for (int key = 0; key < 10; key++)
{
- Bytes value = messageGenerator.apply(++i);
+ Bytes value = messageGenerator.apply(key, ++i);
      ProducerRecord<String, Bytes> record =
          new ProducerRecord<>(
              TOPIC,
              partition,
              Integer.toString(key%2),
              value);
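+      // type-id header read by Spring's JsonDeserializer on the consumer side
+      // (assumed to be mapped to ClientMessage via spring.json.type.mapping)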
+ record.headers().add("__TypeId__", "message".getBytes());
      kafkaProducer.send(record, (metadata, e) ->
      {
        if (metadata != null)
          log.debug("{}|{} - {}={}", metadata.partition(), metadata.offset(), record.key(), record.value());
        else
          log.warn("Exception for {}={}: {}", record.key(), record.value(), e.toString());
      });
    }
  }
}
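+// helper that wraps the test key/counter in a ClientMessage and serializes it to JSON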
+ Bytes serialize(Integer key, Long value)
+ {
+ ClientMessage message = new ClientMessage();
+ message.setClient(key.toString());
+ message.setMessage(value.toString());
+ return new Bytes(valueSerializer.serialize(TOPIC, message));
+ }
+
@BeforeEach
public void init()
{
  oldOffsets = new HashMap<>();
  newOffsets = new HashMap<>();
  receivedRecords = new HashSet<>();
}
- public static class RecordHandler implements Consumer<ConsumerRecord<String, Long>>
+ public static class RecordHandler implements Consumer<ConsumerRecord<String, ClientMessage>>
{
- Consumer<ConsumerRecord<String, Long>> captureOffsets;
- Consumer<ConsumerRecord<String, Long>> testHandler;
+ Consumer<ConsumerRecord<String, ClientMessage>> captureOffsets;
+ Consumer<ConsumerRecord<String, ClientMessage>> testHandler;
@Override
- public void accept(ConsumerRecord<String, Long> record)
+ public void accept(ConsumerRecord<String, ClientMessage> record)
{
    captureOffsets
        .andThen(testHandler)
        .accept(record);
  }
}

@TestConfiguration
public static class Configuration
{
@Primary
@Bean
- public Consumer<ConsumerRecord<String, Long>> testHandler()
+ public Consumer<ConsumerRecord<String, ClientMessage>> testHandler()
{
return new RecordHandler();
}
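+  // test payloads are ClientMessage objects serialized as JSON; the consumer
+  // side is presumably configured with a matching JsonDeserializer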
@Bean
- Serializer<Long> serializer()
+ Serializer<ClientMessage> serializer()
{
- return new LongSerializer();
+ return new JsonSerializer<>();
}
@Bean
- KafkaProducer<String, Bytes> kafkaProducer(ApplicationProperties properties)
+ KafkaProducer<String, Bytes> kafkaProducer(KafkaProperties properties)
{
Properties props = new Properties();
- props.put("bootstrap.servers", properties.getBootstrapServer());
+ props.put("bootstrap.servers", properties.getConsumer().getBootstrapServers());
props.put("linger.ms", 100);
props.put("key.serializer", StringSerializer.class.getName());
props.put("value.serializer", BytesSerializer.class.getName());
  return new KafkaProducer<>(props);
}
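// a second consumer that joins the same group as the consumer under test,
// so the committed offsets can be read back and verified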
@Bean
- KafkaConsumer<Bytes, Bytes> offsetConsumer(ApplicationProperties properties)
+ KafkaConsumer<Bytes, Bytes> offsetConsumer(KafkaProperties properties)
{
Properties props = new Properties();
- props.put("bootstrap.servers", properties.getBootstrapServer());
+ props.put("bootstrap.servers", properties.getConsumer().getBootstrapServers());
props.put("client.id", "OFFSET-CONSUMER");
- props.put("group.id", properties.getGroupId());
+ props.put("group.id", properties.getConsumer().getGroupId());
props.put("key.deserializer", BytesDeserializer.class.getName());
props.put("value.deserializer", BytesDeserializer.class.getName());