package de.juplo.kafka;
+import lombok.Value;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.springframework.test.context.junit.jupiter.SpringJUnitConfig;
import java.time.Duration;
+import java.time.LocalDateTime;
import java.util.*;
-import java.util.concurrent.ExecutionException;
import java.util.function.BiConsumer;
import java.util.function.BiFunction;
import java.util.stream.Collectors;
public static final int PARTITIONS = 10;
- StringSerializer stringSerializer = new StringSerializer();
-
@Autowired
Serializer valueSerializer;
@Autowired
EndlessConsumer endlessConsumer;
@Autowired
- ClientMessageHandler clientMessageHandler;
+ MessageHandler<ClientMessage> clientMessageHandler;
+ @Autowired
+ MessageHandler<Greeting> greetingsHandler;
Map<TopicPartition, Long> oldOffsets;
Map<TopicPartition, Long> newOffsets;
- Set<ClientMessage> received;
+ Set<Object> received;
/** Test methods */
@Test
- void commitsCurrentOffsetsOnSuccess() throws ExecutionException, InterruptedException
+ void commitsCurrentOffsetsOnSuccess()
{
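+    // Produce 100 ClientMessage records, then wait until all of them have been consumed and their offsets committed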
- send100Messages((key, counter) -> serialize(key, counter));
+ send100Messages((key, counter) -> serializeAsClientMessage(key, counter));
+
+ await("100 records received")
+ .atMost(Duration.ofSeconds(30))
+ .until(() -> received.size() == 100);
+
+ await("Offsets committed")
+ .atMost(Duration.ofSeconds(10))
+ .untilAsserted(() ->
+ {
+ checkSeenOffsetsForProgress();
+ compareToCommitedOffsets(newOffsets);
+ });
+
+ assertThat(endlessConsumer.isRunning())
+ .describedAs("Consumer should still be running")
+ .isTrue();
+ }
+
+
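+  // Every third record is produced as a Greeting, all others as ClientMessages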
+ @Test
+ void mixedMessages()
+ {
+ send100Messages((key, counter) ->
+ counter%3 == 0
+ ? serializeAsGreeting(key)
+ : serializeAsClientMessage(key, counter));
await("100 records received")
.atMost(Duration.ofSeconds(30))
{
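+    // Record no. 77 is produced as a plain string that cannot be deserialized, so only 99 records come through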
send100Messages((key, counter) ->
counter == 77
- ? new Bytes(stringSerializer.serialize(TOPIC, "BOOM!"))
- : serialize(key, counter));
+ ? serializeString("BOOM!", "message")
+ : serializeAsClientMessage(key, counter));
await("99 records received")
.atMost(Duration.ofSeconds(30))
throw new RuntimeException("BOOM: " + clientMessage.message + "%10 == 0");
};
- send100Messages((key, counter) -> serialize(key, counter));
+ send100Messages((key, counter) -> serializeAsClientMessage(key, counter));
await("80 records received")
.atMost(Duration.ofSeconds(30))
}
- void send100Messages(BiFunction<Integer, Long, Bytes> messageGenerator)
+ void send100Messages(BiFunction<Integer, Long, BytesAndType> recordGenerator)
{
long i = 0;
    for (int partition = 0; partition < PARTITIONS; partition++)
    {
for (int key = 0; key < 10; key++)
{
- Bytes value = messageGenerator.apply(key, ++i);
+ BytesAndType bat = recordGenerator.apply(key, ++i);
ProducerRecord<String, Bytes> record =
new ProducerRecord<>(
TOPIC,
partition,
Integer.toString(key%2),
- value);
+ bat.getValue());
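+        // The __TypeId__ header now carries the type hint supplied by the record generator ("message" or "greeting")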
- record.headers().add("__TypeId__", "message".getBytes());
+ record.headers().add("__TypeId__", bat.getType());
kafkaProducer.send(record, (metadata, e) ->
{
if (metadata != null)
}
}
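+  // Helpers that pair a serialized payload with the matching __TypeId__ value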
- Bytes serialize(Integer key, Long value)
+ BytesAndType serializeAsClientMessage(Integer key, Long value)
{
ClientMessage message = new ClientMessage();
message.setClient(key.toString());
message.setMessage(value.toString());
+ return new BytesAndType(serialize(message), "message");
+ }
+
+ BytesAndType serializeAsGreeting(Integer key)
+ {
+ Greeting greeting = new Greeting();
+ greeting.setName(key.toString());
+ greeting.setWhen(LocalDateTime.now());
+ return new BytesAndType(serialize(greeting), "greeting");
+ }
+
+ BytesAndType serializeString(String message, String messageType)
+ {
+ return new BytesAndType(new Bytes(message.getBytes()), messageType);
+ }
+
+ Bytes serialize(Object message)
+ {
return new Bytes(valueSerializer.serialize(TOPIC, message));
}
public void init()
{
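+    // Install no-op test handlers; individual test cases overwrite them as needed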
clientMessageHandler.testHandler = (clientMessage, metadata) -> {};
+ greetingsHandler.testHandler = (greeting, metadata) -> {};
oldOffsets = new HashMap<>();
newOffsets = new HashMap<>();
newOffsets.put(tp, offset - 1);
});
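+    // A single lambda captures received messages and their offsets, regardless of the concrete message type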
- clientMessageHandler.captureOffsets =
+ BiConsumer<?, ConsumerRecordMetadata> captureOffsets =
(clientMessage, metadata) ->
{
received.add(clientMessage);
      newOffsets.put(
          new TopicPartition(metadata.topic(), metadata.partition()), metadata.offset());
};
+ clientMessageHandler.captureOffsets =
+ (BiConsumer<ClientMessage, ConsumerRecordMetadata>)captureOffsets;
+ greetingsHandler.captureOffsets =
+ (BiConsumer<Greeting, ConsumerRecordMetadata>)captureOffsets;
+
endlessConsumer.start();
}
}
}
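+  // Generic handler that first captures the offsets of a received message and then delegates to the test-specific handler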
- public static class ClientMessageHandler implements BiConsumer<ClientMessage, ConsumerRecordMetadata>
+ public static class MessageHandler<T> implements BiConsumer<T, ConsumerRecordMetadata>
{
- BiConsumer<ClientMessage, ConsumerRecordMetadata> captureOffsets;
- BiConsumer<ClientMessage, ConsumerRecordMetadata> testHandler;
+ BiConsumer<T, ConsumerRecordMetadata> captureOffsets;
+ BiConsumer<T, ConsumerRecordMetadata> testHandler;
@Override
- public void accept(ClientMessage clientMessage, ConsumerRecordMetadata metadata)
+ public void accept(T message, ConsumerRecordMetadata metadata)
{
captureOffsets
.andThen(testHandler)
- .accept(clientMessage, metadata);
+ .accept(message, metadata);
}
}
{
@Primary
@Bean
- public BiConsumer<ClientMessage, ConsumerRecordMetadata> testHandler()
+ public MessageHandler<ClientMessage> messageHandler()
+ {
+ return new MessageHandler<>();
+ }
+
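+    // A separate handler bean is registered for each supported message type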
+ @Primary
+ @Bean
+ public MessageHandler<Greeting> greetingsHandler()
{
- return new ClientMessageHandler();
+ return new MessageHandler<>();
}
@Bean
return new KafkaConsumer<>(props);
}
}
+
+
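+  // Value object coupling a serialized payload with the __TypeId__ header value to send along with it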
+ @Value
+ static class BytesAndType
+ {
+ private final Bytes value;
+ private final byte[] type;
+
+
+ BytesAndType(Bytes value, String type)
+ {
+ this.value = value;
+ this.type = type.getBytes();
+ }
+ }
}