import org.springframework.boot.test.context.TestConfiguration;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Import;
+import org.springframework.kafka.support.serializer.JsonSerializer;
import org.springframework.kafka.test.context.EmbeddedKafka;
import org.springframework.test.context.TestPropertySource;
import org.springframework.test.context.junit.jupiter.SpringJUnitConfig;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
-import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
@Autowired
KafkaProducer<String, Bytes> kafkaProducer;
@Autowired
- KafkaConsumer<String, Long> kafkaConsumer;
+ KafkaConsumer<String, ClientMessage> kafkaConsumer;
@Autowired
KafkaConsumer<Bytes, Bytes> offsetConsumer;
@Autowired
ExecutorService executor;
- Consumer<ConsumerRecord<String, Long>> testHandler;
- EndlessConsumer<String, Long> endlessConsumer;
+ Consumer<ConsumerRecord<String, ClientMessage>> testHandler;
+ EndlessConsumer<String, ClientMessage> endlessConsumer;
Map<TopicPartition, Long> oldOffsets;
Map<TopicPartition, Long> newOffsets;
- Set<ConsumerRecord<String, Long>> receivedRecords;
+ Set<ConsumerRecord<String, ClientMessage>> receivedRecords;
/** Test methods */
@Test
@Order(1) // << The poison pill is not skipped. Hence, this test must run first.
void commitsCurrentOffsetsOnSuccess() throws ExecutionException, InterruptedException
{
- send100Messages((key, counter) -> serialize(key, counter));
+ send100Messages((partition, key, counter) ->
+ {
+ Bytes value = serialize(key, counter);
+ return new ProducerRecord<>(TOPIC, partition, key, value);
+ });
await("100 records received")
.atMost(Duration.ofSeconds(30))
@Test
@Order(2)
void commitsOffsetOfErrorForReprocessingOnError()
{
- send100Messages((key, counter) ->
- counter == 77
- ? new Bytes(stringSerializer.serialize(TOPIC, "BOOM!"))
- : serialize(key, counter));
+ send100Messages((partition, key, counter) ->
+ {
+ Bytes value = counter == 77
+ ? new Bytes(stringSerializer.serialize(TOPIC, "BOOM!"))
+ : serialize(key, counter);
+ return new ProducerRecord<>(TOPIC, partition, key, value);
+ });
await("Consumer failed")
.atMost(Duration.ofSeconds(30))
}
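For readers new to this failure mode: with a plain `KafkaConsumer`, the unreadable record surfaces as a `RecordDeserializationException` thrown by `poll()`, which carries the partition and offset of the poison pill. A minimal sketch of how a consumer loop like `EndlessConsumer` might commit that offset for reprocessing (an illustration under these assumptions, not the actual `EndlessConsumer` code):

```java
import java.time.Duration;
import java.util.Map;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.common.errors.RecordDeserializationException;

// Illustration only: commit the offset of the record that failed to
// deserialize, so the very same record is polled again after a restart.
void pollLoop(KafkaConsumer<String, ClientMessage> consumer)
{
  try
  {
    ConsumerRecords<String, ClientMessage> records = consumer.poll(Duration.ofSeconds(1));
    // ... handle records ...
  }
  catch (RecordDeserializationException e)
  {
    consumer.commitSync(Map.of(e.topicPartition(), new OffsetAndMetadata(e.offset())));
    throw e; // let the surrounding loop terminate and report the failure
  }
}
```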
- void send100Messages(BiFunction<Integer, Long, Bytes> messageGenerator)
+ public interface RecordGenerator
+ {
+ ProducerRecord<String, Bytes> generate(int partition, String key, long counter);
+ }
+
+ void send100Messages(RecordGenerator recordGenerator)
{
long i = 0;
for (int partition = 0; partition < 10; partition++)
{
for (int key = 0; key < 10; key++)
{
- Bytes value = messageGenerator.apply(key, ++i);
-
ProducerRecord<String, Bytes> record =
- new ProducerRecord<>(
- TOPIC,
- partition,
- Integer.toString(key%2),
- value);
+ recordGenerator.generate(partition, Integer.toString(partition*10+key%2), ++i);
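+ // Type hint for Spring Kafka's JsonDeserializer: the consumer resolves
+ // this header to the class registered under the type id "message".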
+ record.headers().add("__TypeId__", "message".getBytes());
kafkaProducer.send(record, (metadata, e) ->
{
if (metadata != null)
}
}
- Bytes serialize(Integer key, Long value)
+ Bytes serialize(String key, Long value)
+ {
+ ClientMessage message = new ClientMessage();
+ message.setClient(key);
+ message.setMessage(value.toString());
+ return new Bytes(valueSerializer.serialize(TOPIC, message));
+ }
+
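For reference, the `ClientMessage` class itself is not shown in this diff; `serialize()` only requires a POJO with `client` and `message` properties. A minimal sketch inferred from the setters used above (the real class may carry more fields):

```java
// Assumed shape of ClientMessage, inferred from setClient()/setMessage();
// JsonSerializer needs nothing beyond standard getters and setters.
public class ClientMessage
{
  private String client;
  private String message;

  public String getClient() { return client; }
  public void setClient(String client) { this.client = client; }
  public String getMessage() { return message; }
  public void setMessage(String message) { this.message = message; }
}
```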
@BeforeEach
public void init()
newOffsets.put(tp, offset - 1);
});
- Consumer<ConsumerRecord<String, Long>> captureOffsetAndExecuteTestHandler =
+ Consumer<ConsumerRecord<String, ClientMessage>> captureOffsetAndExecuteTestHandler =
record ->
{
newOffsets.put(
public static class Configuration
{
@Bean
- Serializer<Long> serializer()
+ Serializer<ClientMessage> serializer()
{
- return new LongSerializer();
+ return new JsonSerializer<>();
}
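The consumer-side counterpart is not part of this hunk: the injected `KafkaConsumer<String, ClientMessage>` needs a `JsonDeserializer` that resolves the `__TypeId__` header set in `send100Messages()`. A sketch of such a bean, assuming the type id `message` should map to `ClientMessage` (imports: `org.apache.kafka.common.serialization.Deserializer`, `org.springframework.kafka.support.serializer.JsonDeserializer`, `java.util.Map`):

```java
// Sketch (assumption, not part of this diff): consumer-side deserializer
// that maps the "message" type id to ClientMessage and trusts its package.
@Bean
Deserializer<ClientMessage> deserializer()
{
  JsonDeserializer<ClientMessage> deserializer = new JsonDeserializer<>();
  deserializer.configure(
      Map.of(
          JsonDeserializer.TYPE_MAPPINGS, "message:" + ClientMessage.class.getName(),
          JsonDeserializer.TRUSTED_PACKAGES, ClientMessage.class.getPackageName()),
      false); // false: configure as a value deserializer, not a key deserializer
  return deserializer;
}
```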
@Bean