import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.errors.RecordDeserializationException;
-import org.apache.kafka.common.serialization.BytesDeserializer;
-import org.apache.kafka.common.serialization.BytesSerializer;
-import org.apache.kafka.common.serialization.LongSerializer;
-import org.apache.kafka.common.serialization.StringSerializer;
+import org.apache.kafka.common.serialization.*;
import org.apache.kafka.common.utils.Bytes;
import org.junit.jupiter.api.*;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.TestConfiguration;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Import;
+import org.springframework.kafka.support.serializer.JsonSerializer;
import org.springframework.kafka.test.context.EmbeddedKafka;
import org.springframework.test.context.TestPropertySource;
import org.springframework.test.context.junit.jupiter.SpringJUnitConfig;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.function.BiConsumer;
+import java.util.function.BiFunction;
import java.util.function.Consumer;
-import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import static de.juplo.kafka.ApplicationTests.PARTITIONS;
import static de.juplo.kafka.ApplicationTests.TOPIC;
-import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.*;
import static org.awaitility.Awaitility.*;
-import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
-import static org.junit.jupiter.api.Assertions.assertThrows;
@SpringJUnitConfig(initializers = ConfigDataApplicationContextInitializer.class)
StringSerializer stringSerializer = new StringSerializer();
- LongSerializer longSerializer = new LongSerializer();
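+ // The fixed LongSerializer is replaced by an injected serializer for
+ // ClientMessage values (provided by the serializer() bean below).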
+ @Autowired
+ Serializer<ClientMessage> valueSerializer;
@Autowired
KafkaProducer<String, Bytes> kafkaProducer;
@Autowired
- KafkaConsumer<String, Long> kafkaConsumer;
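+ // Record values are now deserialized into ClientMessage instances
+ // (presumably via Spring's JsonDeserializer, configured outside this diff).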
+ KafkaConsumer<String, ClientMessage> kafkaConsumer;
@Autowired
KafkaConsumer<Bytes, Bytes> offsetConsumer;
@Autowired
@Autowired
ExecutorService executor;
- Consumer<ConsumerRecord<String, Long>> testHandler;
- EndlessConsumer<String, Long> endlessConsumer;
+ Consumer<ConsumerRecord<String, ClientMessage>> testHandler;
+ EndlessConsumer<String, ClientMessage> endlessConsumer;
Map<TopicPartition, Long> oldOffsets;
Map<TopicPartition, Long> newOffsets;
- Set<ConsumerRecord<String, Long>> receivedRecords;
+ Set<ConsumerRecord<String, ClientMessage>> receivedRecords;
/** Test methods */
@Order(1) // << The poison pill is not skipped. Hence, this test must run first
void commitsCurrentOffsetsOnSuccess() throws ExecutionException, InterruptedException
{
- send100Messages(i -> new Bytes(longSerializer.serialize(TOPIC, i)));
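+ // Every record is a valid JSON-serialized ClientMessage, so all 100 messages should be consumed.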
+ send100Messages((key, counter) -> serialize(key, counter));
await("100 records received")
.atMost(Duration.ofSeconds(30))
compareToCommitedOffsets(newOffsets);
});
- assertThrows(
- IllegalStateException.class,
- () -> endlessConsumer.exitStatus(),
- "Consumer should still be running");
+ assertThatExceptionOfType(IllegalStateException.class)
+ .describedAs("Consumer should still be running")
+ .isThrownBy(() -> endlessConsumer.exitStatus());
}
@Test
@Order(2)
void commitsOffsetOfErrorForReprocessingOnError()
{
- send100Messages(counter ->
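+ // Record no. 77 is a plain string instead of JSON and acts as the poison pill.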
+ send100Messages((key, counter) ->
counter == 77
? new Bytes(stringSerializer.serialize(TOPIC, "BOOM!"))
- : new Bytes(longSerializer.serialize(TOPIC, counter)));
+ : serialize(key, counter));
await("Consumer failed")
.atMost(Duration.ofSeconds(30))
.describedAs("Received not all sent events")
.isLessThan(100);
- assertDoesNotThrow(
- () -> endlessConsumer.exitStatus(),
- "Consumer should not be running");
+ assertThatNoException()
+ .describedAs("Consumer should not be running")
+ .isThrownBy(() -> endlessConsumer.exitStatus());
assertThat(endlessConsumer.exitStatus())
.describedAs("Consumer should have exited abnormally")
.containsInstanceOf(RecordDeserializationException.class);
}
- void send100Messages(Function<Long, Bytes> messageGenerator)
+ void send100Messages(BiFunction<Integer, Long, Bytes> messageGenerator)
{
long i = 0;
{
for (int key = 0; key < 10; key++)
{
- Bytes value = messageGenerator.apply(++i);
+ Bytes value = messageGenerator.apply(key, ++i);
ProducerRecord<String, Bytes> record =
new ProducerRecord<>(
Integer.toString(key%2),
value);
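+ // Spring's JsonDeserializer resolves the target type from the __TypeId__ header;
+ // the type id "message" is assumed to map to ClientMessage in the consumer's
+ // configured type mappings, which are not part of this diff.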
+ record.headers().add("__TypeId__", "message".getBytes());
kafkaProducer.send(record, (metadata, e) ->
{
if (metadata != null)
}
}
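+ // Builds a ClientMessage from the key/counter pair and serializes it to JSON bytes.
+ // ClientMessage is assumed to be a plain POJO with String properties "client" and
+ // "message" (matching the setters used below), rendered e.g. as {"client":"0","message":"1"}.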
+ Bytes serialize(Integer key, Long value)
+ {
+ ClientMessage message = new ClientMessage();
+ message.setClient(key.toString());
+ message.setMessage(value.toString());
+ return new Bytes(valueSerializer.serialize(TOPIC, message));
+ }
+
@BeforeEach
public void init()
newOffsets.put(tp, offset - 1);
});
- Consumer<ConsumerRecord<String, Long>> captureOffsetAndExecuteTestHandler =
+ Consumer<ConsumerRecord<String, ClientMessage>> captureOffsetAndExecuteTestHandler =
record ->
{
newOffsets.put(
@Import(ApplicationConfiguration.class)
public static class Configuration
{
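+ // Exposes the JSON serializer as a bean, so the test above can inject it as valueSerializer.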
+ @Bean
+ Serializer<ClientMessage> serializer()
+ {
+ return new JsonSerializer<>();
+ }
+
@Bean
KafkaProducer<String, Bytes> kafkaProducer(ApplicationProperties properties)
{