import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.KafkaConsumer;
-import org.apache.kafka.common.serialization.LongDeserializer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.support.serializer.JsonDeserializer;

import java.util.Properties;
import java.util.concurrent.ExecutorService;
import java.util.function.Consumer;

@Configuration
@EnableConfigurationProperties(ApplicationProperties.class)
public class ApplicationConfiguration
{
@Bean
- public Consumer<ConsumerRecord<String, ClientMessage>> consumer()
+ public Consumer<ConsumerRecord<String, ValidMessage>> consumer()
{
return (record) ->
{
// record handling elided in this hunk
};
}
@Bean
- public EndlessConsumer<String, ClientMessage> endlessConsumer(
- KafkaConsumer<String, ClientMessage> kafkaConsumer,
+ public EndlessConsumer<String, ValidMessage> endlessConsumer(
+ KafkaConsumer<String, ValidMessage> kafkaConsumer,
ExecutorService executor,
- Consumer<ConsumerRecord<String, ClientMessage>> handler,
+ Consumer<ConsumerRecord<String, ValidMessage>> handler,
ApplicationProperties properties)
{
return
    // arguments reconstructed from the bean signature to keep this hunk
    // readable; the clientId/topic accessors are assumptions
    new EndlessConsumer<>(
        executor,
        properties.getClientId(),
        properties.getTopic(),
        kafkaConsumer,
        handler);
}
@Bean(destroyMethod = "close")
- public KafkaConsumer<String, ClientMessage> kafkaConsumer(ApplicationProperties properties)
+ public KafkaConsumer<String, ValidMessage> kafkaConsumer(ApplicationProperties properties)
{
Properties props = new Properties();
props.put("metadata.max.age.ms", "1000");
props.put("key.deserializer", StringDeserializer.class.getName());
props.put("value.deserializer", JsonDeserializer.class.getName());
- props.put(JsonDeserializer.TYPE_MAPPINGS, "message:" + ClientMessage.class.getName());
+ props.put(JsonDeserializer.TYPE_MAPPINGS,
+ "message:" + ClientMessage.class.getName() + "," +
+ "greeting:" + Greeting.class.getName());
props.put(JsonDeserializer.TRUSTED_PACKAGES, "de.juplo.kafka");
return new KafkaConsumer<>(props);
}
}
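
The two type ids registered above ("message" and "greeting") are resolved to concrete classes by Spring's JsonDeserializer via the __TypeId__ record header. The classes themselves are not part of this diff; what follows is a minimal sketch of the assumed hierarchy, with ValidMessage as the new common supertype (the Greeting fields are guesses based on the test code below):

// Hypothetical sketch -- the real definitions live in de.juplo.kafka
public interface ValidMessage {}

public class ClientMessage implements ValidMessage
{
  private String client;
  private String message;
  // getters/setters as used by the removed serialize() helper below
}

public class Greeting implements ValidMessage
{
  private String name;
  private LocalDateTime when; // assumed: the test fills it with LocalDateTime.now()
  // getters/setters omitted
}

The remaining hunks below are taken from the test class.
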
import org.springframework.test.context.junit.jupiter.SpringJUnitConfig;
import java.time.Duration;
+import java.time.LocalDateTime;
import java.util.*;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.function.BiConsumer;
-import java.util.function.BiFunction;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
@Autowired
KafkaProducer<String, Bytes> kafkaProducer;
@Autowired
- KafkaConsumer<String, ClientMessage> kafkaConsumer;
+ KafkaConsumer<String, ValidMessage> kafkaConsumer;
@Autowired
KafkaConsumer<Bytes, Bytes> offsetConsumer;
@Autowired
ExecutorService executor;
- Consumer<ConsumerRecord<String, ClientMessage>> testHandler;
- EndlessConsumer<String, ClientMessage> endlessConsumer;
+ Consumer<ConsumerRecord<String, ValidMessage>> testHandler;
+ EndlessConsumer<String, ValidMessage> endlessConsumer;
Map<TopicPartition, Long> oldOffsets;
Map<TopicPartition, Long> newOffsets;
- Set<ConsumerRecord<String, ClientMessage>> receivedRecords;
+ Set<ConsumerRecord<String, ValidMessage>> receivedRecords;
/** Test methods */
{
send100Messages((partition, key, counter) ->
{
- Bytes value = serialize(key, counter);
- return new ProducerRecord<>(TOPIC, partition, key, value);
+ Bytes value;
+ String type;
+
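+ // every third record is serialized as a Greeting, the rest as ClientMessages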
+ if (counter%3 != 0)
+ {
+ value = serializeClientMessage(key, counter);
+ type = "message";
+ }
+ else
+ {
+ value = serializeGreeting(key, counter);
+ type = "greeting";
+ }
+
+ return toRecord(partition, key, value, type);
});
await("100 records received")
{
send100Messages((partition, key, counter) ->
{
- Bytes value = counter == 77
- ? new Bytes(stringSerializer.serialize(TOPIC, "BOOM!"))
- : serialize(key, counter);
- return new ProducerRecord<>(TOPIC, partition, key, value);
+ Bytes value;
+ String type;
+
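+ // record number 77 is poisoned with the unmapped type id "foo"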
+ if (counter == 77)
+ {
+ value = serializeFooMessage(key, counter);
+ type = "foo";
+ }
+ else
+ {
+ if (counter%3 != 0)
+ {
+ value = serializeClientMessage(key, counter);
+ type = "message";
+ }
+ else
+ {
+ value = serializeGreeting(key, counter);
+ type = "greeting";
+ }
+ }
+
+ return toRecord(partition, key, value, type);
});
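
Record number 77 is serialized with the type id "foo", which is deliberately missing from the TYPE_MAPPINGS configured above, so the JsonDeserializer has no target class for it and the consumer is expected to die on that record. A hedged sketch of how the failure surfaces at poll time (RecordDeserializationException only exists in newer kafka-clients releases; older ones throw a plain SerializationException):

// Hypothetical sketch, assuming org.apache.kafka.common.errors.RecordDeserializationException
try
{
  kafkaConsumer.poll(Duration.ofSeconds(1));
}
catch (RecordDeserializationException e)
{
  // expected: type id "foo" has no entry in TYPE_MAPPINGS
}
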
await("Consumer failed")
ProducerRecord<String, Bytes> record =
recordGenerator.generate(partition, Integer.toString(partition*10+key%2), ++i);
- record.headers().add("__TypeId__", "message".getBytes());
kafkaProducer.send(record, (metadata, e) ->
{
if (metadata != null)
{
  // success/error logging elided in this hunk
}
});
}
- Bytes serialize(String key, Long value)
+ ProducerRecord<String, Bytes> toRecord(int partition, String key, Bytes value, String type)
+ {
+ ProducerRecord<String, Bytes> record =
+ new ProducerRecord<>(TOPIC, partition, key, value);
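+ // the __TypeId__ header tells Spring's JsonDeserializer which mapped type to instantiate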
+ record.headers().add("__TypeId__", type.getBytes());
+ return record;
+ }
+
+ Bytes serializeClientMessage(String key, Long value)
{
- ClientMessage message = new ClientMessage();
- message.setClient(key);
- message.setMessage(value.toString());
+ TestClientMessage message = new TestClientMessage(key, value.toString());
return new Bytes(valueSerializer.serialize(TOPIC, message));
}
+ Bytes serializeGreeting(String key, Long value)
+ {
+ TestGreeting message = new TestGreeting(key, LocalDateTime.now());
+ return new Bytes(valueSerializer.serialize(TOPIC, message));
+ }
+
+ Bytes serializeFooMessage(String key, Long value)
+ {
+ TestFooMessage message = new TestFooMessage(key, value);
+ return new Bytes(valueSerializer.serialize(TOPIC, message));
+ }
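
The Test* value classes used by the serialize helpers above are not shown in this diff. A minimal sketch, assuming plain holders whose JSON shape matches the production classes (all field names here are assumptions):

// Hypothetical test fixtures; the JsonSerializer only cares about the JSON they produce
public class TestClientMessage
{
  public final String client;
  public final String message;
  public TestClientMessage(String client, String message) { this.client = client; this.message = message; }
}

public class TestGreeting
{
  public final String name;
  public final LocalDateTime when;
  public TestGreeting(String name, LocalDateTime when) { this.name = name; this.when = when; }
}

public class TestFooMessage
{
  public final String client;
  public final Long timestamp;
  public TestFooMessage(String client, Long timestamp) { this.client = client; this.timestamp = timestamp; }
}
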
@BeforeEach
public void init()
{
  // setup elided in this hunk; starting offsets are captured per partition (tp, offset):
  newOffsets.put(tp, offset - 1);
});
- Consumer<ConsumerRecord<String, ClientMessage>> captureOffsetAndExecuteTestHandler =
+ Consumer<ConsumerRecord<String, ValidMessage>> captureOffsetAndExecuteTestHandler =
record ->
{
newOffsets.put(
public static class Configuration
{
@Bean
- Serializer<ClientMessage> serializer()
+ Serializer<ValidMessage> serializer()
{
return new JsonSerializer<>();
}
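
With ValidMessage as the common supertype, a record handler can dispatch on the concrete subtype. A minimal sketch of such a handler (instanceof pattern matching needs Java 16+; not part of this diff):

// Hypothetical handler showing subtype dispatch on the deserialized value
Consumer<ConsumerRecord<String, ValidMessage>> handler = record ->
{
  if (record.value() instanceof ClientMessage message)
  {
    // handle a client message
  }
  else if (record.value() instanceof Greeting greeting)
  {
    // handle a greeting
  }
};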