import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.errors.RecordDeserializationException;
import org.apache.kafka.common.serialization.BytesDeserializer;
import org.apache.kafka.common.serialization.BytesSerializer;
import org.apache.kafka.common.serialization.LongSerializer;
+import org.apache.kafka.common.serialization.Serializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.common.utils.Bytes;
import org.junit.jupiter.api.*;
import org.springframework.beans.factory.annotation.Autowired;
StringSerializer stringSerializer = new StringSerializer();
- LongSerializer longSerializer = new LongSerializer();
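+ // Autowire the value serializer instead of instantiating it, so the test
+ // Configuration below decides how record values are serialized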
+ @Autowired
+ Serializer<Long> valueSerializer;
@Autowired
KafkaProducer<String, Bytes> kafkaProducer;
@Autowired
@Order(1) // << The poison pill is not skipped; hence, this test must run first
void commitsCurrentOffsetsOnSuccess() throws ExecutionException, InterruptedException
{
- send100Messages(i -> new Bytes(longSerializer.serialize(TOPIC, i)));
+ send100Messages(i -> new Bytes(valueSerializer.serialize(TOPIC, i)));
await("100 records received")
.atMost(Duration.ofSeconds(30))
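+ // Record no. 77 is serialized as a String: a poison pill that the
+ // consumer's Long deserializer cannot handle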
send100Messages(counter ->
counter == 77
? new Bytes(stringSerializer.serialize(TOPIC, "BOOM!"))
- : new Bytes(longSerializer.serialize(TOPIC, counter)));
+ : new Bytes(valueSerializer.serialize(TOPIC, counter)));
await("Consumer failed")
.atMost(Duration.ofSeconds(30))
@Import(ApplicationConfiguration.class)
public static class Configuration
{
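+ // Provides the Serializer that is autowired as valueSerializer in the
+ // test: record values are written as Long, matching what the consumer
+ // under test expects to deserialize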
+ @Bean
+ Serializer<Long> serializer()
+ {
+ return new LongSerializer();
+ }
+
@Bean
KafkaProducer<String, Bytes> kafkaProducer(ApplicationProperties properties)
{