package de.juplo.kafka;
-import org.apache.kafka.clients.consumer.KafkaConsumer;
-import org.apache.kafka.common.serialization.LongDeserializer;
-import org.apache.kafka.common.serialization.StringDeserializer;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
+import org.springframework.kafka.listener.CommonContainerStoppingErrorHandler;
-import java.util.Properties;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
+import java.util.function.Consumer;
@Configuration
public class ApplicationConfiguration
{
@Bean
- public EndlessConsumer endlessConsumer(
- KafkaConsumer<String, String> kafkaConsumer,
- ExecutorService executor,
- ApplicationProperties properties)
+ /**
+ * Record handler invoked by the Spring-Kafka listener container for each
+ * consumed record; replaces the hand-rolled {@code EndlessConsumer} polling
+ * loop removed by this change.
+ *
+ * @return a {@link java.util.function.Consumer} applied to every
+ * {@code ConsumerRecord<String, Long>} — currently a no-op stub
+ */
+ public Consumer<ConsumerRecord<String, Long>> consumer()
{
- return
- new EndlessConsumer(
- executor,
- properties.getClientId(),
- properties.getTopic(),
- kafkaConsumer);
+ return (record) ->
+ {
+ // Handle record
+ };
}
@Bean
- public ExecutorService executor()
+ /**
+ * Error handler for the listener container. Per Spring-Kafka semantics,
+ * {@code CommonContainerStoppingErrorHandler} stops the container when a
+ * record-processing error occurs instead of retrying/skipping.
+ * NOTE(review): intended replacement for the removed manual consumer
+ * lifecycle (executor + close) — verify stop-on-error is the desired policy.
+ *
+ * @return the container-stopping error handler bean
+ */
+ public CommonContainerStoppingErrorHandler errorHandler()
{
- return Executors.newSingleThreadExecutor();
- }
-
- @Bean(destroyMethod = "close")
- public KafkaConsumer<String, String> kafkaConsumer(ApplicationProperties properties)
- {
- Properties props = new Properties();
-
- props.put("bootstrap.servers", properties.getBootstrapServer());
- props.put("group.id", properties.getGroupId());
- props.put("client.id", properties.getClientId());
- props.put("auto.offset.reset", properties.getAutoOffsetReset());
- props.put("metadata.max.age.ms", "1000");
- props.put("key.deserializer", StringDeserializer.class.getName());
- props.put("value.deserializer", LongDeserializer.class.getName());
-
- return new KafkaConsumer<>(props);
+ return new CommonContainerStoppingErrorHandler();
}
}