package de.juplo.kafka;

import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.LongDeserializer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;

import java.util.Properties;
import java.util.concurrent.Executors;
@EnableConfigurationProperties(ApplicationProperties.class)
public class Application
{
- @Autowired
- ApplicationProperties properties;
-
-
@Bean
- public EndlessConsumer consumer()
+ public EndlessConsumer endlessConsumer(
+ KafkaConsumer<String, String> kafkaConsumer,
+ ApplicationProperties properties)
{
EndlessConsumer consumer =
new EndlessConsumer(
Executors.newFixedThreadPool(1),
- properties.getBootstrapServer(),
- properties.getGroupId(),
properties.getClientId(),
properties.getTopic(),
- properties.getAutoOffsetReset());
+ kafkaConsumer);
consumer.start();
return consumer;
}
+ @Bean(destroyMethod = "close")
+ public KafkaConsumer<String, String> kafkaConsumer(ApplicationProperties properties)
+ {
+ Properties props = new Properties();
+
+ props.put("bootstrap.servers", properties.getBootstrapServer());
+ props.put("group.id", properties.getGroupId());
+ props.put("client.id", properties.getClientId());
+ props.put("auto.offset.reset", properties.getAutoOffsetReset());
+ props.put("metadata.max.age.ms", "1000");
+ props.put("key.deserializer", StringDeserializer.class.getName());
+ props.put("value.deserializer", LongDeserializer.class.getName());
+
+ return new KafkaConsumer<>(props);
+ }
+
+
public static void main(String[] args)
{
SpringApplication.run(Application.class, args);