package de.juplo.kafka;

import org.apache.kafka.clients.consumer.Consumer;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.ConsumerFactory;

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
@Configuration
-@EnableConfigurationProperties(ApplicationProperties.class)
+@EnableConfigurationProperties({ KafkaProperties.class, ApplicationProperties.class })
public class ApplicationConfiguration
{
@Bean
- public AdderRecordHandler sumRecordHandler()
- {
- return new AdderRecordHandler();
- }
-
- @Bean
- public AdderRebalanceListener sumRebalanceListener(
- AdderRecordHandler adderRecordHandler,
- PartitionStatisticsRepository repository,
- Consumer<String, String> consumer,
- ApplicationProperties properties)
- {
- return new AdderRebalanceListener(
- adderRecordHandler,
- repository,
- properties.getClientId(),
- properties.getTopic(),
- Clock.systemDefaultZone(),
- properties.getCommitInterval(),
- consumer);
- }
-
- @Bean
- public EndlessConsumer<String, String> endlessConsumer(
- KafkaConsumer<String, String> kafkaConsumer,
+ public SimpleConsumer endlessConsumer(
+ Consumer<String, String> kafkaConsumer,
ExecutorService executor,
- AdderRebalanceListener adderRebalanceListener,
- AdderRecordHandler adderRecordHandler,
- ApplicationProperties properties)
+ KafkaProperties kafkaProperties,
+ ApplicationProperties applicationProperties)
{
return
- new EndlessConsumer<>(
+ new SimpleConsumer(
executor,
- properties.getClientId(),
- properties.getTopic(),
- kafkaConsumer,
- adderRebalanceListener,
- adderRecordHandler);
+ kafkaProperties.getClientId(),
+ applicationProperties.getTopic(),
+ kafkaConsumer);
}
@Bean
}
@Bean(destroyMethod = "close")
- public KafkaConsumer<String, String> kafkaConsumer(ApplicationProperties properties)
+ public Consumer<String, String> kafkaConsumer(ConsumerFactory<String, String> factory)
{
- Properties props = new Properties();
-
- props.put("bootstrap.servers", properties.getBootstrapServer());
- props.put("partition.assignment.strategy", "org.apache.kafka.clients.consumer.CooperativeStickyAssignor");
- props.put("group.id", properties.getGroupId());
- props.put("client.id", properties.getClientId());
- props.put("enable.auto.commit", false);
- props.put("auto.offset.reset", properties.getAutoOffsetReset());
- props.put("metadata.max.age.ms", "1000");
- props.put("key.deserializer", StringDeserializer.class.getName());
- props.put("value.deserializer", StringDeserializer.class.getName());
-
- return new KafkaConsumer<>(props);
+ return factory.createConsumer();
}
}