import java.util.Optional;
import java.util.Properties;
import java.util.concurrent.ExecutorService;

import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration
-@EnableConfigurationProperties(ApplicationProperties.class)
+@EnableConfigurationProperties({ KafkaProperties.class, ApplicationProperties.class })
public class ApplicationConfiguration
{
@Bean
public ApplicationRecordHandler recordHandler(
AdderResults adderResults,
- ApplicationProperties properties)
+ KafkaProperties kafkaProperties,
+ ApplicationProperties applicationProperties)
{
return new ApplicationRecordHandler(
adderResults,
- Optional.ofNullable(properties.getThrottle()),
- properties.getClientId());
+ Optional.ofNullable(applicationProperties.getThrottle()),
+ kafkaProperties.getClientId());
}
@Bean
ApplicationRecordHandler recordHandler,
AdderResults adderResults,
StateRepository stateRepository,
- ApplicationProperties properties)
+ KafkaProperties kafkaProperties,
+ ApplicationProperties applicationProperties)
{
return new ApplicationRebalanceListener(
recordHandler,
adderResults,
stateRepository,
- properties.getClientId());
+ kafkaProperties.getClientId());
}
@Bean
ExecutorService executor,
ApplicationRebalanceListener rebalanceListener,
ApplicationRecordHandler recordHandler,
- ApplicationProperties properties)
+ KafkaProperties kafkaProperties,
+ ApplicationProperties applicationProperties)
{
return
new EndlessConsumer<>(
executor,
- properties.getClientId(),
- properties.getTopic(),
+ kafkaProperties.getClientId(),
+ applicationProperties.getTopic(),
kafkaConsumer,
rebalanceListener,
recordHandler);
}
// NOTE(review): this method still contains unapplied diff residue ('+'/'-'
// line prefixes) and is cut off below this view — no return statement or
// closing brace is visible. Left byte-identical; resolve the merge residue
// from version control before compiling. The intended change ('+' lines)
// sources connection settings from Spring Boot's KafkaProperties: group-id,
// auto-offset-reset and the auto-commit interval move under getConsumer(),
// and metadata.max.age.ms is read from the consumer's free-form properties
// map (may return null if the property is not configured — TODO confirm).
@Bean(destroyMethod = "close")
- public KafkaConsumer<String, Message> kafkaConsumer(ApplicationProperties properties)
+ public KafkaConsumer<String, Message> kafkaConsumer(KafkaProperties kafkaProperties)
{
Properties props = new Properties();
- props.put("bootstrap.servers", properties.getBootstrapServer());
+ props.put("bootstrap.servers", kafkaProperties.getBootstrapServers());
props.put("partition.assignment.strategy", "org.apache.kafka.clients.consumer.StickyAssignor");
- props.put("group.id", properties.getGroupId());
- props.put("client.id", properties.getClientId());
- props.put("auto.offset.reset", properties.getAutoOffsetReset());
- props.put("auto.commit.interval.ms", (int)properties.getCommitInterval().toMillis());
- props.put("metadata.max.age.ms", "1000");
+ props.put("group.id", kafkaProperties.getConsumer().getGroupId());
+ props.put("client.id", kafkaProperties.getClientId());
+ props.put("auto.offset.reset", kafkaProperties.getConsumer().getAutoOffsetReset());
+ props.put("auto.commit.interval.ms", (int)kafkaProperties.getConsumer().getAutoCommitInterval().toMillis());
+ props.put("metadata.max.age.ms", kafkaProperties.getConsumer().getProperties().get("metadata.max.age.ms"));
// Deserializers: String keys, JSON-deserialized Message values restricted to
// trusted packages (de.juplo.kafka).
props.put("key.deserializer", StringDeserializer.class.getName());
props.put("value.deserializer", JsonDeserializer.class.getName());
props.put(JsonDeserializer.TRUSTED_PACKAGES, "de.juplo.kafka");