import java.util.Optional;

import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration
-@EnableConfigurationProperties(ApplicationProperties.class)
+@EnableConfigurationProperties({ KafkaProperties.class, ApplicationProperties.class })
public class ApplicationConfiguration
{
@Bean
public ApplicationRecordHandler recordHandler(
AdderResults adderResults,
- ApplicationProperties properties)
+ KafkaProperties kafkaProperties,
+ ApplicationProperties applicationProperties)
{
return new ApplicationRecordHandler(
adderResults,
- Optional.ofNullable(properties.getThrottle()),
- properties.getClientId());
+ Optional.ofNullable(applicationProperties.getThrottle()),
+ kafkaProperties.getClientId());
}
@Bean
AdderResults adderResults,
StateRepository stateRepository,
Consumer<String, Message> consumer,
- ApplicationProperties properties)
+ KafkaProperties kafkaProperties,
+ ApplicationProperties applicationProperties)
{
return new ApplicationRebalanceListener(
recordHandler,
adderResults,
stateRepository,
- properties.getClientId(),
- properties.getTopic(),
+ kafkaProperties.getClientId(),
+ applicationProperties.getTopic(),
Clock.systemDefaultZone(),
- properties.getCommitInterval(),
+ kafkaProperties.getConsumer().getAutoCommitInterval(),
consumer);
}
ExecutorService executor,
ApplicationRebalanceListener rebalanceListener,
ApplicationRecordHandler recordHandler,
- ApplicationProperties properties)
+ KafkaProperties kafkaProperties,
+ ApplicationProperties applicationProperties)
{
return
new EndlessConsumer<>(
executor,
- properties.getClientId(),
- properties.getTopic(),
+ kafkaProperties.getClientId(),
+ applicationProperties.getTopic(),
kafkaConsumer,
rebalanceListener,
recordHandler);
}
@Bean(destroyMethod = "close")
- public KafkaConsumer<String, Message> kafkaConsumer(ApplicationProperties properties)
+ public KafkaConsumer<String, Message> kafkaConsumer(KafkaProperties kafkaProperties)
{
Properties props = new Properties();
- props.put("bootstrap.servers", properties.getBootstrapServer());
+ props.put("bootstrap.servers", kafkaProperties.getBootstrapServers());
props.put("partition.assignment.strategy", "org.apache.kafka.clients.consumer.CooperativeStickyAssignor");
- props.put("group.id", properties.getGroupId());
- props.put("client.id", properties.getClientId());
+ props.put("group.id", kafkaProperties.getConsumer().getGroupId());
+ props.put("client.id", kafkaProperties.getClientId());
props.put("enable.auto.commit", false);
- props.put("auto.offset.reset", properties.getAutoOffsetReset());
- props.put("metadata.max.age.ms", "1000");
+ props.put("auto.offset.reset", kafkaProperties.getConsumer().getAutoOffsetReset());
+ props.put("metadata.max.age.ms", kafkaProperties.getConsumer().getProperties().get("metadata.max.age.ms"));
props.put("key.deserializer", StringDeserializer.class.getName());
props.put("value.deserializer", JsonDeserializer.class.getName());
props.put(JsonDeserializer.TRUSTED_PACKAGES, "de.juplo.kafka");
import org.junit.jupiter.api.*;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
+import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.boot.autoconfigure.mongo.MongoProperties;
import org.springframework.boot.test.autoconfigure.data.mongo.AutoConfigureDataMongo;
import org.springframework.boot.test.context.ConfigDataApplicationContextInitializer;
@SpringJUnitConfig(initializers = ConfigDataApplicationContextInitializer.class)
@TestPropertySource(
properties = {
- "sumup.adder.bootstrap-server=${spring.embedded.kafka.brokers}",
+ "spring.kafka.bootstrap-servers=${spring.embedded.kafka.brokers}",
"sumup.adder.topic=" + TOPIC,
- "sumup.adder.commit-interval=500ms",
+ "spring.kafka.consumer.auto-commit-interval=500ms",
"spring.mongodb.embedded.version=4.4.13" })
@EmbeddedKafka(topics = TOPIC, partitions = PARTITIONS)
@EnableAutoConfiguration
@Autowired
Consumer<ConsumerRecord<K, V>> consumer;
@Autowired
- ApplicationProperties properties;
+ ApplicationProperties applicationProperties;
+ @Autowired
+ KafkaProperties kafkaProperties;
@Autowired
ExecutorService executor;
@Autowired
{
Properties props;
props = new Properties();
- props.put("bootstrap.servers", properties.getBootstrapServer());
+ props.put("bootstrap.servers", kafkaProperties.getBootstrapServers());
props.put("linger.ms", 100);
props.put("key.serializer", BytesSerializer.class.getName());
props.put("value.serializer", BytesSerializer.class.getName());
testRecordProducer = new KafkaProducer<>(props);
props = new Properties();
- props.put("bootstrap.servers", properties.getBootstrapServer());
+ props.put("bootstrap.servers", kafkaProperties.getBootstrapServers());
props.put("client.id", "OFFSET-CONSUMER");
- props.put("group.id", properties.getGroupId());
+ props.put("group.id", kafkaProperties.getConsumer().getGroupId());
props.put("key.deserializer", BytesDeserializer.class.getName());
props.put("value.deserializer", BytesDeserializer.class.getName());
offsetConsumer = new KafkaConsumer<>(props);
endlessConsumer =
new EndlessConsumer<>(
executor,
- properties.getClientId(),
- properties.getTopic(),
+ kafkaProperties.getClientId(),
+ applicationProperties.getTopic(),
kafkaConsumer,
rebalanceListener,
captureOffsetAndExecuteTestHandler);