package de.juplo.kafka;

import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.common.serialization.IntegerDeserializer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.support.serializer.JsonSerializer;

import java.util.Properties;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
@Configuration
public class ApplicationConfiguration
{
@Bean
- public Consumer<ConsumerRecord<String, Long>> consumer()
+ public ApplicationRecordHandler recordHandler(
+ KafkaProducer<String, Object> kafkaProducer,
+ ApplicationProperties properties)
{
- return (record) ->
- {
- // Handle record
- };
+ return new ApplicationRecordHandler(
+ kafkaProducer,
+ properties.getClientId(),
+ properties.getTopicOut());
}
@Bean
- public EndlessConsumer<String, Long> endlessConsumer(
- KafkaConsumer<String, Long> kafkaConsumer,
+ public EndlessConsumer<String, Integer> endlessConsumer(
+ KafkaConsumer<String, Integer> kafkaConsumer,
ExecutorService executor,
- Consumer<ConsumerRecord<String, Long>> handler,
- PartitionStatisticsRepository repository,
+ ApplicationRecordHandler recordHandler,
ApplicationProperties properties)
{
return
new EndlessConsumer<>(
executor,
- repository,
properties.getClientId(),
- properties.getTopic(),
- Clock.systemDefaultZone(),
- properties.getCommitInterval(),
+ properties.getTopicIn(),
kafkaConsumer,
- handler);
+ recordHandler);
}
@Bean
}
@Bean(destroyMethod = "close")
- public KafkaConsumer<String, Long> kafkaConsumer(ApplicationProperties properties)
+ public KafkaConsumer<String, Integer> kafkaConsumer(ApplicationProperties properties)
{
Properties props = new Properties();
props.put("partition.assignment.strategy", "org.apache.kafka.clients.consumer.CooperativeStickyAssignor");
props.put("group.id", properties.getGroupId());
props.put("client.id", properties.getClientId());
- props.put("enable.auto.commit", false);
props.put("auto.offset.reset", properties.getAutoOffsetReset());
+ props.put("auto.commit.interval.ms", (int)properties.getCommitInterval().toMillis());
props.put("metadata.max.age.ms", "1000");
props.put("key.deserializer", StringDeserializer.class.getName());
- props.put("value.deserializer", LongDeserializer.class.getName());
+ props.put("value.deserializer", IntegerDeserializer.class.getName());
return new KafkaConsumer<>(props);
}
+
+ @Bean(destroyMethod = "close")
+ public KafkaProducer<String, Object> kafkaProducer(ApplicationProperties properties)
+ {
+ Properties props = new Properties();
+ props.put("bootstrap.servers", properties.getBootstrapServer());
+ props.put("client.id", properties.getClientId());
+ props.put("acks", properties.getAcks());
+ props.put("batch.size", properties.getBatchSize());
+ props.put("delivery.timeout.ms", 20000); // 20 Sekunden
+ props.put("request.timeout.ms", 10000); // 10 Sekunden
+ props.put("linger.ms", properties.getLingerMs());
+ props.put("compression.type", properties.getCompressionType());
+ props.put("key.serializer", StringSerializer.class.getName());
+ props.put("value.serializer", "TODO: JsonSerializer konfigurieren");
+
+ return new KafkaProducer<>(props);
+ }
}