package de.juplo.kafka;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.LongDeserializer;
import org.apache.kafka.common.serialization.StringDeserializer;
import java.util.Properties;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
+import java.util.function.Consumer;
@Configuration
@EnableConfigurationProperties(ApplicationProperties.class)
public class ApplicationConfiguration
{
+ @Bean
+ public Consumer<ConsumerRecord<String, String>> consumer()
+ {
+ return (record) ->
+ {
+ // Handle record
+ };
+ }
+
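What a non-trivial handler might look like instead of the empty lambda above is sketched below; the bean name, the collecting map and its types are invented for illustration, and java.util.List, java.util.Map, java.util.LinkedList and java.util.concurrent.ConcurrentHashMap would have to be imported in addition to the imports shown at the top.

  @Bean
  public Consumer<ConsumerRecord<String, String>> recordCollector()
  {
    // Collects all received values per key (keys may be null, hence the "NULL" fallback)
    Map<String, List<String>> received = new ConcurrentHashMap<>();
    return (record) ->
        received
            .computeIfAbsent(record.key() == null ? "NULL" : record.key(), key -> new LinkedList<>())
            .add(record.value());
  }

If used, such a bean would replace the consumer() bean shown above, since only one handler of this type can be injected unambiguously.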
@Bean
public EndlessConsumer endlessConsumer(
KafkaConsumer<String, String> kafkaConsumer,
ExecutorService executor,
+ Consumer<ConsumerRecord<String, String>> handler,
ApplicationProperties properties)
{
return
    new EndlessConsumer(
        executor,
properties.getClientId(),
properties.getTopic(),
- kafkaConsumer);
+ kafkaConsumer,
+ handler);
}
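The arguments passed above imply a constructor of EndlessConsumer with the following shape. This is only a sketch: an ExecutorService field is assumed in addition to the fields shown next, and the actual class may just as well generate an equivalent constructor via Lombok's @RequiredArgsConstructor.

  public EndlessConsumer(
      ExecutorService executor,
      String id,
      String topic,
      Consumer<String, String> consumer,
      java.util.function.Consumer<ConsumerRecord<String, String>> handler)
  {
    this.executor = executor;
    this.id = id;
    this.topic = topic;
    this.consumer = consumer;
    this.handler = handler;
  }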
private final String id;
private final String topic;
private final Consumer<String, String> consumer;
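+ // fully qualified, because Consumer in this class already refers to org.apache.kafka.clients.consumer.Consumer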
+ private final java.util.function.Consumer<ConsumerRecord<String, String>> handler;
private final Lock lock = new ReentrantLock();
private final Condition condition = lock.newCondition();
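A Lock/Condition pair like this is typically used to let a controlling thread wait for the polling loop to terminate. A purely illustrative sketch follows; the method name and the running flag are assumptions, not part of the original class.

  public void awaitShutdown() throws InterruptedException
  {
    lock.lock();
    try
    {
      // The flag is assumed to be cleared by the polling thread, which then calls condition.signal()
      while (running)
        condition.await();
    }
    finally
    {
      lock.unlock();
    }
  }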
log.info("{} - Received {} messages", id, records.count());
for (ConsumerRecord<String, String> record : records)
{
- consumed++;
log.info(
"{} - {}: {}/{} - {}={}",
id,
record.topic(),
record.partition(),
record.offset(),
record.key(),
record.value()
);
+ handler.accept(record);
+
+ consumed++;
+
Integer partition = record.partition();
String key = record.key() == null ? "NULL" : record.key();
Map<String, Long> byKey = seen.get(partition);