package de.juplo.kafka;
-import org.apache.kafka.clients.consumer.Consumer;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import java.util.Optional;
-import org.springframework.kafka.core.ConsumerFactory;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
+import org.springframework.kafka.config.KafkaListenerEndpointRegistry;
@Configuration
kafkaProperties.getClientId());
}
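+ // Provided that ApplicationErrorHandler implements Spring Kafka's
+ // CommonErrorHandler, this bean is picked up by Spring Boot's
+ // auto-configuration and set on the auto-configured
+ // listener-container-factory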
+ @Bean
+ public ApplicationErrorHandler applicationErrorHandler()
+ {
+ return new ApplicationErrorHandler();
+ }
+
@Bean
public EndlessConsumer<String, Message> endlessConsumer(
- Consumer<String, Message> kafkaConsumer,
- ExecutorService executor,
- ApplicationRebalanceListener rebalanceListener,
RecordHandler recordHandler,
+ ApplicationErrorHandler errorHandler,
KafkaProperties kafkaProperties,
- ApplicationProperties applicationProperties)
+ KafkaListenerEndpointRegistry endpointRegistry)
{
return
new EndlessConsumer<>(
- executor,
kafkaProperties.getClientId(),
- applicationProperties.getTopic(),
- kafkaConsumer,
- rebalanceListener,
+ endpointRegistry,
+ errorHandler,
recordHandler);
}
-
- @Bean
- public ExecutorService executor()
- {
- return Executors.newSingleThreadExecutor();
- }
-
- @Bean(destroyMethod = "close")
- public Consumer<String, Message> kafkaConsumer(ConsumerFactory<String, Message> factory)
- {
- return factory.createConsumer();
- }
}
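
The ApplicationErrorHandler wired in above is not shown in this diff. Below is a minimal sketch of what such a handler could look like, assuming it implements Spring Kafka's CommonErrorHandler and merely records the last exception so that EndlessConsumer.exitStatus() can report it. Only clearState() and getException() are taken from the calls in the code that follows; the overridden callbacks and the use of stopAbnormally() are assumptions and may vary with the Spring Kafka version:

package de.juplo.kafka;

import java.util.Optional;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.springframework.kafka.listener.CommonErrorHandler;
import org.springframework.kafka.listener.MessageListenerContainer;

public class ApplicationErrorHandler implements CommonErrorHandler
{
  private Exception exception;

  public synchronized void clearState()
  {
    this.exception = null;
  }

  public synchronized Optional<Exception> getException()
  {
    return Optional.ofNullable(exception);
  }

  @Override
  public synchronized void handleOtherException(
      Exception thrownException,
      Consumer<?, ?> consumer,
      MessageListenerContainer container,
      boolean batchListener)
  {
    // Remember the exception for exitStatus() and stop the container
    this.exception = thrownException;
    container.stopAbnormally(() -> {});
  }

  @Override
  public synchronized void handleBatch(
      Exception thrownException,
      ConsumerRecords<?, ?> data,
      Consumer<?, ?> consumer,
      MessageListenerContainer container,
      Runnable invokeListener)
  {
    // Exceptions thrown by the batch-listener in EndlessConsumer end up here
    this.exception = thrownException;
    container.stopAbnormally(() -> {});
  }
}
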
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
-import org.apache.kafka.clients.consumer.*;
-import org.apache.kafka.common.TopicPartition;
-import org.apache.kafka.common.errors.RecordDeserializationException;
-import org.apache.kafka.common.errors.WakeupException;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.springframework.kafka.annotation.KafkaListener;
+import org.springframework.kafka.config.KafkaListenerEndpointRegistry;
-import javax.annotation.PreDestroy;
-import java.time.Duration;
-import java.util.*;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.locks.Condition;
-import java.util.concurrent.locks.Lock;
-import java.util.concurrent.locks.ReentrantLock;
+import java.util.List;
+import java.util.Optional;
-@Slf4j
@RequiredArgsConstructor
-public class EndlessConsumer<K, V> implements Runnable
+@Slf4j
+public class EndlessConsumer<K, V>
{
- private final ExecutorService executor;
private final String id;
- private final String topic;
- private final Consumer<K, V> consumer;
- private final ConsumerRebalanceListener rebalanceListener;
+ private final KafkaListenerEndpointRegistry registry;
+ private final ApplicationErrorHandler errorHandler;
private final RecordHandler<K, V> recordHandler;
- private final Lock lock = new ReentrantLock();
- private final Condition condition = lock.newCondition();
- private boolean running = false;
- private Exception exception;
private long consumed = 0;
-
- @Override
- public void run()
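+ // The listener-container is configured via the annotation: the id (not
+ // used as group.id, because of idIsGroup=false) is the handle under which
+ // the container is registered in the KafkaListenerEndpointRegistry,
+ // batch="true" turns the endpoint into a batch-listener and
+ // autoStartup="false" leaves starting/stopping to start()/stop() below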
+ @KafkaListener(
+ id = "${spring.kafka.client-id}",
+ idIsGroup = false,
+ topics = "${sumup.adder.topic}",
+ batch = "true",
+ autoStartup = "false")
+ public void accept(List<ConsumerRecord<K, V>> records)
{
- try
- {
- log.info("{} - Subscribing to topic {}", id, topic);
- consumer.subscribe(Arrays.asList(topic), rebalanceListener);
-
- while (true)
- {
- ConsumerRecords<K, V> records =
- consumer.poll(Duration.ofSeconds(1));
-
// Do something with the data...
- log.info("{} - Received {} messages", id, records.count());
+ log.info("{} - Received {} messages", id, records.size());
for (ConsumerRecord<K, V> record : records)
{
log.info(
    "{} - {}: {}/{} - {}={}",
    id,
    record.topic(),
    record.partition(),
    record.offset(),
    record.key(),
    record.value());
consumed++;
}
- }
- }
- catch(WakeupException e)
- {
- log.info("{} - RIIING! Request to stop consumption - commiting current offsets!", id);
- consumer.commitSync();
- shutdown();
- }
- catch(RecordDeserializationException e)
- {
- TopicPartition tp = e.topicPartition();
- long offset = e.offset();
- log.error(
- "{} - Could not deserialize message on topic {} with offset={}: {}",
- id,
- tp,
- offset,
- e.getCause().toString());
-
- consumer.commitSync();
- shutdown(e);
- }
- catch(Exception e)
- {
- log.error("{} - Unexpected error: {}", id, e.toString(), e);
- shutdown(e);
- }
- finally
- {
- log.info("{} - Consumer-Thread exiting", id);
- }
- }
-
- private void shutdown()
- {
- shutdown(null);
- }
-
- private void shutdown(Exception e)
- {
- lock.lock();
- try
- {
- try
- {
- log.info("{} - Unsubscribing from topic {}", id, topic);
- consumer.unsubscribe();
- }
- catch (Exception ue)
- {
- log.error(
- "{} - Error while unsubscribing from topic {}: {}",
- id,
- topic,
- ue.toString());
- }
- finally
- {
- running = false;
- exception = e;
- condition.signal();
- }
- }
- finally
- {
- lock.unlock();
- }
}
public void start()
{
- lock.lock();
- try
- {
- if (running)
- throw new IllegalStateException("Consumer instance " + id + " is already running!");
-
- log.info("{} - Starting - consumed {} messages before", id, consumed);
- running = true;
- exception = null;
- executor.submit(this);
- }
- finally
- {
- lock.unlock();
- }
- }
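+ // isChildRunning() also returns true while any child of a concurrent
+ // container is still running, not only while the container itself is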
+ if (registry.getListenerContainer(id).isChildRunning())
+ throw new IllegalStateException("Consumer instance " + id + " is already running!");
- public synchronized void stop() throws InterruptedException
- {
- lock.lock();
- try
- {
- if (!running)
- throw new IllegalStateException("Consumer instance " + id + " is not running!");
-
- log.info("{} - Stopping", id);
- consumer.wakeup();
- condition.await();
- log.info("{} - Stopped - consumed {} messages so far", id, consumed);
- }
- finally
- {
- lock.unlock();
- }
+ log.info("{} - Starting ListenerContainer", id);
+ errorHandler.clearState();
+ registry.getListenerContainer(id).start();
}
- @PreDestroy
- public void destroy() throws ExecutionException, InterruptedException
+ public void stop()
{
- log.info("{} - Destroy!", id);
- log.info("{}: Consumed {} messages in total, exiting!", id, consumed);
+ if (!registry.getListenerContainer(id).isChildRunning())
+ throw new IllegalStateException("Consumer instance " + id + " is not running!");
+
+ log.info("{} - Stopping ListenerContainer", id);
+ registry.getListenerContainer(id).stop();
+ log.info("{} - Stopped", id);
}
public boolean running()
{
- lock.lock();
- try
- {
- return running;
- }
- finally
- {
- lock.unlock();
- }
+ return registry.getListenerContainer(id).isRunning();
}
public Optional<Exception> exitStatus()
{
- lock.lock();
- try
- {
- if (running)
- throw new IllegalStateException("No exit-status available: Consumer instance " + id + " is running!");
-
- return Optional.ofNullable(exception);
- }
- finally
- {
- lock.unlock();
- }
+ if (running())
+ throw new IllegalStateException("No exit-status available: Consumer instance " + id + " is running!");
+
+ return errorHandler.getException();
}
}
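
start(), stop(), running() and exitStatus() are meant to be driven from outside the class, e.g. from a small REST controller. A hypothetical sketch (neither the controller nor its paths are part of this change; Message is the value type from the configuration above):

package de.juplo.kafka;

import lombok.RequiredArgsConstructor;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.bind.annotation.RestController;

@RestController
@RequiredArgsConstructor
public class DriverController
{
  private final EndlessConsumer<String, Message> consumer;

  @PostMapping("start")
  public void start()
  {
    // Delegates to KafkaListenerEndpointRegistry.getListenerContainer(id).start()
    consumer.start();
  }

  @PostMapping("stop")
  public void stop()
  {
    consumer.stop();
  }

  @ExceptionHandler
  @ResponseStatus(HttpStatus.BAD_REQUEST)
  public String illegalState(IllegalStateException e)
  {
    // start()/stop() throw, if the container is already / not running
    return e.getMessage();
  }
}
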
import com.mongodb.client.MongoClient;
import lombok.extern.slf4j.Slf4j;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.junit.jupiter.api.*;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
-import org.springframework.boot.autoconfigure.kafka.KafkaAutoConfiguration;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.boot.autoconfigure.mongo.MongoProperties;
import org.springframework.boot.test.autoconfigure.data.mongo.AutoConfigureDataMongo;
import org.springframework.boot.test.context.TestConfiguration;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Import;
+import org.springframework.kafka.config.KafkaListenerEndpointRegistry;
+import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.test.context.EmbeddedKafka;
import org.springframework.test.context.TestPropertySource;
import org.springframework.test.context.junit.jupiter.SpringJUnitConfig;
import static org.awaitility.Awaitility.*;
-@SpringJUnitConfig(
- initializers = ConfigDataApplicationContextInitializer.class,
- classes = {
- KafkaAutoConfiguration.class,
- ApplicationTests.Configuration.class })
+@SpringJUnitConfig(initializers = ConfigDataApplicationContextInitializer.class)
@TestPropertySource(
properties = {
"spring.kafka.bootstrap-servers=${spring.embedded.kafka.brokers}",
@Autowired
MongoProperties mongoProperties;
@Autowired
+ KafkaListenerEndpointRegistry kafkaListenerEndpointRegistry;
+ @Autowired
TestRecordHandler<K, V> recordHandler;
@Autowired
EndlessConsumer<K, V> endlessConsumer;
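
With the KafkaListenerEndpointRegistry autowired, the test can wait until the listener-container has actually been assigned its partitions before producing records, instead of relying on the removed rebalance-listener. A sketch under assumptions (clientId and PARTITIONS are placeholders; ContainerTestUtils comes from spring-kafka-test, and java.time.Duration plus org.springframework.kafka.test.utils.ContainerTestUtils would have to be imported):

// e.g. in a @BeforeEach, after endlessConsumer.start() has been called:
ContainerTestUtils.waitForAssignment(
    kafkaListenerEndpointRegistry.getListenerContainer(clientId),  // id = spring.kafka.client-id
    PARTITIONS);

// Alternatively, Awaitility (statically imported above) can poll the new state:
await("EndlessConsumer is running")
    .atMost(Duration.ofSeconds(10))
    .until(() -> endlessConsumer.running());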