import de.juplo.kafka.chat.backend.domain.ChatHome;
import de.juplo.kafka.chat.backend.domain.ShardingStrategy;
import de.juplo.kafka.chat.backend.persistence.KafkaLikeShardingStrategy;
import jakarta.annotation.PreDestroy;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.IntegerDeserializer;
import org.apache.kafka.common.serialization.IntegerSerializer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.ApplicationArguments;
import org.springframework.boot.ApplicationRunner;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.support.serializer.JsonDeserializer;
import org.springframework.kafka.support.serializer.JsonSerializer;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;

import java.time.Clock;
import java.time.ZoneId;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.CompletableFuture;
@ConditionalOnProperty(
name = "services",
havingValue = "kafka")
@Configuration
-@Slf4j
-public class KafkaServicesConfiguration implements ApplicationRunner
+public class KafkaServicesConfiguration
{
- @Autowired
- ThreadPoolTaskExecutor taskExecutor;
- @Autowired
- ConfigurableApplicationContext context;
-
- @Autowired
- ChatMessageChannel chatMessageChannel;
- @Autowired
- ChatRoomChannel chatRoomChannel;
-
- CompletableFuture<Void> chatRoomChannelConsumerJob;
- CompletableFuture<Void> chatMessageChannelConsumerJob;
-
-
- @Override
- public void run(ApplicationArguments args) throws Exception
- {
- log.info("Starting the consumer for the ChatRoomChannel");
- chatRoomChannelConsumerJob = taskExecutor
- .submitCompletable(chatRoomChannel)
- .exceptionally(e ->
- {
- log.error("The consumer for the ChatRoomChannel exited abnormally!", e);
- return null;
- });
- log.info("Starting the consumer for the ChatMessageChannel");
- chatMessageChannelConsumerJob = taskExecutor
- .submitCompletable(chatMessageChannel)
- .exceptionally(e ->
- {
- log.error("The consumer for the ChatMessageChannel exited abnormally!", e);
- return null;
- });
- }
-
- @PreDestroy
- public void joinChatRoomChannelConsumerJob()
- {
- log.info("Waiting for the consumer of the ChatRoomChannel to finish its work");
- chatRoomChannelConsumerJob.join();
- log.info("Joined the consumer of the ChatRoomChannel");
- }
-
- @PreDestroy
- public void joinChatMessageChannelConsumerJob()
- {
- log.info("Waiting for the consumer of the ChatMessageChannel to finish its work");
- chatMessageChannelConsumerJob.join();
- log.info("Joined the consumer of the ChatMessageChannel");
- }
-
-
@Bean
ChatHome kafkaChatHome(
ShardingStrategy shardingStrategy,
@Bean
Producer<Integer, ChatRoomTo> chatRoomChannelProducer(
- Properties producerProperties,
+ Map<String, Object> defaultProducerProperties,
IntegerSerializer integerSerializer,
JsonSerializer<ChatRoomTo> chatRoomSerializer)
{
return new KafkaProducer<>(
- producerProperties,
+ defaultProducerProperties,
integerSerializer,
chatRoomSerializer);
}
@Bean
Consumer<Integer, ChatRoomTo> chatRoomChannelConsumer(
- Properties producerProperties,
+ Map<String, Object> defaultConsumerProperties,
IntegerDeserializer integerDeserializer,
JsonDeserializer<ChatRoomTo> chatRoomDeserializer)
{
+ Map<String, Object> properties = new HashMap<>(defaultConsumerProperties);
+ properties.put(
+ ConsumerConfig.GROUP_ID_CONFIG,
+ "chat_room_channel");
return new KafkaConsumer<>(
- producerProperties,
+ properties,
integerDeserializer,
chatRoomDeserializer);
}
@Bean
Producer<String, MessageTo> chatMessageChannelProducer(
- Properties producerProperties,
+ Map<String, Object> defaultProducerProperties,
StringSerializer stringSerializer,
JsonSerializer<MessageTo> messageSerializer)
{
return new KafkaProducer<>(
- producerProperties,
+ defaultProducerProperties,
stringSerializer,
messageSerializer);
}
@Bean
Consumer<String, MessageTo> chatMessageChannelConsumer(
- Properties producerProperties,
+ Map<String, Object> defaultConsumerProperties,
StringDeserializer stringDeserializer,
JsonDeserializer<MessageTo> messageDeserializer)
{
+ Map<String, Object> properties = new HashMap<>(defaultConsumerProperties);
+ properties.put(
+ ConsumerConfig.GROUP_ID_CONFIG,
+ "chat_message_channel");
return new KafkaConsumer<>(
- producerProperties,
+ properties,
stringDeserializer,
messageDeserializer);
}
}
@Bean
- Properties producerProperties(ChatBackendProperties chatBackendProperties)
+ Map<String, Object> defaultProducerProperties(ChatBackendProperties chatBackendProperties)
{
- Properties properties = new Properties();
- return properties;
+ return Map.of(
+ ProducerConfig.CLIENT_ID_CONFIG,
+ chatBackendProperties.getKafka().getClientId(),
+ ProducerConfig.BOOTSTRAP_SERVERS_CONFIG,
+ chatBackendProperties.getKafka().getBootstrapServers());
}
@Bean
- Properties consumerProperties(ChatBackendProperties chatBackendProperties)
+ Map<String, Object> defaultConsumerProperties(ChatBackendProperties chatBackendProperties)
{
- Properties properties = new Properties();
- return properties;
+ return Map.of(
+ ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG,
+ chatBackendProperties.getKafka().getBootstrapServers(),
+ ConsumerConfig.CLIENT_ID_CONFIG,
+ chatBackendProperties.getKafka().getClientId(),
+ ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG,
+ "false",
+ ConsumerConfig.AUTO_OFFSET_RESET_CONFIG,
+ "earliest");
}
@Bean