package de.juplo.kafka.chat.backend.implementation.kafka;
-import de.juplo.kafka.chat.backend.domain.*;
-import de.juplo.kafka.chat.backend.domain.exceptions.LoadInProgressException;
+import de.juplo.kafka.chat.backend.domain.ChatRoomData;
+import de.juplo.kafka.chat.backend.domain.ChatRoomInfo;
+import de.juplo.kafka.chat.backend.domain.Message;
+import de.juplo.kafka.chat.backend.domain.ShardingPublisherStrategy;
import de.juplo.kafka.chat.backend.domain.exceptions.ShardNotOwnedException;
import de.juplo.kafka.chat.backend.implementation.kafka.messages.AbstractMessageTo;
import de.juplo.kafka.chat.backend.implementation.kafka.messages.data.EventChatMessageReceivedTo;
import lombok.Getter;
+import lombok.ToString;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.*;
import org.apache.kafka.clients.producer.Producer;
import reactor.core.publisher.Mono;
import java.time.*;
-import java.util.*;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.UUID;
import java.util.stream.IntStream;
+@ToString(of = { "topic", "instanceId" })
@Slf4j
-public class DataChannel implements Runnable, ConsumerRebalanceListener
+public class DataChannel implements Channel, ConsumerRebalanceListener
{
private final String instanceId;
private final String topic;
private boolean running;
@Getter
- private volatile boolean loadInProgress;
+ private volatile ChannelState channelState = ChannelState.STARTING;
public DataChannel(
public void onPartitionsAssigned(Collection<TopicPartition> partitions)
{
log.info("Newly assigned partitions! Pausing normal operations...");
- loadInProgress = true;
+ channelState = ChannelState.LOAD_IN_PROGRESS;
consumer.endOffsets(partitions).forEach((topicPartition, currentOffset) ->
{
ConsumerRecords<String, AbstractMessageTo> records = consumer.poll(pollingInterval);
log.info("Fetched {} messages", records.count());
- if (loadInProgress)
+ switch (channelState)
{
- loadChatRoomData(records);
-
- if (isLoadingCompleted())
+ case LOAD_IN_PROGRESS ->
{
- log.info("Loading of messages completed! Pausing all owned partitions...");
- pauseAllOwnedPartions();
- log.info("Resuming normal operations...");
- loadInProgress = false;
+ loadChatRoomData(records);
+
+ if (isLoadingCompleted())
+ {
+ log.info("Loading of messages completed! Pausing all owned partitions...");
+ pauseAllOwnedPartions();
+ log.info("Resuming normal operations...");
+ channelState = ChannelState.READY;
+ }
}
- }
- else
- {
- if (!records.isEmpty())
+ case SHUTTING_DOWN -> log.info("Shutdown in progress: ignoring {} fetched messages.", records.count());
+ default ->
{
- throw new IllegalStateException("All owned partitions should be paused, when no load is in progress!");
+ if (!records.isEmpty())
+ {
+ throw new IllegalStateException("All owned partitions should be paused, when in state " + channelState);
+ }
}
}
}
catch (WakeupException e)
{
log.info("Received WakeupException, exiting!");
+ channelState = ChannelState.SHUTTING_DOWN;
running = false;
}
}
Message.MessageKey key = Message.MessageKey.of(chatMessageTo.getUser(), chatMessageTo.getId());
Message message = new Message(key, offset, timestamp, chatMessageTo.getText());
- ChatRoomData chatRoomData = this
- .chatRoomData[partition]
- .computeIfAbsent(chatRoomId, this::computeChatRoomData);
+ ChatRoomData chatRoomData = computeChatRoomData(chatRoomId, partition);
KafkaChatMessageService kafkaChatRoomService =
(KafkaChatMessageService) chatRoomData.getChatRoomService();
.toArray();
}
/**
 * Materializes the local {@link ChatRoomData} for a newly announced chat-room.
 * <p>
 * Delegates to {@code computeChatRoomData(UUID, int)}, which is idempotent:
 * if data for the room already exists on this instance, the request is ignored
 * (only logged), so repeated announcements are harmless.
 *
 * @param chatRoomInfo id and shard of the chat-room to create data for
 */
void createChatRoomData(ChatRoomInfo chatRoomInfo)
{
  computeChatRoomData(chatRoomInfo.getId(), chatRoomInfo.getShard());
}
+
Mono<ChatRoomData> getChatRoomData(int shard, UUID id)
{
- if (loadInProgress)
+ ChannelState capturedState = channelState;
+ if (capturedState != ChannelState.READY)
{
- return Mono.error(new LoadInProgressException());
+ return Mono.error(new ChannelNotReadyException(capturedState));
}
if (!isShardOwned[shard])
return Mono.error(new ShardNotOwnedException(instanceId, shard));
}
- return channelMediator
- .getChatRoomInfo(id)
- .map(chatRoomInfo ->
- chatRoomData[shard].computeIfAbsent(id, this::computeChatRoomData));
+ return Mono.justOrEmpty(chatRoomData[shard].get(id));
}
- private ChatRoomData computeChatRoomData(UUID chatRoomId)
+ private ChatRoomData computeChatRoomData(UUID chatRoomId, int shard)
{
- log.info("Creating ChatRoom {} with buffer-size {}", chatRoomId, bufferSize);
- KafkaChatMessageService service = new KafkaChatMessageService(this, chatRoomId);
- return new ChatRoomData(clock, service, bufferSize);
+ ChatRoomData chatRoomData = this.chatRoomData[shard].get(chatRoomId);
+
+ if (chatRoomData != null)
+ {
+ log.info(
+ "Ignoring request to create already existing ChatRoomData for {}",
+ chatRoomId);
+ }
+ else
+ {
+ log.info("Creating ChatRoomData {} with buffer-size {}", chatRoomId, bufferSize);
+ KafkaChatMessageService service = new KafkaChatMessageService(this, chatRoomId);
+ chatRoomData = new ChatRoomData(clock, service, bufferSize);
+ this.chatRoomData[shard].put(chatRoomId, chatRoomData);
+ }
+
+ return chatRoomData;
}
ConsumerGroupMetadata getConsumerGroupMetadata()