import de.juplo.kafka.chat.backend.implementation.kafka.messages.data.EventChatMessageReceivedTo;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
-import org.apache.kafka.clients.consumer.Consumer;
-import org.apache.kafka.clients.consumer.ConsumerRebalanceListener;
-import org.apache.kafka.clients.consumer.ConsumerRecord;
-import org.apache.kafka.clients.consumer.ConsumerRecords;
+import org.apache.kafka.clients.consumer.*;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.TopicPartition;
@Slf4j
public class DataChannel implements Runnable, ConsumerRebalanceListener
{
+ // Identifier of this instance; used to tag log output and to publish shard ownership.
+ private final String instanceId;
private final String topic;
private final Producer<String, AbstractMessageTo> producer;
private final Consumer<String, AbstractMessageTo> consumer;
private final long[] currentOffset;
private final long[] nextOffset;
private final Map<UUID, ChatRoomData>[] chatRoomData;
+ // Channel used to broadcast shard assignment/revocation events (see rebalance callbacks).
+ private final InfoChannel infoChannel;
+ // Strategy that announces which instance currently owns a shard.
+ private final ShardingPublisherStrategy shardingPublisherStrategy;
private boolean running;
@Getter
+ /**
+  * Creates the data channel that consumes and produces chat messages for {@code numShards}
+  * partitions of {@code topic}.
+  *
+  * New in this change: the channel is identified by {@code instanceId} (used in log output
+  * and ownership publication) and collaborates with {@code infoChannel} and
+  * {@code shardingPublisherStrategy} during partition rebalances.
+  */
public DataChannel(
+ String instanceId,
String topic,
Producer<String, AbstractMessageTo> producer,
Consumer<String, AbstractMessageTo> dataChannelConsumer,
ZoneId zoneId,
int numShards,
int bufferSize,
- Clock clock)
+ Clock clock,
+ InfoChannel infoChannel,
+ ShardingPublisherStrategy shardingPublisherStrategy)
{
log.debug(
- "Creating DataChannel for topic {} with {} partitions",
+ "{}: Creating DataChannel for topic {} with {} partitions",
+ instanceId,
topic,
numShards);
+ this.instanceId = instanceId;
this.topic = topic;
this.consumer = dataChannelConsumer;
this.producer = producer;
+ // Generic array creation is not possible in Java; this raw Map[] triggers an
+ // unavoidable unchecked warning. One HashMap per shard, created eagerly below.
this.chatRoomData = new Map[numShards];
IntStream
.range(0, numShards)
- .forEach(shard ->
- {
- this.chatRoomData[shard] = new HashMap<>();
- });
+ .forEach(shard -> this.chatRoomData[shard] = new HashMap<>());
+ this.infoChannel = infoChannel;
+ this.shardingPublisherStrategy = shardingPublisherStrategy;
}
producer.send(record, ((metadata, exception) ->
{
- if (metadata != null)
+ if (exception == null)
{
// On successful send
Message message = new Message(key, metadata.offset(), timestamp, text);
currentOffset);
consumer.seek(topicPartition, nextOffset[partition]);
+ infoChannel.sendShardAssignedEvent(partition);
+ shardingPublisherStrategy
+ .publishOwnership(partition)
+ .doOnSuccess(instanceId -> log.info(
+ "Successfully published instance {} as owner of shard {}",
+ instanceId,
+ partition))
+ .doOnError(throwable -> log.error(
+ "Could not publish instance {} as owner of shard {}: {}",
+ instanceId,
+ partition,
+ throwable))
+ .block();
});
consumer.resume(partitions);
{
int partition = topicPartition.partition();
isShardOwned[partition] = false;
+ nextOffset[partition] = consumer.position(topicPartition);
log.info("Partition revoked: {} - next={}", partition, nextOffset[partition]);
+ infoChannel.sendShardRevokedEvent(partition);
});
}
Message.MessageKey key = Message.MessageKey.of(chatMessageTo.getUser(), chatMessageTo.getId());
Message message = new Message(key, offset, timestamp, chatMessageTo.getText());
- ChatRoomData chatRoomData = this.chatRoomData[partition].computeIfAbsent(
- chatRoomId,
- (id) ->
- {
- log.info("Creating ChatRoom {} with buffer-size {}", id, bufferSize);
- KafkaChatMessageService service = new KafkaChatMessageService(this, id);
- return new ChatRoomData(clock, service, bufferSize);
- });
+ ChatRoomData chatRoomData = this
+ .chatRoomData[partition]
+ .computeIfAbsent(chatRoomId, this::computeChatRoomData);
KafkaChatMessageService kafkaChatRoomService =
(KafkaChatMessageService) chatRoomData.getChatRoomService();
+ log.debug(
+ "Loaded message from partition={} at offset={}: {}",
+ partition,
+ offset,
+ message);
kafkaChatRoomService.persistMessage(message);
}
return IntStream
.range(0, numShards)
.filter(shard -> isShardOwned[shard])
- .allMatch(shard -> nextOffset[shard] >= currentOffset[shard]);
+ .allMatch(shard ->
+ {
+ TopicPartition partition = new TopicPartition(topic, shard);
+ long position = consumer.position(partition);
+ return position >= currentOffset[shard];
+ });
}
private void pauseAllOwnedPartions()
if (!isShardOwned[shard])
{
- return Mono.error(new ShardNotOwnedException(shard));
+ return Mono.error(new ShardNotOwnedException(instanceId, shard));
}
- return Mono.justOrEmpty(chatRoomData[shard].get(id));
+ return infoChannel
+ .getChatRoomInfo(id)
+ .map(chatRoomInfo ->
+ chatRoomData[shard].computeIfAbsent(id, this::computeChatRoomData));
+ }
+
+ // Factory for the per-chat-room state; used as the mapping function of
+ // Map.computeIfAbsent, so it runs exactly once per chat-room id per shard.
+ // Reads the bufferSize and clock fields captured by the constructor.
+ private ChatRoomData computeChatRoomData(UUID chatRoomId)
+ {
+ log.info("Creating ChatRoom {} with buffer-size {}", chatRoomId, bufferSize);
+ KafkaChatMessageService service = new KafkaChatMessageService(this, chatRoomId);
+ return new ChatRoomData(clock, service, bufferSize);
+ }
+
+ // Exposes the consumer-group metadata of the data-channel consumer
+ // (package-private; presumably needed for transactional sends — TODO confirm caller).
+ ConsumerGroupMetadata getConsumerGroupMetadata()
+ {
+ return consumer.groupMetadata();
}
}