X-Git-Url: https://juplo.de/gitweb/?a=blobdiff_plain;f=src%2Fmain%2Fjava%2Fde%2Fjuplo%2Fkafka%2Fchat%2Fbackend%2Fpersistence%2Fkafka%2FKafkaChatHomeService.java;h=a95df543813f3223895b631d2b40e334309c9c1c;hb=220a778c91468046054fac0400ba89825c46b3f5;hp=2e3b42f6f8693339481c8f1f8bbfaf5ef5223433;hpb=3921344eb0c5b56ed571175f5a5b0690b5aa1f85;p=demos%2Fkafka%2Fchat

diff --git a/src/main/java/de/juplo/kafka/chat/backend/persistence/kafka/KafkaChatHomeService.java b/src/main/java/de/juplo/kafka/chat/backend/persistence/kafka/KafkaChatHomeService.java
index 2e3b42f6..a95df543 100644
--- a/src/main/java/de/juplo/kafka/chat/backend/persistence/kafka/KafkaChatHomeService.java
+++ b/src/main/java/de/juplo/kafka/chat/backend/persistence/kafka/KafkaChatHomeService.java
@@ -5,76 +5,95 @@ import de.juplo.kafka.chat.backend.domain.ChatRoom;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.kafka.clients.consumer.Consumer;
 import org.apache.kafka.clients.consumer.ConsumerRebalanceListener;
+import org.apache.kafka.clients.producer.Producer;
 import org.apache.kafka.common.TopicPartition;
 import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;
 
+import java.time.ZoneId;
 import java.util.*;
+import java.util.concurrent.ExecutorService;
 
 
 @Slf4j
 public class KafkaChatHomeService implements ChatHomeService, ConsumerRebalanceListener
 {
+  private final ExecutorService executorService;
   private final Consumer consumer;
+  private final Producer producer;
   private final String topic;
-  private final long[] offsets;
-  private final MessageHandler[] handlers;
-  private final Map[] chatrooms;
+  private final ZoneId zoneId;
+  // private final long[] offsets; For now, always re-read everything from scratch
+  private final ChatHomeLoader[] chatHomeLoaders;
+  private final Map[] chatRoomMaps;
 
 
   public KafkaChatHomeService(
+      ExecutorService executorService,
       Consumer consumer,
+      Producer producer,
       String topic,
+      ZoneId zoneId,
       int numShards)
   {
     log.debug("Creating KafkaChatHomeService");
+    this.executorService = executorService;
     this.consumer = consumer;
+    this.producer = producer;
     this.topic = topic;
-    this.offsets = new long[numShards];
-    this.handlers = new MessageHandler[numShards];
-    for (int i=0; i< numShards; i++)
-    {
-      this.offsets[i] = 0l;
-      this.handlers[i] = new MessageHandler(consumer, new TopicPartition(topic, i));
-    }
-    this.chatrooms = new Map[numShards];
+    this.zoneId = zoneId;
+    // this.offsets = new long[numShards];
+    // for (int i=0; i< numShards; i++)
+    // {
+    //   this.offsets[i] = 0l;
+    // }
+    this.chatHomeLoaders = new ChatHomeLoader[numShards];
+    this.chatRoomMaps = new Map[numShards];
   }
 
 
   @Override
   public void onPartitionsAssigned(Collection partitions)
   {
-    consumer.endOffsets(partitions).forEach((tp, currentOffset) ->
+    consumer.endOffsets(partitions).forEach((topicPartition, currentOffset) ->
     {
-      if (!tp.topic().equals(topic))
+      if (!topicPartition.topic().equals(topic))
       {
-        log.warn("Ignoring partition from unwanted topic: {}", tp);
+        log.warn("Ignoring partition from unwanted topic: {}", topicPartition);
         return;
       }
 
-      int partition = tp.partition();
-      long unseenOffset = offsets[partition];
-
-      log.info("Reading partition {} from {} -> {}", partition, unseenOffset, currentOffset);
-      handlers[partition] = new ChatRoomLoadingMessageHandlingStrategy(tp, currentOffset, unseenOffset);
+      int partition = topicPartition.partition();
+      long unseenOffset = 0; // offsets[partition];
+
+      log.info(
+          "Loading messages from partition {}: start-offset={} -> current-offset={}",
+          partition,
+          unseenOffset,
+          currentOffset);
+
+      // TODO: reuse! Do not always reload everything, only read from offsets[partition] onwards!
+      consumer.seek(topicPartition, unseenOffset);
+      chatHomeLoaders[partition] = new ChatHomeLoader(
+          producer,
+          currentOffset,
+          zoneId);
     });
   }
 
   @Override
   public void onPartitionsRevoked(Collection partitions)
   {
-    partitions.forEach(tp ->
+    partitions.forEach(topicPartition ->
     {
-      if (!tp.topic().equals(topic))
+      if (!topicPartition.topic().equals(topic))
       {
-        log.warn("Ignoring partition from unwanted topic: {}", tp);
+        log.warn("Ignoring partition from unwanted topic: {}", topicPartition);
         return;
       }
 
-      int partition = tp.partition();
-      long unseenOffset = offsets[partition];
-
-      log.info("Reading partition {} from {} -> {}", partition, unseenOffset, currentOffset);
+      int partition = topicPartition.partition();
+      // long unseenOffset = offsets[partition]; TODO: remember the offset...?
     });
     log.info("Revoked partitions: {}", partitions);
   }
 
@@ -82,56 +101,19 @@ public class KafkaChatHomeService implements ChatHomeService, ConsumerRebalanceL
   @Override
   public void onPartitionsLost(Collection partitions)
   {
-    log.info("Revoked partitions: {}", partitions);
-  }
-
-  private void foo()
-  {
-    Set owned = Arrays
-        .stream(ownedShards)
-        .collect(
-            () -> new HashSet<>(),
-            (set, i) -> set.add(i),
-            (a, b) -> a.addAll(b));
-    for (int shard = 0; shard < numShards; shard++)
-    {
-      chatrooms[shard] = owned.contains(shard)
-          ? new HashMap<>()
-          : null;
-    }
-    chatroomFlux
-        .filter(chatRoom ->
-        {
-          if (owned.contains(chatRoom.getShard()))
-          {
-            return true;
-          }
-          else
-          {
-            log.info("Ignoring not owned chat-room {}", chatRoom);
-            return false;
-          }
-        })
-        .toStream()
-        .forEach(chatroom -> chatrooms[chatroom.getShard()].put(chatroom.getId(), chatroom));
-  }
-
-  @Override
-  public Mono putChatRoom(ChatRoom chatRoom)
-  {
-    chatrooms[chatRoom.getShard()].put(chatRoom.getId(), chatRoom);
-    return Mono.just(chatRoom);
+    // TODO: Does the loss of the partitions have to be handled differently?
+    onPartitionsRevoked(partitions);
   }
 
   @Override
   public Mono getChatRoom(int shard, UUID id)
   {
-    return Mono.justOrEmpty(chatrooms[shard].get(id));
+    return Mono.justOrEmpty(chatRoomMaps[shard].get(id));
   }
 
   @Override
   public Flux getChatRooms(int shard)
   {
-    return Flux.fromStream(chatrooms[shard].values().stream());
+    return Flux.fromStream(chatRoomMaps[shard].values().stream());
   }
 }
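
Note (not part of the patch): the change registers the service as a ConsumerRebalanceListener and, on every assignment, seeks each newly assigned partition back to offset 0 so that a ChatHomeLoader can rebuild the chat rooms of that shard from the beginning of the topic. The following sketch shows one way such a service could be wired to a polling loop running on the injected ExecutorService. The helper class KafkaChatHomeServiceWiring, the wire(...) method, and the poll loop are illustrative assumptions and do not appear in the repository; only the Kafka client API calls (subscribe with a rebalance listener, poll) and the patched constructor signature are taken as given.

import java.time.Duration;
import java.time.ZoneId;
import java.util.List;
import java.util.concurrent.ExecutorService;

import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.producer.Producer;

// Hypothetical wiring helper -- a sketch only, not code from the repository.
public class KafkaChatHomeServiceWiring
{
  public static <K, V> KafkaChatHomeService wire(
      ExecutorService executorService,
      Consumer<K, V> consumer,
      Producer<K, V> producer,
      String topic,
      int numShards)
  {
    KafkaChatHomeService chatHomeService = new KafkaChatHomeService(
        executorService,
        consumer,
        producer,
        topic,
        ZoneId.systemDefault(),
        numShards);

    executorService.execute(() ->
    {
      // The service itself is the ConsumerRebalanceListener: on assignment it
      // seeks each owned partition to offset 0 and creates a ChatHomeLoader for it.
      consumer.subscribe(List.of(topic), chatHomeService);
      while (!Thread.currentThread().isInterrupted())
      {
        ConsumerRecords<K, V> records = consumer.poll(Duration.ofSeconds(1));
        // ... hand each partition's records to its ChatHomeLoader / chat-room map ...
      }
    });

    return chatHomeService;
  }
}

Running the poll loop on the ExecutorService keeps all consumer access on a single thread, which matters because KafkaConsumer is not thread-safe.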