import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.errors.WakeupException;
-import org.apache.kafka.common.serialization.LongDeserializer;
import org.apache.kafka.common.serialization.StringDeserializer;
import javax.annotation.PreDestroy;
private final KafkaConsumer<Long, String> consumer;
private boolean running = false;
+ Long offset = null;
Future<?> future = null;
props.put("bootstrap.servers", bootstrapServer);
props.put("group.id", groupId);
props.put("client.id", clientId);
- props.put("key.deserializer", LongDeserializer.class.getName());
+ props.put("commit.interval.ms", 500);
+ props.put("key.deserializer", StringDeserializer.class.getName());
props.put("value.deserializer", StringDeserializer.class.getName());
consumer = new KafkaConsumer<>(props);
while (running)
{
+ if (offset != null)
+ {
+ log.info("{} - seeking to offset {}", id, offset);
+ consumer
+ .partitionsFor(topic)
+ .forEach(partition ->
+ consumer.seek(
+ new TopicPartition(topic, partition.partition()),
+ offset));
+ offset = null;
+ }
+
ConsumerRecords<Long, String> records = consumer.poll(Duration.ofSeconds(1));
for (ConsumerRecord<Long, String> record : records)
log.info(
{
log.info("{} - RIIING!", id);
}
+ catch(Exception e)
+ {
+ log.error("{} - Unexpected error: {}", id, e.toString());
+ }
finally
{
log.info("{} - Unsubscribing...", id);
consumer.unsubscribe();
running = false;
+ offset = null;
}
}
+
+ /**
+  * Requests that the consumer loop reposition itself: on the next pass of
+  * the poll loop, every partition of the topic is sought to this offset
+  * (the loop then resets the field to null).
+  *
+  * NOTE(review): this setter is presumably called from a different thread
+  * than the poll loop, but the {@code offset} field is declared as a plain
+  * {@code Long} (not {@code volatile}) in this patch — the consumer thread
+  * has no guarantee of seeing the write promptly. Confirm the intended
+  * threading model or declare the field {@code volatile}.
+  *
+  * @param offset the offset to seek to on every partition of the topic
+  */
+ public void seek(long offset)
+ {
+ this.offset = offset;
+ }
+
+
public synchronized void start()
{
if (running)
future.get();
}
+
@PreDestroy
public void destroy() throws ExecutionException, InterruptedException
{