Reverted the switch of the message data type to Long
[demos/kafka/training] / src/main/java/de/juplo/kafka/ApplicationConfiguration.java
diff --git a/src/main/java/de/juplo/kafka/ApplicationConfiguration.java b/src/main/java/de/juplo/kafka/ApplicationConfiguration.java
index 58f44fa..08c3955 100644
--- a/src/main/java/de/juplo/kafka/ApplicationConfiguration.java
+++ b/src/main/java/de/juplo/kafka/ApplicationConfiguration.java
@@ -1,14 +1,17 @@
 package de.juplo.kafka;
 
+import org.apache.kafka.clients.consumer.ConsumerRecord;
 import org.apache.kafka.clients.consumer.KafkaConsumer;
-import org.apache.kafka.common.serialization.LongDeserializer;
 import org.apache.kafka.common.serialization.StringDeserializer;
 import org.springframework.boot.context.properties.EnableConfigurationProperties;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 
+import java.time.Clock;
 import java.util.Properties;
+import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
+import java.util.function.Consumer;
 
 
 @Configuration
@@ -16,16 +19,38 @@ import java.util.concurrent.Executors;
 public class ApplicationConfiguration
 {
   @Bean
-  public EndlessConsumer endlessConsumer(
+  public Consumer<ConsumerRecord<String, String>> consumer()
+  {
+    return (record) ->
+    {
+      // Handle record
+    };
+  }
+
+  @Bean
+  public EndlessConsumer<String, String> endlessConsumer(
       KafkaConsumer<String, String> kafkaConsumer,
+      ExecutorService executor,
+      Consumer<ConsumerRecord<String, String>> handler,
+      PartitionStatisticsRepository repository,
       ApplicationProperties properties)
   {
     return
-        new EndlessConsumer(
-            Executors.newFixedThreadPool(1),
+        new EndlessConsumer<>(
+            executor,
+            repository,
             properties.getClientId(),
             properties.getTopic(),
-            kafkaConsumer);
+            Clock.systemDefaultZone(),
+            properties.getCommitInterval(),
+            kafkaConsumer,
+            handler);
+  }
+
+  @Bean
+  public ExecutorService executor()
+  {
+    return Executors.newSingleThreadExecutor();
   }
 
   @Bean(destroyMethod = "close")
@@ -34,12 +59,14 @@ public class ApplicationConfiguration
     Properties props = new Properties();
     props.put("bootstrap.servers", properties.getBootstrapServer());
+    props.put("partition.assignment.strategy", "org.apache.kafka.clients.consumer.CooperativeStickyAssignor");
     props.put("group.id", properties.getGroupId());
     props.put("client.id", properties.getClientId());
+    props.put("enable.auto.commit", false);
     props.put("auto.offset.reset", properties.getAutoOffsetReset());
     props.put("metadata.max.age.ms", "1000");
     props.put("key.deserializer", StringDeserializer.class.getName());
-    props.put("value.deserializer", LongDeserializer.class.getName());
+    props.put("value.deserializer", StringDeserializer.class.getName());
 
     return new KafkaConsumer<>(props);
   }
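
For orientation, here is a minimal sketch of what the manual-commit side of this wiring could look like at runtime. It is hypothetical: the actual EndlessConsumer implementation is not part of this diff, and the class name PollLoopSketch, its run method, and the one-second poll timeout are assumptions. It only illustrates why the configuration above sets enable.auto.commit to false and injects a Clock together with a commit interval.

// Hypothetical sketch of a manual-commit poll loop; the real EndlessConsumer
// is not shown in this diff, so all names and structure here are assumptions.
import java.time.Clock;
import java.time.Duration;
import java.time.Instant;
import java.util.List;
import java.util.function.Consumer;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

public class PollLoopSketch
{
  public static <K, V> void run(
      KafkaConsumer<K, V> consumer,
      Consumer<ConsumerRecord<K, V>> handler,
      Clock clock,
      Duration commitInterval,
      String topic)
  {
    consumer.subscribe(List.of(topic));
    Instant lastCommit = clock.instant();

    while (true)
    {
      // Fetch the next batch and delegate each record to the injected handler
      ConsumerRecords<K, V> records = consumer.poll(Duration.ofSeconds(1));
      records.forEach(handler);

      // enable.auto.commit is false, so offsets are committed manually
      // once the configured commit interval has elapsed
      if (Duration.between(lastCommit, clock.instant()).compareTo(commitInterval) >= 0)
      {
        consumer.commitSync();
        lastCommit = clock.instant();
      }
    }
  }
}

Setting partition.assignment.strategy to CooperativeStickyAssignor complements manual commits like this: rebalances become incremental, so a consumer keeps processing the partitions it retains instead of stopping entirely while the group reassigns.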