summary |
shortlog |
log |
commit | commitdiff |
tree
raw |
patch |
inline | side by side (from parent 1:
57f47fc)
--
works __only__ if a default-type is defined
* The default-type is needed to deserialize values that are read from the
state-store.
* Without it, the deserialization fails, because no type-information is
available.
* The type-information gets lost when the values are stored in the state-
store, because the message-headers are _not_ stored along with the value!
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-streams</artifactId>
</dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-streams</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.springframework.kafka</groupId>
+ <artifactId>spring-kafka</artifactId>
+ </dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
</dependency>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
</dependency>
- <dependency>
- <groupId>org.springframework.kafka</groupId>
- <artifactId>spring-kafka</artifactId>
- <scope>test</scope>
- </dependency>
<dependency>
<groupId>org.springframework.kafka</groupId>
<artifactId>spring-kafka-test</artifactId>
<dependency>
<groupId>org.springframework.kafka</groupId>
<artifactId>spring-kafka-test</artifactId>
package de.juplo.kafka.wordcount.query;
package de.juplo.kafka.wordcount.query;
-import com.fasterxml.jackson.databind.ObjectMapper;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.Serdes;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.Serdes;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
+import org.springframework.kafka.support.serializer.JsonDeserializer;
+import org.springframework.kafka.support.serializer.JsonSerde;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.io.IOException;
import java.net.InetSocketAddress;
Properties props = new Properties();
props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
Properties props = new Properties();
props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
- props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
+ props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, JsonSerde.class.getName());
+ props.put(JsonDeserializer.VALUE_DEFAULT_TYPE, User.class.getName()); // << Does not work without this!
+ props.put(
+ JsonDeserializer.TYPE_MAPPINGS,
+ "user:" + Key.class.getName() + "," +
+ "ranking:" + Ranking.class.getName() + "," +
+ "userdata:" + User.class.getName() + "," +
+ "userranking:" + UserRanking.class.getName());
HostInfo applicationServer,
QueryApplicationProperties applicationProperties,
KeyValueBytesStoreSupplier storeSupplier,
HostInfo applicationServer,
QueryApplicationProperties applicationProperties,
KeyValueBytesStoreSupplier storeSupplier,
ConfigurableApplicationContext context)
{
QueryStreamProcessor streamProcessor = new QueryStreamProcessor(
ConfigurableApplicationContext context)
{
QueryStreamProcessor streamProcessor = new QueryStreamProcessor(
applicationServer,
applicationProperties.getUsersInputTopic(),
applicationProperties.getRankingInputTopic(),
applicationServer,
applicationProperties.getUsersInputTopic(),
applicationProperties.getRankingInputTopic(),
- storeSupplier,
- mapper);
streamProcessor.streams.setUncaughtExceptionHandler((Throwable e) ->
{
streamProcessor.streams.setUncaughtExceptionHandler((Throwable e) ->
{
package de.juplo.kafka.wordcount.query;
package de.juplo.kafka.wordcount.query;
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
import jakarta.annotation.PostConstruct;
import jakarta.annotation.PreDestroy;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.*;
import jakarta.annotation.PostConstruct;
import jakarta.annotation.PreDestroy;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.*;
+import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.KTable;
import org.apache.kafka.streams.kstream.Materialized;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.KTable;
import org.apache.kafka.streams.kstream.Materialized;
import org.apache.kafka.streams.state.KeyValueBytesStoreSupplier;
import org.apache.kafka.streams.state.QueryableStoreTypes;
import org.apache.kafka.streams.state.ReadOnlyKeyValueStore;
import org.apache.kafka.streams.state.KeyValueBytesStoreSupplier;
import org.apache.kafka.streams.state.QueryableStoreTypes;
import org.apache.kafka.streams.state.ReadOnlyKeyValueStore;
+import org.springframework.kafka.support.serializer.JsonSerde;
import java.net.URI;
import java.util.Optional;
import java.net.URI;
import java.util.Optional;
public final KafkaStreams streams;
public final HostInfo hostInfo;
public final KafkaStreams streams;
public final HostInfo hostInfo;
- public final StoreQueryParameters<ReadOnlyKeyValueStore<String, String>> storeParameters;
- public final ObjectMapper mapper;
+ public final StoreQueryParameters<ReadOnlyKeyValueStore<String, UserRanking>> storeParameters;
public QueryStreamProcessor(
public QueryStreamProcessor(
HostInfo applicationServer,
String usersInputTopic,
String rankingInputTopic,
HostInfo applicationServer,
String usersInputTopic,
String rankingInputTopic,
- KeyValueBytesStoreSupplier storeSupplier,
- ObjectMapper mapper)
+ KeyValueBytesStoreSupplier storeSupplier)
{
Topology topology = buildTopology(
usersInputTopic,
rankingInputTopic,
{
Topology topology = buildTopology(
usersInputTopic,
rankingInputTopic,
- storeSupplier,
- mapper);
streams = new KafkaStreams(topology, props);
hostInfo = applicationServer;
storeParameters = StoreQueryParameters.fromNameAndType(STORE_NAME, QueryableStoreTypes.keyValueStore());;
streams = new KafkaStreams(topology, props);
hostInfo = applicationServer;
storeParameters = StoreQueryParameters.fromNameAndType(STORE_NAME, QueryableStoreTypes.keyValueStore());;
}
static Topology buildTopology(
String usersInputTopic,
String rankingInputTopic,
}
static Topology buildTopology(
String usersInputTopic,
String rankingInputTopic,
- KeyValueBytesStoreSupplier storeSupplier,
- ObjectMapper mapper)
+ KeyValueBytesStoreSupplier storeSupplier)
{
StreamsBuilder builder = new StreamsBuilder();
{
StreamsBuilder builder = new StreamsBuilder();
- KTable<String, String> users = builder.table(usersInputTopic);
- KStream<String, String> rankings = builder.stream(rankingInputTopic);
+ KTable<String, User> users = builder.table(usersInputTopic);
+ KStream<String, Ranking> rankings = builder.stream(rankingInputTopic);
- .join(users, (rankingJson, userJson) ->
- {
- try
- {
- Ranking ranking = mapper.readValue(rankingJson, Ranking.class);
- User user = mapper.readValue(userJson, User.class);
-
- return mapper.writeValueAsString(
- UserRanking.of(
- user.getFirstName(),
- user.getLastName(),
- ranking.getEntries()));
- }
- catch (JsonProcessingException e)
- {
- throw new RuntimeException(e);
- }
- })
- .toTable(Materialized.as(storeSupplier));
+ .join(users, (ranking, user) -> UserRanking.of(
+ user.getFirstName(),
+ user.getLastName(),
+ ranking.getEntries()))
+ .toTable(
+ Materialized
+ .<String, UserRanking>as(storeSupplier)
+ .withValueSerde(new JsonSerde().copyWithType(UserRanking.class)));
Topology topology = builder.build();
log.info("\n\n{}", topology.describe());
Topology topology = builder.build();
log.info("\n\n{}", topology.describe());
public Optional<UserRanking> getUserRanking(String username)
{
public Optional<UserRanking> getUserRanking(String username)
{
- return
- Optional
- .ofNullable(getStore().get(username))
- .map(json ->
- {
- try
- {
- return mapper.readValue(json, UserRanking.class);
- }
- catch (JsonProcessingException e)
- {
- throw new RuntimeException(e);
- }
- });
+ return Optional.ofNullable(getStore().get(username));
package de.juplo.kafka.wordcount.query;
package de.juplo.kafka.wordcount.query;
-import com.fasterxml.jackson.databind.ObjectMapper;
import de.juplo.kafka.wordcount.top10.TestRanking;
import de.juplo.kafka.wordcount.users.TestUserData;
import lombok.extern.slf4j.Slf4j;
import de.juplo.kafka.wordcount.top10.TestRanking;
import de.juplo.kafka.wordcount.users.TestUserData;
import lombok.extern.slf4j.Slf4j;
Topology topology = QueryStreamProcessor.buildTopology(
USERS_IN,
TOP10_IN,
Topology topology = QueryStreamProcessor.buildTopology(
USERS_IN,
TOP10_IN,
- Stores.inMemoryKeyValueStore(STORE_NAME),
- new ObjectMapper());
+ Stores.inMemoryKeyValueStore(STORE_NAME));
testDriver = new TopologyTestDriver(topology, serializationConfig());
testDriver = new TopologyTestDriver(topology, serializationConfig());