<docker-maven-plugin.version>0.33.0</docker-maven-plugin.version>
<java.version>11</java.version>
<kafka.version>2.8.0</kafka.version>
+ <confluent.version>6.2.1</confluent.version>
</properties>
<dependencies>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-streams</artifactId>
</dependency>
+ <dependency>
+ <groupId>io.confluent</groupId>
+ <artifactId>kafka-streams-avro-serde</artifactId>
+ <version>${confluent.version}</version>
+ </dependency>
<dependency>
<groupId>org.apache.avro</groupId>
<artifactId>avro</artifactId>
</plugins>
</build>
+ <repositories>
+ <repository>
+ <id>confluent</id>
+ <url>https://packages.confluent.io/maven/</url>
+ </repository>
+ </repositories>
+
</project>
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
+import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig;
+import io.confluent.kafka.streams.serdes.avro.SpecificAvroSerde;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.Serdes;
public final KafkaStreams streams;
public final HostInfo hostInfo;
public final String storeName = "rankingsByUsername";
- public final StoreQueryParameters<ReadOnlyKeyValueStore<String, String>> storeParameters;
- public final ObjectMapper mapper;
+ public final StoreQueryParameters<ReadOnlyKeyValueStore<String, UserRankingTO>> storeParameters;
public QueryStreamProcessor(
{
StreamsBuilder builder = new StreamsBuilder();
- KTable<String, String> users = builder.table(properties.getUsersInputTopic());
- KStream<String, String> rankings = builder.stream(properties.getRankingInputTopic());
+ KTable<String, UserTO> users = builder.table(properties.getUsersInputTopic());
+ KStream<String, RankingTO> rankings = builder.stream(properties.getRankingInputTopic());
rankings
- .join(users, (rankingJson, userJson) ->
- {
- try
- {
- Ranking ranking = mapper.readValue(rankingJson, Ranking.class);
- User user = mapper.readValue(userJson, User.class);
-
- return mapper.writeValueAsString(
- UserRanking.of(
- user.getFirstName(),
- user.getLastName(),
- ranking.getEntries()));
- }
- catch (JsonProcessingException e)
- {
- throw new RuntimeException(e);
- }
- })
+ .join(users, (ranking, user) ->
+ UserRankingTO
+ .newBuilder()
+ .setFirstName(user.getFirstName())
+ .setLastName(user.getLastName())
+ .setTop10(ranking.getEntries())
+ .build())
.toTable(Materialized.as(storeName));
Properties props = new Properties();
props.put(StreamsConfig.APPLICATION_ID_CONFIG, properties.getApplicationId());
props.put(StreamsConfig.APPLICATION_SERVER_CONFIG, properties.getApplicationServer());
props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, properties.getBootstrapServer());
- props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
- props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
+ props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
+ props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, SpecificAvroSerde.class);
+ props.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, "https://schema-registry:9081/");
props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
streams = new KafkaStreams(builder.build(), props);
hostInfo = HostInfo.buildFromEndpoint(properties.getApplicationServer());
storeParameters = StoreQueryParameters.fromNameAndType(storeName, QueryableStoreTypes.keyValueStore());
- this.mapper = mapper;
}
public Optional<URI> getRedirect(String username)
return
Optional
.ofNullable(streams.store(storeParameters).get(username))
- .map(json ->
- {
- try
- {
- return mapper.readValue(json, UserRanking.class);
- }
- catch (JsonProcessingException e)
- {
- throw new RuntimeException(e);
- }
- });
+ .map(userRankingTO -> UserRanking.of(
+ userRankingTO.getFirstName().toString(),
+ userRankingTO.getLastName().toString(),
+ userRankingTO
+ .getTop10()
+ .stream()
+ .map(entryTO -> Entry.of(entryTO.getWord().toString(), entryTO.getCount()))
+ .toArray(size -> new Entry[size])));
}
@PostConstruct