import org.apache.kafka.streams.kstream.KTable;
import org.apache.kafka.streams.kstream.Materialized;
import org.apache.kafka.streams.state.HostInfo;
+import org.apache.kafka.streams.state.KeyValueBytesStoreSupplier;
import org.apache.kafka.streams.state.QueryableStoreTypes;
import org.apache.kafka.streams.state.ReadOnlyKeyValueStore;
HostInfo applicationServer,
String usersInputTopic,
String rankingInputTopic,
+ KeyValueBytesStoreSupplier storeSupplier,
ObjectMapper mapper)
{
- Topology topology = buildTopology(usersInputTopic, rankingInputTopic, mapper);
+ Topology topology = buildTopology(
+ usersInputTopic,
+ rankingInputTopic,
+ storeSupplier,
+ mapper);
streams = new KafkaStreams(topology, props);
hostInfo = applicationServer;
-storeParameters = StoreQueryParameters.fromNameAndType(STORE_NAME, QueryableStoreTypes.keyValueStore());;
+storeParameters = StoreQueryParameters.fromNameAndType(STORE_NAME, QueryableStoreTypes.keyValueStore());
static Topology buildTopology(
String usersInputTopic,
String rankingInputTopic,
+ KeyValueBytesStoreSupplier storeSupplier,
ObjectMapper mapper)
{
StreamsBuilder builder = new StreamsBuilder();
throw new RuntimeException(e);
}
})
- .toTable(Materialized.as(STORE_NAME));
+ .toTable(Materialized.as(storeSupplier));
Topology topology = builder.build();
log.info("\n\n{}", topology.describe());
return topology;
}
+ /**
+  * Returns the queryable read-only key/value view of the store registered
+  * under {@code STORE_NAME} (see {@code storeParameters}).
+  * NOTE(review): {@code streams.store(...)} throws
+  * {@code InvalidStateStoreException} while the instance is rebalancing or
+  * not yet RUNNING — confirm callers tolerate/retry this.
+  */
+ ReadOnlyKeyValueStore<String, String> getStore()
+ {
+ return streams.store(storeParameters);
+ }
+
public Optional<URI> getRedirect(String username)
{
KeyQueryMetadata metadata = streams.queryMetadataForKey(STORE_NAME, username, Serdes.String().serializer());
{
return
Optional
- .ofNullable(streams.store(storeParameters).get(username))
+ .ofNullable(getStore().get(username))
.map(json ->
{
try