package de.trion.microservices.details;

import static org.springframework.http.MediaType.APPLICATION_JSON;

import de.trion.microservices.avro.Order;
import io.confluent.kafka.streams.serdes.avro.SpecificAvroSerde;
import java.net.URI;
import java.util.Properties;
import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.Topology;
import org.apache.kafka.streams.kstream.Materialized;
import org.apache.kafka.streams.state.QueryableStoreTypes;
import org.apache.kafka.streams.state.ReadOnlyKeyValueStore;
import org.apache.kafka.streams.state.StreamsMetadata;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RestController;
30 public class DetailsService
32 final static Logger LOG = LoggerFactory.getLogger(DetailsService.class);
34 private final String topic;
35 private final String host;
36 private final int port;
37 private final KafkaStreams streams;
39 private ReadOnlyKeyValueStore<String, Order> orders;
42 public DetailsService(ApplicationProperties config)
46 String[] splitted = config.applicationServer.split(":");
48 port = Integer.parseInt(splitted[1]);
50 Properties properties = new Properties();
51 properties.put("bootstrap.servers", config.bootstrapServers);
52 properties.put("application.id", "details");
53 properties.put("application.server", config.applicationServer);
54 properties.put("schema.registry.url", config.schemaRegistryUrl);
55 properties.put("default.key.serde", Serdes.String().getClass());
56 properties.put("default.value.serde", SpecificAvroSerde.class);
58 StreamsBuilder builder = new StreamsBuilder();
59 builder.table(topic, Materialized.as(topic));
61 Topology topology = builder.build();
62 streams = new KafkaStreams(topology, properties);
63 streams.setUncaughtExceptionHandler((Thread t, Throwable e) ->
65 LOG.error("Unexpected error in thread {}: {}", t, e.toString());
72 LOG.error("Could not close KafkaStreams!", ex);
75 streams.setStateListener((newState, oldState) ->
77 if (newState == KafkaStreams.State.RUNNING)
78 orders = streams.store(topic, QueryableStoreTypes.keyValueStore());
84 path = "/orders/{id}",
85 produces = MediaType.APPLICATION_JSON_UTF8_VALUE)
86 public ResponseEntity<?> getOrder(@PathVariable String id)
88 StreamsMetadata metadata = streams.metadataForKey(topic, id, Serdes.String().serializer());
89 LOG.debug("Local store for {}: {}:{}", id, metadata.host(), metadata.port());
91 if (port != metadata.port() || !host.equals(metadata.host()))
93 URI location = URI.create("http://" + metadata.host() + ":" + metadata.port() + "/" + id);
94 LOG.debug("Redirecting to {}", location);
97 .status(HttpStatus.TEMPORARY_REDIRECT)
102 Order order = orders.get(id);
104 ? ResponseEntity.notFound().build()
105 : ResponseEntity.ok().contentType(APPLICATION_JSON).body(order.toString());