The data is stored in the topic as JSON with JSON Schema
author    Kai Moritz <kai@juplo.de>
          Fri, 26 Apr 2024 12:15:12 +0000 (14:15 +0200)
committer Kai Moritz <kai@juplo.de>
          Sat, 27 Apr 2024 07:44:52 +0000 (09:44 +0200)
README.sh
datagen-source-connector.json
s3-sink-connector.json

diff --git a/README.sh b/README.sh
index ba773b3..5a0569d 100755
--- a/README.sh
+++ b/README.sh
@@ -30,7 +30,7 @@ echo
 echo "Lese probeweise 5 Nachrichten aus den erzeugten Daten ein"
 echo
 docker compose -f docker/docker-compose.yml exec cli \
-  kafka-avro-console-consumer \
+  kafka-console-consumer \
     --bootstrap-server kafka:9092 \
     --topic test \
     --property schema.registry.url=http://schema-registry:8085 \
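
Note that the plain kafka-console-consumer dumps the raw record value (Schema Registry framing bytes included) and its formatter ignores the schema.registry.url property. To have the JSON-Schema payload actually deserialized against the registry, Confluent Platform ships a dedicated console consumer; a minimal sketch, assuming the cli container provides it:

# Sketch: deserialize the JSON-Schema records via the registry
# (assumes the cli image ships kafka-json-schema-console-consumer)
docker compose -f docker/docker-compose.yml exec cli \
  kafka-json-schema-console-consumer \
    --bootstrap-server kafka:9092 \
    --topic test \
    --property schema.registry.url=http://schema-registry:8085 \
    --from-beginning \
    --max-messages 5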
diff --git a/datagen-source-connector.json b/datagen-source-connector.json
index e9bc9bc..678a8cf 100644
--- a/datagen-source-connector.json
+++ b/datagen-source-connector.json
@@ -4,6 +4,9 @@
     "name": "datagen-source",
     "connector.class": "io.confluent.kafka.connect.datagen.DatagenConnector",
     "kafka.topic": "test",
+    "value.converter": "io.confluent.connect.json.JsonSchemaConverter",
+    "value.converter.schemas.enable": "true",
+    "value.converter.schema.registry.url": "http://schema-registry:8085",
     "schema.string": "{\"type\":\"record\",\"name\":\"myrecord\",\"fields\":[{\"name\":\"f1\",\"type\":\"string\"}]}"
   }
 }
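
With the JsonSchemaConverter configured, the source connector serializes each value through the registry and registers the value schema under the subject test-value (assuming the default TopicNameStrategy). A quick sanity check against the registry's REST API, as a sketch:

# Sketch: inspect the schema registered by the converter
# (subject test-value assumes the default TopicNameStrategy)
curl -s http://schema-registry:8085/subjects/test-value/versions/latest

The response should report "schemaType": "JSON" together with the registered schema for the f1 field.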
diff --git a/s3-sink-connector.json b/s3-sink-connector.json
index 15f3686..98bd525 100644
--- a/s3-sink-connector.json
+++ b/s3-sink-connector.json
@@ -4,6 +4,9 @@
     "name": "s3-sink",
     "connector.class": "io.confluent.connect.s3.S3SinkConnector",
     "topics": "test",
+    "value.converter": "io.confluent.connect.json.JsonSchemaConverter",
+    "value.converter.schemas.enable": "true",
+    "value.converter.schema.registry.url": "http://schema-registry:8085",
     "s3.bucket.name": "juplo",
     "s3.part.size": "5242880",
     "timezone": "Europe/Berlin",