cat s3-sink-connector.json | jq .config | http -v put :8083/connector-plugins/io.confluent.connect.s3.S3SinkConnector/config/validate
echo
-echo "Schreibe Nachrichtem mit Avro-Schema in das Topic \"test\""
+echo "Simuliere einen Producer mit dem Confluent Datagen Connector"
echo
-docker compose -f docker/docker-compose.yml exec -T cli \
- kafka-avro-console-producer \
- --broker-list kafka:9092 \
- --topic test \
- --property value.schema='{"type":"record","name":"myrecord","fields":[{"name":"f1","type":"string"}]}' \
- --property schema.registry.url=http://schema-registry:8085 << EOF
-{"f1":"foo"}
-{"f1":"bar"}
-{"f1":"foofoo"}
-{"f1":"barbar"}
-{"f1":"foobar"}
-{"f1":"barfoo"}
-EOF
+http -v post :8083/connectors < datagen-source-connector.json
echo
-echo "Lese Nachrichtem mit Avro-Schema aus dem Topic \"test\""
+echo "Lese probeweise 5 Nachrichten aus den erzeugten Daten ein"
echo
docker compose -f docker/docker-compose.yml exec cli \
kafka-avro-console-consumer \
--bootstrap-server kafka:9092 \
--topic test \
--property schema.registry.url=http://schema-registry:8085 \
- --from-beginning \
--max-messages 5
echo
echo
sleep 10
+echo
+echo "Prüfe den Status des Confluent S3 Sink Connectors"
+echo
+http -v :8083/connectors/s3-sink/status
+
+echo
+echo "Entferne datagen-source und s3-sink"
+echo
+http -v delete :8083/connectors/datagen-source
+http -v delete :8083/connectors/s3-sink
+
echo
echo "Liste die im S3-Bucket erzeugten Dateien auf"
echo
--- /dev/null
+{
+ "name": "datagen-source",
+ "config": {
+ "name": "datagen-source",
+ "connector.class": "io.confluent.kafka.connect.datagen.DatagenConnector",
+ "kafka.topic": "test",
+ "schema.string": "{\"type\":\"record\",\"name\":\"myrecord\",\"fields\":[{\"name\":\"f1\",\"type\":\"string\"}]}"
+ }
+}