diff --git a/plc4j/integrations/apache-kafka/README.md b/plc4j/integrations/apache-kafka/README.md
index e2b17084891..d1ef8014300 100644
--- a/plc4j/integrations/apache-kafka/README.md
+++ b/plc4j/integrations/apache-kafka/README.md
@@ -17,7 +17,7 @@
 :imagesdir: ../../images/
 :icons: font
 
-== https://kafka.apache.org/[Apache Kafka]
+# [Apache Kafka](https://kafka.apache.org)
 
 Apache Kafka is an open-source distributed event streaming platform used by thousands of companies
 for high-performance data pipelines, streaming analytics, data integration, and
@@ -26,9 +26,8 @@ mission-critical applications.
 # PLC4X Kafka Connectors
 
 The PLC4X connectors have the ability to pass data between Kafka and devices using industrial protocols.
-They can be built from source from the future 0.8 https://plc4x.apache.org/users/download.html[release] of
-PLC4X or from the latest snapshot from https://github.com/apache/plc4x[github].
-//They can also be downloaded from the confluent https://www.confluent.io/hub/[hub].
+They can be built from source from a future [release] of
+PLC4X or from the latest snapshot from [github](https://github.com/apache/plc4x).
 
 ## Introduction
 
@@ -51,9 +50,11 @@ In order to start a Kafka Connect system the following steps have to be performe
 
 2) Unpack the archive.
 
-3) Copy the `target/plc4j-apache-kafka-0.8.0-uber-jar.jar` to the Kafka `libs` or plugin directory specified
+3) Copy the `target/plc4j-apache-kafka-0.10.0-uber-jar.jar` to the Kafka `libs` or plugin directory specified
 in the config/connect-distributed.properties file.
 
+To obtain the `target/plc4j-apache-kafka-0.10.0-uber-jar.jar`, compile the project from source.
+
 4) Copy the files in the `config` to Kafka's `config` directory.
 
 ### Start a Kafka Broker
@@ -234,3 +235,19 @@ between the base schemas.
 
 The schemas for the sink and source connectors are the same. This allows us to producer
 from one PLC and send the data to a sink.
+
+
+### Start with Docker
+
+If you want to use PLC4X with Kafka on Docker, follow these steps:
+
+- Download the `docker-compose.yml` file.
+- Fill in the image for each service and, if necessary, configure the IP address. You can also change the container ports.
+- The `docker-compose.yml` file defines four containers: ZooKeeper, Kafka, Kafka Connect, and Control Center. If you do not need Control Center, you can remove it.
+- Add the `plc4j-apache-kafka-0.10.0-uber-jar.jar` to the mounted volume. This is necessary for Kafka Connect to load the PLC4X connector.
+
+To start the containers defined in the `docker-compose.yml` file, use the following command:
+
+    docker-compose up -d
+
+[release]: https://github.com/apache/plc4x
diff --git a/plc4j/integrations/apache-kafka/docker-compose.yml b/plc4j/integrations/apache-kafka/docker-compose.yml
new file mode 100644
index 00000000000..046731fd325
--- /dev/null
+++ b/plc4j/integrations/apache-kafka/docker-compose.yml
@@ -0,0 +1,89 @@
+services:
+  zookeeper:
+    image: # Please add your ZooKeeper image here
+    container_name: zookeeper
+    networks:
+      - kafka_network
+    ports:
+      - 22181:2181
+    environment:
+      ZOOKEEPER_CLIENT_PORT: 2181
+      ZOOKEEPER_TICK_TIME: 2000
+
+  kafka:
+    image: # Please add your Kafka image here
+    container_name: kafka
+    networks:
+      - kafka_network
+    depends_on:
+      - zookeeper
+    ports:
+      - 29093:29093
+    environment:
+      KAFKA_BROKER_ID: 1
+      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+      KAFKA_LISTENERS: EXTERNAL_DIFFERENT_HOST://:29093,INTERNAL://:9092
+      KAFKA_ADVERTISED_LISTENERS: INTERNAL://kafka:9092,EXTERNAL_DIFFERENT_HOST://YOUR_IP:29093 # YOUR_IP: insert your machine's IP address here; it is required to enable external access.
+      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTERNAL:PLAINTEXT,EXTERNAL_DIFFERENT_HOST:PLAINTEXT
+      KAFKA_INTER_BROKER_LISTENER_NAME: INTERNAL
+      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+  kafka-connect:
+    image: # Please add your Kafka Connect image here
+    container_name: kafka-connect
+    depends_on:
+      - zookeeper
+      - kafka
+    ports:
+      - 8083:8083
+    environment:
+      CONNECT_BOOTSTRAP_SERVERS: "kafka:9092"
+      CONNECT_REST_PORT: 8083
+      CONNECT_GROUP_ID: kafka-connect
+      CONNECT_CONFIG_STORAGE_TOPIC: _connect-configs
+      CONNECT_OFFSET_STORAGE_TOPIC: _connect-offsets
+      CONNECT_STATUS_STORAGE_TOPIC: _connect-status
+      CONNECT_KEY_CONVERTER: org.apache.kafka.connect.storage.StringConverter
+      CONNECT_VALUE_CONVERTER: org.apache.kafka.connect.json.JsonConverter
+      CONNECT_INTERNAL_KEY_CONVERTER: "org.apache.kafka.connect.json.JsonConverter"
+      CONNECT_INTERNAL_VALUE_CONVERTER: "org.apache.kafka.connect.json.JsonConverter"
+      CONNECT_REST_ADVERTISED_HOST_NAME: "kafka-connect"
+      CONNECT_LOG4J_ROOT_LOGLEVEL: "INFO"
+      CONNECT_LOG4J_LOGGERS: "org.apache.kafka.connect.runtime.rest=WARN,org.reflections=ERROR"
+      CONNECT_LOG4J_APPENDER_STDOUT_LAYOUT_CONVERSIONPATTERN: "[%d] %p %X{connector.context}%m (%c:%L)%n"
+      CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: "1"
+      CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: "1"
+      CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: "1"
+      CONNECT_PLUGIN_PATH: /usr/share/java,/usr/share/confluent-hub-components,/data/connect-jars
+    # To add the PLC4X connector to this image, mount a volume containing the plc4j-apache-kafka-0.10.0-uber-jar.jar file
+    volumes:
+      - ./folder-with-PLC4X-connector:/data/connect-jars
+    command:
+      - bash
+      - -c
+      - |
+        echo "Launching Kafka Connect worker"
+        /etc/confluent/docker/run &
+        #
+        sleep infinity
+    networks:
+      - kafka_network
+  control-center:
+    image: # Please add your Control Center image here
+    hostname: control-center
+    depends_on:
+      - zookeeper
+      - kafka
+      - kafka-connect
+    ports:
+      - "9021:9021"
+    environment:
+      CONTROL_CENTER_BOOTSTRAP_SERVERS: 'kafka:9092'
+      CONTROL_CENTER_REPLICATION_FACTOR: 1
+      CONTROL_CENTER_CONNECT_CLUSTER: http://kafka-connect:8083
+      PORT: 9021
+    networks:
+      - kafka_network
+
+networks:
+  kafka_network:
+    name: kafka_docker_net
\ No newline at end of file
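Once the containers from the compose file above are running, a quick way to confirm that the mounted uber jar was picked up is to query the Kafka Connect REST API. This is a minimal sketch, assuming the worker is reachable on localhost and uses the default REST port 8083 published by the compose file; the PLC4X source and sink connector classes should appear in the response:

    # List the connector plugins loaded by the Connect worker
    curl http://localhost:8083/connector-plugins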