From 9aa5e741789917ee56b48d4b583926a2b99e03bd Mon Sep 17 00:00:00 2001 From: DanubiaM Date: Fri, 7 Apr 2023 19:14:07 -0400 Subject: [PATCH 1/5] Added docker compose and changed readme --- plc4j/integrations/apache-kafka/README.md | 8 ++ .../apache-kafka/docker-compose.yml | 101 ++++++++++++++++++ 2 files changed, 109 insertions(+) create mode 100644 plc4j/integrations/apache-kafka/docker-compose.yml diff --git a/plc4j/integrations/apache-kafka/README.md b/plc4j/integrations/apache-kafka/README.md index e2b17084891..7af12cffbe6 100644 --- a/plc4j/integrations/apache-kafka/README.md +++ b/plc4j/integrations/apache-kafka/README.md @@ -234,3 +234,11 @@ between the base schemas. The schemas for the sink and source connectors are the same. This allows us to producer from one PLC and send the data to a sink. + + +### Start with Docker +If you want to use PLC4x with Kafka on Docker, simply download the docker-compose.yml file, configure the necessary port and IP settings, and start the containers. The available docker-compose.yml file includes four containers: zookeeper, kafka, kafka connect, and control-center. The control-center container provides a web interface to facilitate the configuration of kafka connect. If you don't want to use it, you can remove it from the docker-compose.yml file. 
+ +To start the docker-compose.yml file, download it and use the following command to start it: + docker-compose up -d + diff --git a/plc4j/integrations/apache-kafka/docker-compose.yml b/plc4j/integrations/apache-kafka/docker-compose.yml new file mode 100644 index 00000000000..25925993a5d --- /dev/null +++ b/plc4j/integrations/apache-kafka/docker-compose.yml @@ -0,0 +1,101 @@ +version: '3' +services: + zookeeper: + image: confluentinc/cp-zookeeper:latest + container_name: zookeeper + networks: + - kafka_network + ports: + - 22181:2181 + environment: + ZOOKEEPER_CLIENT_PORT: 2181 + ZOOKEEPER_TICK_TIME: 2000 + + kafka: + image: confluentinc/cp-kafka:latest + container_name: kafka + networks: + - kafka_network + depends_on: + - zookeeper + ports: + - 29093:29093 + environment: + KAFKA_BROKER_ID: 1 + KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 + KAFKA_LISTENERS: EXTERNAL_DIFFERENT_HOST://:29093,INTERNAL://:9092 + KAFKA_ADVERTISED_LISTENERS: INTERNAL://kafka:9092,EXTERNAL_DIFFERENT_HOST://YOUR_IP:29093 #YOUR_IP = It is necessary to enable external access. Please insert your machine's IP. 
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTERNAL:PLAINTEXT,EXTERNAL_DIFFERENT_HOST:PLAINTEXT + KAFKA_INTER_BROKER_LISTENER_NAME: INTERNAL + KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 + + control-center: + image: confluentinc/cp-enterprise-control-center:6.0.1 + hostname: control-center + depends_on: + - zookeeper + - kafka + - kafka-connect + ports: + - "9021:9021" + environment: + CONTROL_CENTER_BOOTSTRAP_SERVERS: 'kafka:9092' + CONTROL_CENTER_REPLICATION_FACTOR: 1 + CONTROL_CENTER_CONNECT_CLUSTER: http://kafka-connect:8083 + PORT: 9021 + networks: + - kafka_network + + kafka-connect: + image: confluentinc/cp-kafka-connect-base:6.0.0 + container_name: kafka-connect + depends_on: + - zookeeper + - kafka + ports: + - 8083:8083 + environment: + CONNECT_BOOTSTRAP_SERVERS: "kafka:9092" + CONNECT_REST_PORT: 8083 + CONNECT_GROUP_ID: kafka-connect + CONNECT_CONFIG_STORAGE_TOPIC: _connect-configs + CONNECT_OFFSET_STORAGE_TOPIC: _connect-offsets + CONNECT_STATUS_STORAGE_TOPIC: _connect-status + CONNECT_KEY_CONVERTER: org.apache.kafka.connect.storage.StringConverter + CONNECT_VALUE_CONVERTER: org.apache.kafka.connect.json.JsonConverter + CONNECT_INTERNAL_KEY_CONVERTER: "org.apache.kafka.connect.json.JsonConverter" + CONNECT_INTERNAL_VALUE_CONVERTER: "org.apache.kafka.connect.json.JsonConverter" + CONNECT_REST_ADVERTISED_HOST_NAME: "kafka-connect" + CONNECT_LOG4J_ROOT_LOGLEVEL: "INFO" + CONNECT_LOG4J_LOGGERS: "org.apache.kafka.connect.runtime.rest=WARN,org.reflections=ERROR" + CONNECT_LOG4J_APPENDER_STDOUT_LAYOUT_CONVERSIONPATTERN: "[%d] %p %X{connector.context}%m (%c:%L)%n" + CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: "1" + CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: "1" + CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: "1" + # # Optional settings to include to support Confluent Control Center + # CONNECT_PRODUCER_INTERCEPTOR_CLASSES: "io.confluent.monitoring.clients.interceptor.MonitoringProducerInterceptor" + # CONNECT_CONSUMER_INTERCEPTOR_CLASSES: 
"io.confluent.monitoring.clients.interceptor.MonitoringConsumerInterceptor" + # --------------- + CONNECT_PLUGIN_PATH: /usr/share/java,/usr/share/confluent-hub-components,/data/connect-jars + # If you want to use the Confluent Hub installer to d/l component, but make them available + # when running this offline, spin up the stack once and then run: + # docker cp kafka-connect:/usr/share/confluent-hub-components ./data/connect-jars + volumes: + - $PWD/data:/data + # In the command section, $ are replaced with $$ to avoid the error 'Invalid interpolation format for "command" option' + command: + - bash + - -c + - | + echo "Installing Connector" + confluent-hub install --no-prompt apache/kafka-connect-plc4x-plc4j:0.10.0 + # + echo "Launching Kafka Connect worker" + /etc/confluent/docker/run & + # + sleep infinity + networks: + - kafka_network +networks: + kafka_network: + name: kafka_docker_net \ No newline at end of file From 194f1f9453fc106205ff6efdb0f86ab29dd3e338 Mon Sep 17 00:00:00 2001 From: Danubia Macedo Date: Sun, 8 Oct 2023 22:06:46 -0400 Subject: [PATCH 2/5] removed Confluent references --- .../apache-kafka/docker-compose.yml | 56 ++++++++----------- 1 file changed, 22 insertions(+), 34 deletions(-) diff --git a/plc4j/integrations/apache-kafka/docker-compose.yml b/plc4j/integrations/apache-kafka/docker-compose.yml index 25925993a5d..046731fd325 100644 --- a/plc4j/integrations/apache-kafka/docker-compose.yml +++ b/plc4j/integrations/apache-kafka/docker-compose.yml @@ -1,7 +1,6 @@ -version: '3' services: zookeeper: - image: confluentinc/cp-zookeeper:latest + image: # Please add your zookeeper image here container_name: zookeeper networks: - kafka_network @@ -12,7 +11,7 @@ services: ZOOKEEPER_TICK_TIME: 2000 kafka: - image: confluentinc/cp-kafka:latest + image: # Please add your kafka image here container_name: kafka networks: - kafka_network @@ -28,26 +27,8 @@ services: KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 
INTERNAL:PLAINTEXT,EXTERNAL_DIFFERENT_HOST:PLAINTEXT KAFKA_INTER_BROKER_LISTENER_NAME: INTERNAL KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 - - control-center: - image: confluentinc/cp-enterprise-control-center:6.0.1 - hostname: control-center - depends_on: - - zookeeper - - kafka - - kafka-connect - ports: - - "9021:9021" - environment: - CONTROL_CENTER_BOOTSTRAP_SERVERS: 'kafka:9092' - CONTROL_CENTER_REPLICATION_FACTOR: 1 - CONTROL_CENTER_CONNECT_CLUSTER: http://kafka-connect:8083 - PORT: 9021 - networks: - - kafka_network - kafka-connect: - image: confluentinc/cp-kafka-connect-base:6.0.0 + image: # Please add your kafka connect image here container_name: kafka-connect depends_on: - zookeeper @@ -72,30 +53,37 @@ services: CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: "1" CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: "1" CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: "1" - # # Optional settings to include to support Confluent Control Center - # CONNECT_PRODUCER_INTERCEPTOR_CLASSES: "io.confluent.monitoring.clients.interceptor.MonitoringProducerInterceptor" - # CONNECT_CONSUMER_INTERCEPTOR_CLASSES: "io.confluent.monitoring.clients.interceptor.MonitoringConsumerInterceptor" - # --------------- CONNECT_PLUGIN_PATH: /usr/share/java,/usr/share/confluent-hub-components,/data/connect-jars - # If you want to use the Confluent Hub installer to d/l component, but make them available - # when running this offline, spin up the stack once and then run: - # docker cp kafka-connect:/usr/share/confluent-hub-components ./data/connect-jars + # To add a PLC4X connector to this image, it is necessary to create a volume with the plc4j-apache-kafka-0.10.0-uber-jar.jar file volumes: - - $PWD/data:/data - # In the command section, $ are replaced with $$ to avoid the error 'Invalid interpolation format for "command" option' + - ./folder-with-PLC4X-connector:/data/connect-jars command: - bash - -c - | - echo "Installing Connector" - confluent-hub install --no-prompt 
apache/kafka-connect-plc4x-plc4j:0.10.0 - # echo "Launching Kafka Connect worker" /etc/confluent/docker/run & # sleep infinity networks: - kafka_network + control-center: + image: # Please add your control center image here + hostname: control-center + depends_on: + - zookeeper + - kafka + - kafka-connect + ports: + - "9021:9021" + environment: + CONTROL_CENTER_BOOTSTRAP_SERVERS: 'kafka:9092' + CONTROL_CENTER_REPLICATION_FACTOR: 1 + CONTROL_CENTER_CONNECT_CLUSTER: http://kafka-connect:8083 + PORT: 9021 + networks: + - kafka_network + networks: kafka_network: name: kafka_docker_net \ No newline at end of file From 3cf21c3a905a2b5fe0047c2fb33552215205cd5d Mon Sep 17 00:00:00 2001 From: Danubia Macedo Date: Sun, 8 Oct 2023 22:08:25 -0400 Subject: [PATCH 3/5] fix references --- plc4j/integrations/apache-kafka/README.md | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/plc4j/integrations/apache-kafka/README.md b/plc4j/integrations/apache-kafka/README.md index 7af12cffbe6..885c571e515 100644 --- a/plc4j/integrations/apache-kafka/README.md +++ b/plc4j/integrations/apache-kafka/README.md @@ -17,7 +17,7 @@ :imagesdir: ../../images/ :icons: font -== https://kafka.apache.org/[Apache Kafka] +# [Apache Kafka](https://kafka.apache.org) Apache Kafka is an open-source distributed event streaming platform used by thousands of companies for high-performance data pipelines, streaming analytics, data integration, and mission-critical applications. # PLC4X Kafka Connectors The PLC4X connectors have the ability to pass data between Kafka and devices using industrial protocols. -They can be built from source from the future 0.8 https://plc4x.apache.org/users/download.html[release] of -PLC4X or from the latest snapshot from https://github.com/apache/plc4x[github]. -//They can also be downloaded from the confluent https://www.confluent.io/hub/[hub]. 
+They can be built from source from a future [release] of +PLC4X or from the latest snapshot from [github](https://github.com/apache/plc4x). ## Introduction From 0794cebbd92f1cd88493899d82560eab532b385d Mon Sep 17 00:00:00 2001 From: Danubia Macedo Date: Sun, 8 Oct 2023 22:09:33 -0400 Subject: [PATCH 4/5] additional suggestion on how to get the jar file --- plc4j/integrations/apache-kafka/README.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/plc4j/integrations/apache-kafka/README.md b/plc4j/integrations/apache-kafka/README.md index 885c571e515..8e761a420ab 100644 --- a/plc4j/integrations/apache-kafka/README.md +++ b/plc4j/integrations/apache-kafka/README.md @@ -50,9 +50,11 @@ In order to start a Kafka Connect system the following steps have to be performe 2) Unpack the archive. -3) Copy the `target/plc4j-apache-kafka-0.8.0-uber-jar.jar` to the Kafka `libs` or plugin directory specified +3) Copy the `target/plc4j-apache-kafka-0.10.0-uber-jar.jar` to the Kafka `libs` or plugin directory specified in the config/connect-distributed.properties file. + To obtain the `target/plc4j-apache-kafka-0.10.0-uber-jar.jar`, you can compile the project. + 4) Copy the files in the `config` to Kafka's `config` directory. ### Start a Kafka Broker From 962b5485d4b58bd042a1b5a039960ce0f16f6c83 Mon Sep 17 00:00:00 2001 From: Danubia Macedo Date: Sun, 8 Oct 2023 22:10:27 -0400 Subject: [PATCH 5/5] rewritten how to use PLC4X with Docker --- plc4j/integrations/apache-kafka/README.md | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/plc4j/integrations/apache-kafka/README.md b/plc4j/integrations/apache-kafka/README.md index 8e761a420ab..d1ef8014300 100644 --- a/plc4j/integrations/apache-kafka/README.md +++ b/plc4j/integrations/apache-kafka/README.md @@ -238,8 +238,16 @@ data to a sink. 
### Start with Docker -If you want to use PLC4x with Kafka on Docker, simply download the docker-compose.yml file, configure the necessary port and IP settings, and start the containers. The available docker-compose.yml file includes four containers: zookeeper, kafka, kafka connect, and control-center. The control-center container provides a web interface to facilitate the configuration of kafka connect. If you don't want to use it, you can remove it from the docker-compose.yml file. -To start the docker-compose.yml file, download it and use the following command to start it: - docker-compose up -d +If you want to use PLC4X with Kafka on Docker, follow these steps: +- Download the `docker-compose.yml` file. +- Add the linked images and, if necessary, configure the IP address. You can also change the container port. +- This `docker-compose.yml` file includes four containers: ZooKeeper, Kafka, Kafka Connect, and Control Center. If you do not need the control center, you can delete it. +- Add the `plc4j-apache-kafka-0.10.0-uber-jar.jar` connector to the volume. This is necessary for successfully using the PLC4X connector. + +To start the `docker-compose.yml` file, use the following command: + + docker-compose up -d + +[release]: https://plc4x.apache.org/users/download.html