This is an automated email from the ASF dual-hosted git repository.

riemer pushed a commit to branch 3483-use-kraft-based-kafka-as-default-for-internal-messaging
in repository https://gitbox.apache.org/repos/asf/streampipes.git


The following commit(s) were added to refs/heads/3483-use-kraft-based-kafka-as-default-for-internal-messaging by this push:
     new 2a5f0bdba9 feat(#3483): Use Kraft-based Kafka as default for internal messaging
2a5f0bdba9 is described below

commit 2a5f0bdba96e315af2b24e55782ce8952efe24ea
Author: Dominik Riemer <[email protected]>
AuthorDate: Fri Feb 14 21:56:30 2025 +0100

    feat(#3483): Use Kraft-based Kafka as default for internal messaging
---
 docker-compose.yml                                 | 36 ++++-----
 .../deploy/standalone/kafka/docker-compose.dev.yml | 26 ++++---
 .../cli/deploy/standalone/kafka/docker-compose.yml | 31 ++++----
 installer/compose/docker-compose.full.yml          | 41 ++++-------
 installer/compose/docker-compose.quickstart.yml    | 39 ++++------
 installer/compose/docker-compose.yml               | 39 ++++------
 .../model/datalake/DataExplorerWidgetModel.java    |  2 +
 .../model/grounding/KafkaTransportProtocol.java    | 40 ----------
 .../manager/matching/ProtocolSelector.java         |  4 +-
 .../streampipes/manager/matching/v2/TestUtils.java |  2 +-
 .../apache/streampipes/sdk/helpers/Protocols.java  |  2 +-
 .../src/lib/model/gen/streampipes-model.ts         | 85 +---------------------
 12 files changed, 91 insertions(+), 256 deletions(-)

diff --git a/docker-compose.yml b/docker-compose.yml
index 50f7a71506..e15d439227 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -84,33 +84,26 @@ services:
       spnet:
 
   kafka:
-    image: fogsyio/kafka:2.2.0
+    image: bitnami/kafka:3.9.0
     hostname: kafka
-    depends_on:
-      - zookeeper
     environment:
-      # see: https://github.com/confluentinc/schema-registry/issues/648
-      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT
-      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://:9092
-      KAFKA_LISTENERS: PLAINTEXT://:9092
-      KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
-      KAFKA_ADVERTISED_HOST_NAME: kafka
-      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+      - KAFKA_CFG_NODE_ID=0
+      - KAFKA_CFG_PROCESS_ROLES=controller,broker
+      - KAFKA_CFG_CONTROLLER_QUORUM_VOTERS=0@kafka:9093
+      - KAFKA_CFG_LISTENERS=PLAINTEXT://:9092,CONTROLLER://:9093,OUTSIDE://:9094
+      - KAFKA_CFG_ADVERTISED_LISTENERS=PLAINTEXT://kafka:9092,OUTSIDE://localhost:9094
+      - KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP=CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT,OUTSIDE:PLAINTEXT
+      - KAFKA_CFG_INTER_BROKER_LISTENER_NAME=PLAINTEXT
+      - KAFKA_CFG_MESSAGE_MAX_BYTES=5000012
+      - KAFKA_CFG_FETCH_MESSAGE_MAX_BYTES=5000012
+      - KAFKA_CFG_REPLICA_FETCH_MAX_BYTES=10000000
+      - KAFKA_CFG_CONTROLLER_LISTENER_NAMES=CONTROLLER
     volumes:
-      - kafka:/kafka
-      - /var/run/docker.sock:/var/run/docker.sock
+      - kafka3:/bitnami
     logging: *default-logging
     networks:
       spnet:
 
-  zookeeper:
-    image: fogsyio/zookeeper:3.4.13
-    logging: *default-logging
-    volumes:
-      - zookeeper:/opt/zookeeper-3.4.13
-    networks:
-      spnet:
-
   influxdb:
     image: influxdb:2.6
     environment:
@@ -144,10 +137,9 @@ services:
       spnet:
 
 volumes:
-  kafka:
+  kafka3:
   files:
   couchdb:
-  zookeeper:
   influxdb:
   influxdb2:
   backend:
diff --git a/installer/cli/deploy/standalone/kafka/docker-compose.dev.yml b/installer/cli/deploy/standalone/kafka/docker-compose.dev.yml
index 2410930d61..8ac1f2e8ba 100644
--- a/installer/cli/deploy/standalone/kafka/docker-compose.dev.yml
+++ b/installer/cli/deploy/standalone/kafka/docker-compose.dev.yml
@@ -17,16 +17,18 @@ services:
   kafka:
     ports:
       - "9094:9094"
-    depends_on:
-      - zookeeper
     environment:
-      # see: https://github.com/confluentinc/schema-registry/issues/648
-      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,OUTSIDE:PLAINTEXT
-      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://:9092,OUTSIDE://localhost:9094 # Replace localhost with your external address if Kafka should be reachable from external systems.
-      KAFKA_LISTENERS: PLAINTEXT://:9092,OUTSIDE://:9094
-      KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
-      KAFKA_ADVERTISED_HOST_NAME: kafka
-      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
-      KAFKA_MESSAGE_MAX_BYTES: 5000012
-      KAFKA_FETCH_MESSAGE_MAX_BYTES: 5000012
-      KAFKA_REPLICA_FETCH_MAX_BYTES: 10000000
+      # KRaft settings
+      - KAFKA_CFG_NODE_ID=0
+      - KAFKA_CFG_PROCESS_ROLES=controller,broker
+      - KAFKA_CFG_CONTROLLER_QUORUM_VOTERS=0@kafka:9093
+      # Listeners
+      - KAFKA_CFG_LISTENERS=PLAINTEXT://:9092,CONTROLLER://:9093,OUTSIDE://:9094
+      - KAFKA_CFG_ADVERTISED_LISTENERS=PLAINTEXT://kafka:9092,OUTSIDE://localhost:9094
+      - KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP=CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT,OUTSIDE:PLAINTEXT
+      - KAFKA_CFG_INTER_BROKER_LISTENER_NAME=PLAINTEXT
+      - KAFKA_CFG_MESSAGE_MAX_BYTES=5000012
+      - KAFKA_CFG_FETCH_MESSAGE_MAX_BYTES=5000012
+      - KAFKA_CFG_REPLICA_FETCH_MAX_BYTES=10000000
+      - KAFKA_CFG_CONTROLLER_LISTENER_NAMES=CONTROLLER
+
diff --git a/installer/cli/deploy/standalone/kafka/docker-compose.yml b/installer/cli/deploy/standalone/kafka/docker-compose.yml
index c3df0c7e0c..a4d689d2e1 100644
--- a/installer/cli/deploy/standalone/kafka/docker-compose.yml
+++ b/installer/cli/deploy/standalone/kafka/docker-compose.yml
@@ -15,24 +15,23 @@
 
 services:
   kafka:
-    image: fogsyio/kafka:2.2.0
+    image: bitnami/kafka:3.9.0
     hostname: kafka
-    depends_on:
-      - zookeeper
     environment:
-      # see: https://github.com/confluentinc/schema-registry/issues/648
-      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT
-      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://:9092
-      KAFKA_LISTENERS: PLAINTEXT://:9092
-      KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
-      KAFKA_ADVERTISED_HOST_NAME: kafka
-      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
-      KAFKA_MESSAGE_MAX_BYTES: 5000012
-      KAFKA_FETCH_MESSAGE_MAX_BYTES: 5000012
-      KAFKA_REPLICA_FETCH_MAX_BYTES: 10000000
+      - KAFKA_CFG_NODE_ID=0
+      - KAFKA_CFG_PROCESS_ROLES=controller,broker
+      - KAFKA_CFG_CONTROLLER_QUORUM_VOTERS=0@kafka:9093
+      # Listeners
+      - KAFKA_CFG_LISTENERS=PLAINTEXT://:9092,CONTROLLER://:9093
+      - KAFKA_CFG_ADVERTISED_LISTENERS=PLAINTEXT://:9092
+      - KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP=CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT
+      - KAFKA_CFG_INTER_BROKER_LISTENER_NAME=PLAINTEXT
+      - KAFKA_CFG_MESSAGE_MAX_BYTES=5000012
+      - KAFKA_CFG_FETCH_MESSAGE_MAX_BYTES=5000012
+      - KAFKA_CFG_REPLICA_FETCH_MAX_BYTES=10000000
+      - KAFKA_CFG_CONTROLLER_LISTENER_NAMES=CONTROLLER
     volumes:
-      - kafka:/kafka
-      - /var/run/docker.sock:/var/run/docker.sock
+      - kafka3:/bitnami
     logging:
       driver: "json-file"
       options:
@@ -42,7 +41,7 @@ services:
       spnet:
 
 volumes:
-  kafka:
+  kafka3:
 
 networks:
   spnet:
diff --git a/installer/compose/docker-compose.full.yml b/installer/compose/docker-compose.full.yml
index 2ad26d92cd..47d12fff2e 100644
--- a/installer/compose/docker-compose.full.yml
+++ b/installer/compose/docker-compose.full.yml
@@ -65,32 +65,22 @@ services:
       spnet:
 
   kafka:
-    image: fogsyio/kafka:2.2.0
+    image: bitnami/kafka:3.9.0
     hostname: kafka
-    depends_on:
-      - zookeeper
     environment:
-      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,OUTSIDE:PLAINTEXT
-      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://:9092,OUTSIDE://localhost:9094 # Replace localhost with your external address if Kafka should be reachable from external systems.
-      KAFKA_LISTENERS: PLAINTEXT://:9092,OUTSIDE://:9094
-      KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
-      KAFKA_ADVERTISED_HOST_NAME: kafka
-      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
-      KAFKA_MESSAGE_MAX_BYTES: 5000012
-      KAFKA_FETCH_MESSAGE_MAX_BYTES: 5000012
-      KAFKA_REPLICA_FETCH_MAX_BYTES: 10000000
+      - KAFKA_CFG_NODE_ID=0
+      - KAFKA_CFG_PROCESS_ROLES=controller,broker
+      - KAFKA_CFG_CONTROLLER_QUORUM_VOTERS=0@kafka:9093
+      - KAFKA_CFG_LISTENERS=PLAINTEXT://:9092,CONTROLLER://:9093,OUTSIDE://:9094
+      - KAFKA_CFG_ADVERTISED_LISTENERS=PLAINTEXT://kafka:9092,OUTSIDE://localhost:9094
+      - KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP=CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT,OUTSIDE:PLAINTEXT
+      - KAFKA_CFG_INTER_BROKER_LISTENER_NAME=PLAINTEXT
+      - KAFKA_CFG_MESSAGE_MAX_BYTES=5000012
+      - KAFKA_CFG_FETCH_MESSAGE_MAX_BYTES=5000012
+      - KAFKA_CFG_REPLICA_FETCH_MAX_BYTES=10000000
+      - KAFKA_CFG_CONTROLLER_LISTENER_NAMES=CONTROLLER
     volumes:
-      - kafka:/kafka
-      - /var/run/docker.sock:/var/run/docker.sock
-    logging: *default-logging
-    restart: unless-stopped
-    networks:
-      spnet:
-
-  zookeeper:
-    image: fogsyio/zookeeper:3.4.13
-    volumes:
-      - zookeeper:/opt/zookeeper-3.4.13
+      - kafka3:/bitnami
     logging: *default-logging
     restart: unless-stopped
     networks:
@@ -155,14 +145,11 @@ volumes:
   backend:
   connect:
   couchdb:
-  kafka:
-  zookeeper:
+  kafka3:
   influxdb:
   influxdb2:
   files:
   nginx:
-  
-
 
 networks:
   spnet:
diff --git a/installer/compose/docker-compose.quickstart.yml b/installer/compose/docker-compose.quickstart.yml
index 175d931f0d..915c7901eb 100644
--- a/installer/compose/docker-compose.quickstart.yml
+++ b/installer/compose/docker-compose.quickstart.yml
@@ -65,32 +65,22 @@ services:
       spnet:
 
   kafka:
-    image: fogsyio/kafka:2.2.0
+    image: bitnami/kafka:3.9.0
     hostname: kafka
-    depends_on:
-      - zookeeper
     environment:
-      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,OUTSIDE:PLAINTEXT
-      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://:9092,OUTSIDE://localhost:9094 # Replace localhost with your external address if Kafka should be reachable from external systems.
-      KAFKA_LISTENERS: PLAINTEXT://:9092,OUTSIDE://:9094
-      KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
-      KAFKA_ADVERTISED_HOST_NAME: kafka
-      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
-      KAFKA_MESSAGE_MAX_BYTES: 5000012
-      KAFKA_FETCH_MESSAGE_MAX_BYTES: 5000012
-      KAFKA_REPLICA_FETCH_MAX_BYTES: 10000000
-    volumes:
-      - kafka:/kafka
-      - /var/run/docker.sock:/var/run/docker.sock
-    logging: *default-logging
-    restart: unless-stopped
-    networks:
-      spnet:
-
-  zookeeper:
-    image: fogsyio/zookeeper:3.4.13
+      - KAFKA_CFG_NODE_ID=0
+      - KAFKA_CFG_PROCESS_ROLES=controller,broker
+      - KAFKA_CFG_CONTROLLER_QUORUM_VOTERS=0@kafka:9093
+      - KAFKA_CFG_LISTENERS=PLAINTEXT://:9092,CONTROLLER://:9093,OUTSIDE://:9094
+      - KAFKA_CFG_ADVERTISED_LISTENERS=PLAINTEXT://kafka:9092,OUTSIDE://localhost:9094
+      - KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP=CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT,OUTSIDE:PLAINTEXT
+      - KAFKA_CFG_INTER_BROKER_LISTENER_NAME=PLAINTEXT
+      - KAFKA_CFG_MESSAGE_MAX_BYTES=5000012
+      - KAFKA_CFG_FETCH_MESSAGE_MAX_BYTES=5000012
+      - KAFKA_CFG_REPLICA_FETCH_MAX_BYTES=10000000
+      - KAFKA_CFG_CONTROLLER_LISTENER_NAMES=CONTROLLER
     volumes:
-      - zookeeper:/opt/zookeeper-3.4.13
+      - kafka3:/bitnami
     logging: *default-logging
     restart: unless-stopped
     networks:
@@ -141,8 +131,7 @@ volumes:
   backend:
   connect:
   couchdb:
-  kafka:
-  zookeeper:
+  kafka3:
   influxdb:
   influxdb2:
   files:
diff --git a/installer/compose/docker-compose.yml b/installer/compose/docker-compose.yml
index 6157972476..5c8a9e3c77 100644
--- a/installer/compose/docker-compose.yml
+++ b/installer/compose/docker-compose.yml
@@ -65,32 +65,22 @@ services:
       spnet:
 
   kafka:
-    image: fogsyio/kafka:2.2.0
+    image: bitnami/kafka:3.9.0
     hostname: kafka
-    depends_on:
-      - zookeeper
     environment:
-      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,OUTSIDE:PLAINTEXT
-      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://:9092,OUTSIDE://localhost:9094 # Replace localhost with your external address if Kafka should be reachable from external systems.
-      KAFKA_LISTENERS: PLAINTEXT://:9092,OUTSIDE://:9094
-      KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
-      KAFKA_ADVERTISED_HOST_NAME: kafka
-      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
-      KAFKA_MESSAGE_MAX_BYTES: 5000012
-      KAFKA_FETCH_MESSAGE_MAX_BYTES: 5000012
-      KAFKA_REPLICA_FETCH_MAX_BYTES: 10000000
-    volumes:
-      - kafka:/kafka
-      - /var/run/docker.sock:/var/run/docker.sock
-    logging: *default-logging
-    restart: unless-stopped
-    networks:
-      spnet:
-
-  zookeeper:
-    image: fogsyio/zookeeper:3.4.13
+      - KAFKA_CFG_NODE_ID=0
+      - KAFKA_CFG_PROCESS_ROLES=controller,broker
+      - KAFKA_CFG_CONTROLLER_QUORUM_VOTERS=0@kafka:9093
+      - KAFKA_CFG_LISTENERS=PLAINTEXT://:9092,CONTROLLER://:9093,OUTSIDE://:9094
+      - KAFKA_CFG_ADVERTISED_LISTENERS=PLAINTEXT://kafka:9092,OUTSIDE://localhost:9094
+      - KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP=CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT,OUTSIDE:PLAINTEXT
+      - KAFKA_CFG_INTER_BROKER_LISTENER_NAME=PLAINTEXT
+      - KAFKA_CFG_MESSAGE_MAX_BYTES=5000012
+      - KAFKA_CFG_FETCH_MESSAGE_MAX_BYTES=5000012
+      - KAFKA_CFG_REPLICA_FETCH_MAX_BYTES=10000000
+      - KAFKA_CFG_CONTROLLER_LISTENER_NAMES=CONTROLLER
     volumes:
-      - zookeeper:/opt/zookeeper-3.4.13
+      - kafka3:/bitnami
     logging: *default-logging
     restart: unless-stopped
     networks:
@@ -131,8 +121,7 @@ volumes:
   backend:
   connect:
   couchdb:
-  kafka:
-  zookeeper:
+  kafka3:
   influxdb:
   influxdb2:
   files:
diff --git a/streampipes-model/src/main/java/org/apache/streampipes/model/datalake/DataExplorerWidgetModel.java b/streampipes-model/src/main/java/org/apache/streampipes/model/datalake/DataExplorerWidgetModel.java
index 02b244d22c..8e302ae163 100644
--- a/streampipes-model/src/main/java/org/apache/streampipes/model/datalake/DataExplorerWidgetModel.java
+++ b/streampipes-model/src/main/java/org/apache/streampipes/model/datalake/DataExplorerWidgetModel.java
@@ -19,12 +19,14 @@
 package org.apache.streampipes.model.datalake;
 
 import org.apache.streampipes.model.dashboard.DashboardEntity;
+import org.apache.streampipes.model.shared.annotation.TsModel;
 
 import com.fasterxml.jackson.databind.annotation.JsonSerialize;
 
 import java.util.HashMap;
 import java.util.Map;
 
+@TsModel
 public class DataExplorerWidgetModel extends DashboardEntity {
 
   private String widgetId;
diff --git a/streampipes-model/src/main/java/org/apache/streampipes/model/grounding/KafkaTransportProtocol.java b/streampipes-model/src/main/java/org/apache/streampipes/model/grounding/KafkaTransportProtocol.java
index 9fe5228abe..8af85bdd56 100644
--- a/streampipes-model/src/main/java/org/apache/streampipes/model/grounding/KafkaTransportProtocol.java
+++ b/streampipes-model/src/main/java/org/apache/streampipes/model/grounding/KafkaTransportProtocol.java
@@ -20,12 +20,6 @@ package org.apache.streampipes.model.grounding;
 
 public class KafkaTransportProtocol extends TransportProtocol {
 
-  private static final long serialVersionUID = -4067982203807146257L;
-
-  private String zookeeperHost;
-
-  private int zookeeperPort;
-
   private int kafkaPort;
 
   private Integer lingerMs;
@@ -44,24 +38,12 @@ public class KafkaTransportProtocol extends TransportProtocol {
 
   public KafkaTransportProtocol(String kafkaHost, int kafkaPort, String topic) {
     super(kafkaHost, new SimpleTopicDefinition(topic));
-    this.zookeeperHost = kafkaHost;
-    this.zookeeperPort = kafkaPort;
-    this.kafkaPort = kafkaPort;
-  }
-
-  public KafkaTransportProtocol(String kafkaHost, int kafkaPort, String topic, String zookeeperHost,
-                                int zookeeperPort) {
-    super(kafkaHost, new SimpleTopicDefinition(topic));
-    this.zookeeperHost = zookeeperHost;
-    this.zookeeperPort = zookeeperPort;
     this.kafkaPort = kafkaPort;
   }
 
   public KafkaTransportProtocol(KafkaTransportProtocol other) {
     super(other);
     this.kafkaPort = other.getKafkaPort();
-    this.zookeeperHost = other.getZookeeperHost();
-    this.zookeeperPort = other.getZookeeperPort();
     this.acks = other.getAcks();
     this.batchSize = other.getBatchSize();
     this.groupId = other.getGroupId();
@@ -74,34 +56,12 @@ public class KafkaTransportProtocol extends TransportProtocol {
   public KafkaTransportProtocol(String kafkaHost, Integer kafkaPort, WildcardTopicDefinition wildcardTopicDefinition) {
     super(kafkaHost, wildcardTopicDefinition);
     this.kafkaPort = kafkaPort;
-    this.zookeeperHost = kafkaHost;
-    this.zookeeperPort = kafkaPort;
   }
 
   public KafkaTransportProtocol() {
     super();
   }
 
-  public static long getSerialVersionUID() {
-    return serialVersionUID;
-  }
-
-  public String getZookeeperHost() {
-    return zookeeperHost;
-  }
-
-  public void setZookeeperHost(String zookeeperHost) {
-    this.zookeeperHost = zookeeperHost;
-  }
-
-  public int getZookeeperPort() {
-    return zookeeperPort;
-  }
-
-  public void setZookeeperPort(int zookeeperPort) {
-    this.zookeeperPort = zookeeperPort;
-  }
-
   public int getKafkaPort() {
     return kafkaPort;
   }
diff --git a/streampipes-pipeline-management/src/main/java/org/apache/streampipes/manager/matching/ProtocolSelector.java b/streampipes-pipeline-management/src/main/java/org/apache/streampipes/manager/matching/ProtocolSelector.java
index e1eb685f6d..e1eeb4f58f 100644
--- a/streampipes-pipeline-management/src/main/java/org/apache/streampipes/manager/matching/ProtocolSelector.java
+++ b/streampipes-pipeline-management/src/main/java/org/apache/streampipes/manager/matching/ProtocolSelector.java
@@ -114,9 +114,7 @@ public class ProtocolSelector extends GroundingSelector {
     return new KafkaTransportProtocol(
         messagingSettings.getKafkaHost(),
         messagingSettings.getKafkaPort(),
-        outputTopic,
-        messagingSettings.getZookeeperHost(),
-        messagingSettings.getZookeeperPort()
+        outputTopic
     );
   }
 
diff --git a/streampipes-pipeline-management/src/test/java/org/apache/streampipes/manager/matching/v2/TestUtils.java b/streampipes-pipeline-management/src/test/java/org/apache/streampipes/manager/matching/v2/TestUtils.java
index 78d291a369..ddfee95cdc 100644
--- a/streampipes-pipeline-management/src/test/java/org/apache/streampipes/manager/matching/v2/TestUtils.java
+++ b/streampipes-pipeline-management/src/test/java/org/apache/streampipes/manager/matching/v2/TestUtils.java
@@ -25,7 +25,7 @@ import org.apache.streampipes.model.grounding.TransportProtocol;
 public class TestUtils {
 
   public static TransportProtocol kafkaProtocol() {
-    return new KafkaTransportProtocol("localhost", 9092, "abc", "localhost", 2181);
+    return new KafkaTransportProtocol("localhost", 9092, "abc");
   }
 
   public static TransportProtocol jmsProtocol() {
diff --git a/streampipes-sdk/src/main/java/org/apache/streampipes/sdk/helpers/Protocols.java b/streampipes-sdk/src/main/java/org/apache/streampipes/sdk/helpers/Protocols.java
index f9c25f11c8..f20e15f40f 100644
--- a/streampipes-sdk/src/main/java/org/apache/streampipes/sdk/helpers/Protocols.java
+++ b/streampipes-sdk/src/main/java/org/apache/streampipes/sdk/helpers/Protocols.java
@@ -36,7 +36,7 @@ public class Protocols {
    * containing URL and topic where data arrives.
    */
   public static KafkaTransportProtocol kafka(String kafkaHost, Integer kafkaPort, String topic) {
-    return new KafkaTransportProtocol(kafkaHost, kafkaPort, topic, kafkaHost, kafkaPort);
+    return new KafkaTransportProtocol(kafkaHost, kafkaPort, topic);
   }
 
   /**
diff --git a/ui/projects/streampipes/platform-services/src/lib/model/gen/streampipes-model.ts b/ui/projects/streampipes/platform-services/src/lib/model/gen/streampipes-model.ts
index 31f14e6b19..03013586f6 100644
--- a/ui/projects/streampipes/platform-services/src/lib/model/gen/streampipes-model.ts
+++ b/ui/projects/streampipes/platform-services/src/lib/model/gen/streampipes-model.ts
@@ -20,7 +20,7 @@
 /* tslint:disable */
 /* eslint-disable */
 // @ts-nocheck
-// Generated using typescript-generator version 3.2.1263 on 2025-01-25 23:02:05.
+// Generated using typescript-generator version 3.2.1263 on 2025-02-14 21:48:17.
 
 export class NamedStreamPipesEntity implements Storable {
     '@class':
@@ -1188,61 +1188,6 @@ export class DashboardModel implements Storable {
     }
 }
 
-export class DashboardWidgetModel extends DashboardEntity {
-    dashboardWidgetSettings: DashboardWidgetSettings;
-    pipelineId: string;
-    visualizationName: string;
-    widgetId: string;
-    widgetType: string;
-
-    static fromData(
-        data: DashboardWidgetModel,
-        target?: DashboardWidgetModel,
-    ): DashboardWidgetModel {
-        if (!data) {
-            return data;
-        }
-        const instance = target || new DashboardWidgetModel();
-        super.fromData(data, instance);
-        instance.dashboardWidgetSettings = DashboardWidgetSettings.fromData(
-            data.dashboardWidgetSettings,
-        );
-        instance.pipelineId = data.pipelineId;
-        instance.visualizationName = data.visualizationName;
-        instance.widgetId = data.widgetId;
-        instance.widgetType = data.widgetType;
-        return instance;
-    }
-}
-
-export class DashboardWidgetSettings {
-    config: StaticPropertyUnion[];
-    requiredSchema: EventSchema;
-    widgetDescription: string;
-    widgetIconName: string;
-    widgetLabel: string;
-    widgetName: string;
-
-    static fromData(
-        data: DashboardWidgetSettings,
-        target?: DashboardWidgetSettings,
-    ): DashboardWidgetSettings {
-        if (!data) {
-            return data;
-        }
-        const instance = target || new DashboardWidgetSettings();
-        instance.config = __getCopyArrayFn(StaticProperty.fromDataUnion)(
-            data.config,
-        );
-        instance.requiredSchema = EventSchema.fromData(data.requiredSchema);
-        instance.widgetDescription = data.widgetDescription;
-        instance.widgetIconName = data.widgetIconName;
-        instance.widgetLabel = data.widgetLabel;
-        instance.widgetName = data.widgetName;
-        return instance;
-    }
-}
-
 export class DataExplorerWidgetModel extends DashboardEntity {
     baseAppearanceConfig: { [index: string]: any };
     dataConfig: { [index: string]: any };
@@ -2208,8 +2153,6 @@ export class KafkaTransportProtocol extends TransportProtocol {
     'maxRequestSize': string;
     'messageMaxBytes': string;
     'offset': string;
-    'zookeeperHost': string;
-    'zookeeperPort': number;
 
     static 'fromData'(
         data: KafkaTransportProtocol,
@@ -2228,8 +2171,6 @@ export class KafkaTransportProtocol extends TransportProtocol {
         instance.maxRequestSize = data.maxRequestSize;
         instance.messageMaxBytes = data.messageMaxBytes;
         instance.offset = data.offset;
-        instance.zookeeperHost = data.zookeeperHost;
-        instance.zookeeperPort = data.zookeeperPort;
         return instance;
     }
 }
@@ -4139,30 +4080,6 @@ export class UserInfo {
     }
 }
 
-export class VisualizablePipeline {
-    pipelineId: string;
-    pipelineName: string;
-    schema: EventSchema;
-    topic: string;
-    visualizationName: string;
-
-    static fromData(
-        data: VisualizablePipeline,
-        target?: VisualizablePipeline,
-    ): VisualizablePipeline {
-        if (!data) {
-            return data;
-        }
-        const instance = target || new VisualizablePipeline();
-        instance.pipelineId = data.pipelineId;
-        instance.pipelineName = data.pipelineName;
-        instance.schema = EventSchema.fromData(data.schema);
-        instance.topic = data.topic;
-        instance.visualizationName = data.visualizationName;
-        return instance;
-    }
-}
-
 export class WildcardTopicDefinition extends TopicDefinition {
     '@class': 'org.apache.streampipes.model.grounding.WildcardTopicDefinition';
     'wildcardTopicMappings': WildcardTopicMapping[];

Reply via email to