This is an automated email from the ASF dual-hosted git repository.

rzo1 pushed a commit to branch STORM-4035-Remove-ConfluentAvroSerializer-(storm-hdfs)
in repository https://gitbox.apache.org/repos/asf/storm.git

commit 796b0ead0eb035535282236691fc9f1d826bae4c
Author: Richard Zowalla <[email protected]>
AuthorDate: Mon Feb 26 14:03:14 2024 +0100

    STORM-4035 - Remove ConfluentAvroSerializer (storm-hdfs)
---
 DEPENDENCY-LICENSES                                |  2 -
 THIRD-PARTY.properties                             |  2 -
 external/storm-hdfs/README.md                      |  4 +-
 external/storm-hdfs/pom.xml                        | 23 -------
 .../storm/hdfs/avro/ConfluentAvroSerializer.java   | 77 ----------------------
 5 files changed, 2 insertions(+), 106 deletions(-)

diff --git a/DEPENDENCY-LICENSES b/DEPENDENCY-LICENSES
index edfb00358..7a554a783 100644
--- a/DEPENDENCY-LICENSES
+++ b/DEPENDENCY-LICENSES
@@ -279,8 +279,6 @@ List of third-party dependencies grouped by their license type.
         * json-io (com.cedarsoftware:json-io:2.5.1 - https://github.com/jdereg/json-io)
         * JSON Small and Fast Parser (net.minidev:json-smart:2.5.0 - https://urielch.github.io/)
         * JVM Integration for Metrics (io.dropwizard.metrics:metrics-jvm:3.2.6 - http://metrics.dropwizard.io/metrics-jvm/)
-        * kafka-avro-serializer (io.confluent:kafka-avro-serializer:1.0 - http://confluent.io/kafka-avro-serializer)
-        * kafka-schema-registry-client (io.confluent:kafka-schema-registry-client:1.0 - http://confluent.io/kafka-schema-registry-client)
         * Kerb Simple Kdc (org.apache.kerby:kerb-simplekdc:1.0.1 - http://directory.apache.org/kerby/kerby-kerb/kerb-simplekdc)
         * Kerby ASN1 Project (org.apache.kerby:kerby-asn1:1.0.1 - http://directory.apache.org/kerby/kerby-common/kerby-asn1)
         * Kerby Config (org.apache.kerby:kerby-config:1.0.1 - http://directory.apache.org/kerby/kerby-common/kerby-config)
diff --git a/THIRD-PARTY.properties b/THIRD-PARTY.properties
index 511c6c0bd..f0c2251b4 100644
--- a/THIRD-PARTY.properties
+++ b/THIRD-PARTY.properties
@@ -18,8 +18,6 @@
 com.twitter--carbonite--1.5.0=Apache License version 2.0
 commons-beanutils--commons-beanutils--1.7.0=Apache License version 2.0
 commons-logging--commons-logging--1.0.3=Apache License version 2.0
-io.confluent--kafka-avro-serializer--1.0=Apache License version 2.0
-io.confluent--kafka-schema-registry-client--1.0=Apache License version 2.0
 org.apache.zookeeper--zookeeper--3.4.6=Apache License version 2.0
 org.codehaus.jettison--jettison--1.1=Apache License version 2.0
 oro--oro--2.0.8=Apache License version 2.0
diff --git a/external/storm-hdfs/README.md b/external/storm-hdfs/README.md
index 3bf22d056..9eb8b6dfc 100644
--- a/external/storm-hdfs/README.md
+++ b/external/storm-hdfs/README.md
@@ -351,8 +351,8 @@ method is provided for this:
 `AvroUtils.addAvroKryoSerializations(conf);`
 
 By default Storm will use the ```GenericAvroSerializer``` to handle serialization.  This will work, but there are much
-faster options available if you can pre-define the schemas you will be using or utilize an external schema registry. An
-implementation using the Confluent Schema Registry is provided, but others can be implemented and provided to Storm.
+faster options available if you can pre-define the schemas you will be using or utilize an external schema registry.
+
 Please see the javadoc for classes in org.apache.storm.hdfs.avro for information about using the built-in options or
 creating your own.
 
diff --git a/external/storm-hdfs/pom.xml b/external/storm-hdfs/pom.xml
index 2ca09f42e..f5dc27fef 100644
--- a/external/storm-hdfs/pom.xml
+++ b/external/storm-hdfs/pom.xml
@@ -255,29 +255,6 @@
             </exclusions>
             <scope>test</scope>
         </dependency>
-        <dependency>
-            <groupId>io.confluent</groupId>
-            <artifactId>kafka-avro-serializer</artifactId>
-            <version>1.0</version>
-            <exclusions>
-               <exclusion>
-                    <groupId>log4j</groupId>
-                    <artifactId>log4j</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.slf4j</groupId>
-                    <artifactId>slf4j-log4j12</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>ch.qos.reload4j</groupId>
-                    <artifactId>reload4j</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.slf4j</groupId>
-                    <artifactId>slf4j-reload4j</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
         <dependency>
             <groupId>org.apache.storm</groupId>
             <artifactId>storm-autocreds</artifactId>
diff --git a/external/storm-hdfs/src/main/java/org/apache/storm/hdfs/avro/ConfluentAvroSerializer.java b/external/storm-hdfs/src/main/java/org/apache/storm/hdfs/avro/ConfluentAvroSerializer.java
deleted file mode 100644
index 128d4ffdb..000000000
--- a/external/storm-hdfs/src/main/java/org/apache/storm/hdfs/avro/ConfluentAvroSerializer.java
+++ /dev/null
@@ -1,77 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.  The ASF licenses this file to you under the Apache License, Version
- * 2.0 (the "License"); you may not use this file except in compliance with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- */
-
-package org.apache.storm.hdfs.avro;
-
-import com.esotericsoftware.kryo.Kryo;
-import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient;
-import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
-import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException;
-import java.io.IOException;
-import java.util.Map;
-import org.apache.avro.Schema;
-
-/**
- * This class provides a mechanism to utilize the Confluent Schema Registry (https://github.com/confluentinc/schema-registry)
- * for Storm to (de)serialize Avro generic records across a topology.  It assumes the schema registry is up and running
- * completely independent of Storm.
- */
-public class ConfluentAvroSerializer extends AbstractAvroSerializer {
-
-    private final String url;
-    private SchemaRegistryClient theClient;
-
-    /**
-     * A constructor for use by test cases ONLY, thus the default scope.
-     * @param url The complete URL reference of a confluent schema registry, e.g. "http://HOST:PORT"
-     */
-    ConfluentAvroSerializer(String url) {
-        this.url = url;
-        this.theClient = new CachedSchemaRegistryClient(this.url, 10000);
-    }
-
-    /**
-     * A constructor with a signature that Storm can locate and use with kryo registration.
-     * See Storm's SerializationFactory class for details
-     *
-     * @param k Unused but needs to be present for Serialization Factory to find this constructor
-     * @param topoConf The global storm configuration. Must define "avro.schemaregistry.confluent" to locate the
-     *                  confluent schema registry. Should in the form of "http://HOST:PORT"
-     */
-    public ConfluentAvroSerializer(Kryo k, Map<String, Object> topoConf) {
-        url = (String) topoConf.get("avro.schemaregistry.confluent");
-        this.theClient = new CachedSchemaRegistryClient(this.url, 10000);
-    }
-
-    @Override
-    public String getFingerprint(Schema schema) {
-        final String subject = schema.getName();
-        final int guid;
-        try {
-            guid = theClient.register(subject, schema);
-        } catch (IOException | RestClientException e) {
-            throw new RuntimeException(e);
-        }
-        return Integer.toString(guid);
-    }
-
-    @Override
-    public Schema getSchema(String fingerPrint) {
-        final Schema theSchema;
-        try {
-            theSchema = theClient.getByID(Integer.parseInt(fingerPrint));
-        } catch (IOException | RestClientException e) {
-            throw new RuntimeException(e);
-        }
-        return theSchema;
-    }
-}
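
With the Confluent-backed implementation removed, topologies that relied on it need another AbstractAvroSerializer, as the updated README notes. The following is a minimal, illustrative sketch (not part of this commit) of a serializer that resolves pre-defined schemas locally instead of calling a remote schema registry; the package, class name, example schema, and the use of Avro's 64-bit parsing fingerprint are assumptions made purely for the example.

    package org.example.avro;  // hypothetical package, not part of storm-hdfs

    import java.util.HashMap;
    import java.util.Map;
    import org.apache.avro.Schema;
    import org.apache.avro.SchemaNormalization;
    import org.apache.storm.hdfs.avro.AbstractAvroSerializer;

    /**
     * Sketch of a registry-free serializer: schemas are pre-defined and looked up
     * locally by Avro's 64-bit parsing fingerprint (an assumption for this example,
     * not the scheme used by any built-in serializer).
     */
    public class PredefinedAvroSerializer extends AbstractAvroSerializer {

        // Schemas every worker in the topology knows up front.
        private static final Map<String, Schema> SCHEMAS = new HashMap<>();

        static {
            // Hypothetical record schema used only for illustration.
            Schema user = new Schema.Parser().parse(
                "{\"type\":\"record\",\"name\":\"User\",\"fields\":["
                + "{\"name\":\"name\",\"type\":\"string\"}]}");
            SCHEMAS.put(fingerprintOf(user), user);
        }

        private static String fingerprintOf(Schema schema) {
            // 64-bit Rabin fingerprint of the canonical schema form.
            return Long.toString(SchemaNormalization.parsingFingerprint64(schema));
        }

        @Override
        public String getFingerprint(Schema schema) {
            return fingerprintOf(schema);
        }

        @Override
        public Schema getSchema(String fingerprint) {
            Schema schema = SCHEMAS.get(fingerprint);
            if (schema == null) {
                throw new RuntimeException("Unknown Avro schema fingerprint: " + fingerprint);
            }
            return schema;
        }
    }

Assuming the same Kryo wiring as the built-in serializers, such a class could be registered with something like conf.registerSerialization(org.apache.avro.generic.GenericData.Record.class, PredefinedAvroSerializer.class); alternatively, topologies can keep the default GenericAvroSerializer or use the other built-in options documented in org.apache.storm.hdfs.avro.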
