This is an automated email from the ASF dual-hosted git repository.

kirs pushed a commit to branch dev
in repository https://gitbox.apache.org/repos/asf/incubator-seatunnel.git


The following commit(s) were added to refs/heads/dev by this push:
     new 7d36706  [Improve][Core] remove Plugin implementation default override 
method (#1510)
7d36706 is described below

commit 7d36706b6e2d21451d956949c61a7440416d3e3b
Author: TrickyZerg <[email protected]>
AuthorDate: Sat Mar 19 22:16:15 2022 +0800

    [Improve][Core] remove Plugin implementation default override method (#1510)
    
    * Code improvement: remove the default override methods from Plugin 
implementations and move them into Plugin default methods.
    
    * remove unused import
    
    * import CheckResult class
---
 .../src/main/java/org/apache/seatunnel/plugin/Plugin.java      |  7 +++++--
 .../org/apache/seatunnel/flink/batch/FlinkBatchExecution.java  |  9 ---------
 .../apache/seatunnel/flink/stream/FlinkStreamExecution.java    |  9 ---------
 .../org/apache/seatunnel/spark/batch/SparkBatchExecution.java  | 10 ----------
 .../spark/structuredstream/StructuredStreamingExecution.java   | 10 ----------
 .../seatunnel/spark/stream/SparkStreamingExecution.scala       |  4 ----
 .../main/java/org/apache/seatunnel/flink/sink/ConsoleSink.java |  4 ----
 .../org/apache/seatunnel/flink/source/FakeSourceStream.java    | 10 ----------
 .../java/org/apache/seatunnel/flink/source/SocketStream.java   |  6 ------
 .../src/main/scala/org/apache/seatunnel/spark/sink/Email.scala |  2 --
 .../main/scala/org/apache/seatunnel/spark/source/Fake.scala    |  6 ------
 .../main/scala/org/apache/seatunnel/spark/source/File.scala    |  2 --
 .../main/scala/org/apache/seatunnel/spark/source/Hbase.scala   |  2 --
 .../src/main/scala/org/apache/seatunnel/spark/sink/Hive.scala  |  2 --
 .../main/scala/org/apache/seatunnel/spark/source/Hive.scala    |  2 --
 .../main/scala/org/apache/seatunnel/spark/source/Hudi.scala    |  2 --
 .../main/scala/org/apache/seatunnel/spark/source/Iceberg.scala |  1 -
 .../main/scala/org/apache/seatunnel/spark/source/Jdbc.scala    |  2 --
 .../main/scala/org/apache/seatunnel/spark/source/Kudu.scala    |  1 -
 .../scala/org/apache/seatunnel/spark/source/SocketStream.scala |  5 -----
 .../src/main/scala/org/apache/seatunnel/spark/sink/Tidb.scala  |  2 --
 .../apache/seatunnel/flink/transform/DataStreamToTable.java    |  4 ----
 .../main/scala/org/apache/seatunnel/spark/transform/Json.scala |  4 +---
 .../main/scala/org/apache/seatunnel/spark/transform/Sql.scala  |  1 -
 24 files changed, 6 insertions(+), 101 deletions(-)

diff --git 
a/seatunnel-apis/seatunnel-api-base/src/main/java/org/apache/seatunnel/plugin/Plugin.java
 
b/seatunnel-apis/seatunnel-api-base/src/main/java/org/apache/seatunnel/plugin/Plugin.java
index 5f1bc17..32e65e7 100644
--- 
a/seatunnel-apis/seatunnel-api-base/src/main/java/org/apache/seatunnel/plugin/Plugin.java
+++ 
b/seatunnel-apis/seatunnel-api-base/src/main/java/org/apache/seatunnel/plugin/Plugin.java
@@ -35,8 +35,11 @@ public interface Plugin<T extends RuntimeEnv> extends 
Serializable {
 
     Config getConfig();
 
-    CheckResult checkConfig();
+    default CheckResult checkConfig() {
+        return CheckResult.success();
+    }
 
-    void prepare(T prepareEnv);
+    default void prepare(T prepareEnv) {
+    }
 
 }
diff --git 
a/seatunnel-apis/seatunnel-api-flink/src/main/java/org/apache/seatunnel/flink/batch/FlinkBatchExecution.java
 
b/seatunnel-apis/seatunnel-api-flink/src/main/java/org/apache/seatunnel/flink/batch/FlinkBatchExecution.java
index c81a8cc..fac7c7c 100644
--- 
a/seatunnel-apis/seatunnel-api-flink/src/main/java/org/apache/seatunnel/flink/batch/FlinkBatchExecution.java
+++ 
b/seatunnel-apis/seatunnel-api-flink/src/main/java/org/apache/seatunnel/flink/batch/FlinkBatchExecution.java
@@ -17,7 +17,6 @@
 
 package org.apache.seatunnel.flink.batch;
 
-import org.apache.seatunnel.common.config.CheckResult;
 import org.apache.seatunnel.env.Execution;
 import org.apache.seatunnel.flink.FlinkEnvironment;
 import org.apache.seatunnel.flink.util.TableUtil;
@@ -116,12 +115,4 @@ public class FlinkBatchExecution implements 
Execution<FlinkBatchSource, FlinkBat
         return config;
     }
 
-    @Override
-    public CheckResult checkConfig() {
-        return CheckResult.success();
-    }
-
-    @Override
-    public void prepare(FlinkEnvironment prepareEnv) {
-    }
 }
diff --git 
a/seatunnel-apis/seatunnel-api-flink/src/main/java/org/apache/seatunnel/flink/stream/FlinkStreamExecution.java
 
b/seatunnel-apis/seatunnel-api-flink/src/main/java/org/apache/seatunnel/flink/stream/FlinkStreamExecution.java
index 2c5f124..1c2fe8f 100644
--- 
a/seatunnel-apis/seatunnel-api-flink/src/main/java/org/apache/seatunnel/flink/stream/FlinkStreamExecution.java
+++ 
b/seatunnel-apis/seatunnel-api-flink/src/main/java/org/apache/seatunnel/flink/stream/FlinkStreamExecution.java
@@ -17,7 +17,6 @@
 
 package org.apache.seatunnel.flink.stream;
 
-import org.apache.seatunnel.common.config.CheckResult;
 import org.apache.seatunnel.env.Execution;
 import org.apache.seatunnel.flink.FlinkEnvironment;
 import org.apache.seatunnel.flink.util.TableUtil;
@@ -115,12 +114,4 @@ public class FlinkStreamExecution implements 
Execution<FlinkStreamSource, FlinkS
         return config;
     }
 
-    @Override
-    public CheckResult checkConfig() {
-        return CheckResult.success();
-    }
-
-    @Override
-    public void prepare(FlinkEnvironment prepareEnv) {
-    }
 }
diff --git 
a/seatunnel-apis/seatunnel-api-spark/src/main/java/org/apache/seatunnel/spark/batch/SparkBatchExecution.java
 
b/seatunnel-apis/seatunnel-api-spark/src/main/java/org/apache/seatunnel/spark/batch/SparkBatchExecution.java
index 0eb6568..b884293 100644
--- 
a/seatunnel-apis/seatunnel-api-spark/src/main/java/org/apache/seatunnel/spark/batch/SparkBatchExecution.java
+++ 
b/seatunnel-apis/seatunnel-api-spark/src/main/java/org/apache/seatunnel/spark/batch/SparkBatchExecution.java
@@ -17,7 +17,6 @@
 
 package org.apache.seatunnel.spark.batch;
 
-import org.apache.seatunnel.common.config.CheckResult;
 import org.apache.seatunnel.common.config.ConfigRuntimeException;
 import org.apache.seatunnel.env.Execution;
 import org.apache.seatunnel.spark.BaseSparkSink;
@@ -115,13 +114,4 @@ public class SparkBatchExecution implements 
Execution<SparkBatchSource, BaseSpar
         return this.config;
     }
 
-    @Override
-    public CheckResult checkConfig() {
-        return CheckResult.success();
-    }
-
-    @Override
-    public void prepare(SparkEnvironment prepareEnv) {
-
-    }
 }
diff --git 
a/seatunnel-apis/seatunnel-api-spark/src/main/java/org/apache/seatunnel/spark/structuredstream/StructuredStreamingExecution.java
 
b/seatunnel-apis/seatunnel-api-spark/src/main/java/org/apache/seatunnel/spark/structuredstream/StructuredStreamingExecution.java
index 8545b2a..d50f9cc 100644
--- 
a/seatunnel-apis/seatunnel-api-spark/src/main/java/org/apache/seatunnel/spark/structuredstream/StructuredStreamingExecution.java
+++ 
b/seatunnel-apis/seatunnel-api-spark/src/main/java/org/apache/seatunnel/spark/structuredstream/StructuredStreamingExecution.java
@@ -17,7 +17,6 @@
 
 package org.apache.seatunnel.spark.structuredstream;
 
-import org.apache.seatunnel.common.config.CheckResult;
 import org.apache.seatunnel.env.Execution;
 import org.apache.seatunnel.spark.BaseSparkTransform;
 import org.apache.seatunnel.spark.SparkEnvironment;
@@ -52,13 +51,4 @@ public class StructuredStreamingExecution implements 
Execution<StructuredStreami
         return this.config;
     }
 
-    @Override
-    public CheckResult checkConfig() {
-        return CheckResult.success();
-    }
-
-    @Override
-    public void prepare(SparkEnvironment prepareEnv) {
-
-    }
 }
diff --git 
a/seatunnel-apis/seatunnel-api-spark/src/main/scala/org/apache/seatunnel/spark/stream/SparkStreamingExecution.scala
 
b/seatunnel-apis/seatunnel-api-spark/src/main/scala/org/apache/seatunnel/spark/stream/SparkStreamingExecution.scala
index df75334..6687c51 100644
--- 
a/seatunnel-apis/seatunnel-api-spark/src/main/scala/org/apache/seatunnel/spark/stream/SparkStreamingExecution.scala
+++ 
b/seatunnel-apis/seatunnel-api-spark/src/main/scala/org/apache/seatunnel/spark/stream/SparkStreamingExecution.scala
@@ -16,7 +16,6 @@
  */
 package org.apache.seatunnel.spark.stream
 
-import org.apache.seatunnel.common.config.CheckResult
 import org.apache.seatunnel.env.Execution
 import org.apache.seatunnel.plugin.Plugin
 import org.apache.seatunnel.shade.com.typesafe.config.{Config, ConfigFactory}
@@ -75,7 +74,4 @@ class SparkStreamingExecution(sparkEnvironment: 
SparkEnvironment)
 
   override def getConfig: Config = config
 
-  override def checkConfig(): CheckResult = CheckResult.success()
-
-  override def prepare(void: SparkEnvironment): Unit = {}
 }
diff --git 
a/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-console/src/main/java/org/apache/seatunnel/flink/sink/ConsoleSink.java
 
b/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-console/src/main/java/org/apache/seatunnel/flink/sink/ConsoleSink.java
index 0304088..bcf0872 100644
--- 
a/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-console/src/main/java/org/apache/seatunnel/flink/sink/ConsoleSink.java
+++ 
b/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-console/src/main/java/org/apache/seatunnel/flink/sink/ConsoleSink.java
@@ -76,10 +76,6 @@ public class ConsoleSink extends RichOutputFormat<Row> 
implements FlinkBatchSink
     }
 
     @Override
-    public void prepare(FlinkEnvironment env) {
-    }
-
-    @Override
     public void configure(Configuration parameters) {
 
     }
diff --git 
a/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-fake/src/main/java/org/apache/seatunnel/flink/source/FakeSourceStream.java
 
b/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-fake/src/main/java/org/apache/seatunnel/flink/source/FakeSourceStream.java
index b7915dd..e4b34bd 100644
--- 
a/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-fake/src/main/java/org/apache/seatunnel/flink/source/FakeSourceStream.java
+++ 
b/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-fake/src/main/java/org/apache/seatunnel/flink/source/FakeSourceStream.java
@@ -20,7 +20,6 @@ package org.apache.seatunnel.flink.source;
 import static 
org.apache.flink.api.common.typeinfo.BasicTypeInfo.LONG_TYPE_INFO;
 import static 
org.apache.flink.api.common.typeinfo.BasicTypeInfo.STRING_TYPE_INFO;
 
-import org.apache.seatunnel.common.config.CheckResult;
 import org.apache.seatunnel.flink.FlinkEnvironment;
 import org.apache.seatunnel.flink.stream.FlinkStreamSource;
 
@@ -62,15 +61,6 @@ public class FakeSourceStream extends 
RichParallelSourceFunction<Row> implements
         return config;
     }
 
-    @Override
-    public CheckResult checkConfig() {
-        return CheckResult.success();
-    }
-
-    @Override
-    public void prepare(FlinkEnvironment env) {
-    }
-
     private static final String[] NAME_ARRAY = new String[]{"Gary", "Ricky 
Huo", "Kid Xiong"};
 
     @Override
diff --git 
a/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-socket/src/main/java/org/apache/seatunnel/flink/source/SocketStream.java
 
b/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-socket/src/main/java/org/apache/seatunnel/flink/source/SocketStream.java
index 549bf8d..2ad3f9f 100644
--- 
a/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-socket/src/main/java/org/apache/seatunnel/flink/source/SocketStream.java
+++ 
b/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-socket/src/main/java/org/apache/seatunnel/flink/source/SocketStream.java
@@ -17,7 +17,6 @@
 
 package org.apache.seatunnel.flink.source;
 
-import org.apache.seatunnel.common.config.CheckResult;
 import org.apache.seatunnel.flink.FlinkEnvironment;
 import org.apache.seatunnel.flink.stream.FlinkStreamSource;
 
@@ -64,11 +63,6 @@ public class SocketStream implements FlinkStreamSource {
     }
 
     @Override
-    public CheckResult checkConfig() {
-        return CheckResult.success();
-    }
-
-    @Override
     public void prepare(FlinkEnvironment prepareEnv) {
         if (config.hasPath(HOST)) {
             host = config.getString(HOST);
diff --git 
a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-email/src/main/scala/org/apache/seatunnel/spark/sink/Email.scala
 
b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-email/src/main/scala/org/apache/seatunnel/spark/sink/Email.scala
index ba1bcc4..a1bfe0e 100644
--- 
a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-email/src/main/scala/org/apache/seatunnel/spark/sink/Email.scala
+++ 
b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-email/src/main/scala/org/apache/seatunnel/spark/sink/Email.scala
@@ -98,8 +98,6 @@ class Email extends SparkBatchSink {
     CheckConfigUtil.checkAllExists(config, "from", "to", "host", "port", 
"password")
   }
 
-  override def prepare(prepareEnv: SparkEnvironment): Unit = {}
-
   def createMailer(
       host: String,
       port: Int,
diff --git 
a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-fake/src/main/scala/org/apache/seatunnel/spark/source/Fake.scala
 
b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-fake/src/main/scala/org/apache/seatunnel/spark/source/Fake.scala
index 8d7f461..4879025 100644
--- 
a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-fake/src/main/scala/org/apache/seatunnel/spark/source/Fake.scala
+++ 
b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-fake/src/main/scala/org/apache/seatunnel/spark/source/Fake.scala
@@ -16,7 +16,6 @@
  */
 package org.apache.seatunnel.spark.source
 
-import org.apache.seatunnel.common.config.CheckResult
 import org.apache.seatunnel.spark.SparkEnvironment
 import org.apache.seatunnel.spark.batch.SparkBatchSource
 import org.apache.spark.sql.catalyst.encoders.RowEncoder
@@ -25,8 +24,6 @@ import org.apache.spark.sql.{Dataset, Row, RowFactory}
 
 class Fake extends SparkBatchSource {
 
-  override def prepare(env: SparkEnvironment): Unit = {}
-
   override def getData(env: SparkEnvironment): Dataset[Row] = {
 
     val s = Seq(
@@ -40,7 +37,4 @@ class Fake extends SparkBatchSource {
     env.getSparkSession.createDataset(s)(RowEncoder(schema))
   }
 
-  override def checkConfig(): CheckResult = {
-    CheckResult.success()
-  }
 }
diff --git 
a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-file/src/main/scala/org/apache/seatunnel/spark/source/File.scala
 
b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-file/src/main/scala/org/apache/seatunnel/spark/source/File.scala
index 7ebb999..2dc6a1c 100644
--- 
a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-file/src/main/scala/org/apache/seatunnel/spark/source/File.scala
+++ 
b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-file/src/main/scala/org/apache/seatunnel/spark/source/File.scala
@@ -29,8 +29,6 @@ import scala.util.{Failure, Success, Try}
 
 class File extends SparkBatchSource {
 
-  override def prepare(env: SparkEnvironment): Unit = {}
-
   override def checkConfig(): CheckResult = {
     checkAllExists(config, PATH, FORMAT)
   }
diff --git 
a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-hbase/src/main/scala/org/apache/seatunnel/spark/source/Hbase.scala
 
b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-hbase/src/main/scala/org/apache/seatunnel/spark/source/Hbase.scala
index f73aec3..4c47a66 100644
--- 
a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-hbase/src/main/scala/org/apache/seatunnel/spark/source/Hbase.scala
+++ 
b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-hbase/src/main/scala/org/apache/seatunnel/spark/source/Hbase.scala
@@ -30,8 +30,6 @@ class Hbase extends SparkBatchSource {
 
   private final val FORMAT_SOURCE = "org.apache.hadoop.hbase.spark"
 
-  override def prepare(env: SparkEnvironment): Unit = {}
-
   override def checkConfig(): CheckResult = {
     checkAllExists(config, "hbase.zookeeper.quorum", "catalog")
   }
diff --git 
a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-hive/src/main/scala/org/apache/seatunnel/spark/sink/Hive.scala
 
b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-hive/src/main/scala/org/apache/seatunnel/spark/sink/Hive.scala
index f01dace..45de730 100644
--- 
a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-hive/src/main/scala/org/apache/seatunnel/spark/sink/Hive.scala
+++ 
b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-hive/src/main/scala/org/apache/seatunnel/spark/sink/Hive.scala
@@ -36,8 +36,6 @@ class Hive extends SparkBatchSink with Logging {
     }
   }
 
-  override def prepare(env: SparkEnvironment): Unit = {}
-
   override def output(df: Dataset[Row], environment: SparkEnvironment): Unit = 
{
     val sparkSession = df.sparkSession
     if (config.hasPath("sql")) {
diff --git 
a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-hive/src/main/scala/org/apache/seatunnel/spark/source/Hive.scala
 
b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-hive/src/main/scala/org/apache/seatunnel/spark/source/Hive.scala
index 782e07d..5ba1ae9 100644
--- 
a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-hive/src/main/scala/org/apache/seatunnel/spark/source/Hive.scala
+++ 
b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-hive/src/main/scala/org/apache/seatunnel/spark/source/Hive.scala
@@ -24,8 +24,6 @@ import org.apache.spark.sql.{Dataset, Row}
 
 class Hive extends SparkBatchSource {
 
-  override def prepare(env: SparkEnvironment): Unit = {}
-
   override def checkConfig(): CheckResult = {
     checkAllExists(config, "pre_sql")
   }
diff --git 
a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-hudi/src/main/scala/org/apache/seatunnel/spark/source/Hudi.scala
 
b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-hudi/src/main/scala/org/apache/seatunnel/spark/source/Hudi.scala
index d4562ea..7eebfaa 100644
--- 
a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-hudi/src/main/scala/org/apache/seatunnel/spark/source/Hudi.scala
+++ 
b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-hudi/src/main/scala/org/apache/seatunnel/spark/source/Hudi.scala
@@ -26,8 +26,6 @@ import scala.collection.JavaConversions._
 
 class Hudi extends SparkBatchSource {
 
-  override def prepare(env: SparkEnvironment): Unit = {}
-
   override def checkConfig(): CheckResult = {
     checkAllExists(config, "hoodie.datasource.read.paths")
   }
diff --git 
a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-iceberg/src/main/scala/org/apache/seatunnel/spark/source/Iceberg.scala
 
b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-iceberg/src/main/scala/org/apache/seatunnel/spark/source/Iceberg.scala
index e5a60a7..9d14459 100644
--- 
a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-iceberg/src/main/scala/org/apache/seatunnel/spark/source/Iceberg.scala
+++ 
b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-iceberg/src/main/scala/org/apache/seatunnel/spark/source/Iceberg.scala
@@ -47,5 +47,4 @@ class Iceberg extends SparkBatchSource {
     checkAllExists(config, "path", "pre_sql")
   }
 
-  override def prepare(prepareEnv: SparkEnvironment): Unit = {}
 }
diff --git 
a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-jdbc/src/main/scala/org/apache/seatunnel/spark/source/Jdbc.scala
 
b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-jdbc/src/main/scala/org/apache/seatunnel/spark/source/Jdbc.scala
index 965335b..9e4954e 100644
--- 
a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-jdbc/src/main/scala/org/apache/seatunnel/spark/source/Jdbc.scala
+++ 
b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-jdbc/src/main/scala/org/apache/seatunnel/spark/source/Jdbc.scala
@@ -27,8 +27,6 @@ import org.apache.spark.sql.{DataFrameReader, Dataset, Row, 
SparkSession}
 
 class Jdbc extends SparkBatchSource {
 
-  override def prepare(env: SparkEnvironment): Unit = {}
-
   override def getData(env: SparkEnvironment): Dataset[Row] = {
     jdbcReader(env.getSparkSession, config.getString("driver")).load()
   }
diff --git 
a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-kudu/src/main/scala/org/apache/seatunnel/spark/source/Kudu.scala
 
b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-kudu/src/main/scala/org/apache/seatunnel/spark/source/Kudu.scala
index d3b9a22..d5cb168 100644
--- 
a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-kudu/src/main/scala/org/apache/seatunnel/spark/source/Kudu.scala
+++ 
b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-kudu/src/main/scala/org/apache/seatunnel/spark/source/Kudu.scala
@@ -41,5 +41,4 @@ class Kudu extends SparkBatchSource {
     ds
   }
 
-  override def prepare(prepareEnv: SparkEnvironment): Unit = {}
 }
diff --git 
a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-socket/src/main/scala/org/apache/seatunnel/spark/source/SocketStream.scala
 
b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-socket/src/main/scala/org/apache/seatunnel/spark/source/SocketStream.scala
index ca94fec..e6b6ba2 100644
--- 
a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-socket/src/main/scala/org/apache/seatunnel/spark/source/SocketStream.scala
+++ 
b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-socket/src/main/scala/org/apache/seatunnel/spark/source/SocketStream.scala
@@ -18,7 +18,6 @@ package org.apache.seatunnel.spark.source
 
 import scala.collection.JavaConversions._
 
-import org.apache.seatunnel.common.config.CheckResult
 import org.apache.seatunnel.shade.com.typesafe.config.ConfigFactory
 import org.apache.seatunnel.spark.SparkEnvironment
 import org.apache.seatunnel.spark.stream.SparkStreamingSource
@@ -41,10 +40,6 @@ class SocketStream extends SparkStreamingSource[String] {
     env.getStreamingContext.socketTextStream(config.getString("host"), 
config.getInt("port"))
   }
 
-  override def checkConfig(): CheckResult = {
-    CheckResult.success()
-  }
-
   override def rdd2dataset(sparkSession: SparkSession, rdd: RDD[String]): 
Dataset[Row] = {
     val rowsRDD = rdd.map(element => {
       RowFactory.create(element)
diff --git 
a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-tidb/src/main/scala/org/apache/seatunnel/spark/sink/Tidb.scala
 
b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-tidb/src/main/scala/org/apache/seatunnel/spark/sink/Tidb.scala
index 245ffbe..d8a291d 100644
--- 
a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-tidb/src/main/scala/org/apache/seatunnel/spark/sink/Tidb.scala
+++ 
b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-tidb/src/main/scala/org/apache/seatunnel/spark/sink/Tidb.scala
@@ -60,6 +60,4 @@ class Tidb extends SparkBatchSink {
     checkAllExists(config, "addr", "port", "database", "table", "user", 
"password")
   }
 
-  override def prepare(prepareEnv: SparkEnvironment): Unit = {
-  }
 }
diff --git 
a/seatunnel-transforms/seatunnel-transforms-flink/seatunnel-transform-flink-datastream2table/src/main/java/org/apache/seatunnel/flink/transform/DataStreamToTable.java
 
b/seatunnel-transforms/seatunnel-transforms-flink/seatunnel-transform-flink-datastream2table/src/main/java/org/apache/seatunnel/flink/transform/DataStreamToTable.java
index 7ce02aa..834c7c5 100644
--- 
a/seatunnel-transforms/seatunnel-transforms-flink/seatunnel-transform-flink-datastream2table/src/main/java/org/apache/seatunnel/flink/transform/DataStreamToTable.java
+++ 
b/seatunnel-transforms/seatunnel-transforms-flink/seatunnel-transform-flink-datastream2table/src/main/java/org/apache/seatunnel/flink/transform/DataStreamToTable.java
@@ -63,8 +63,4 @@ public class DataStreamToTable implements 
FlinkStreamTransform, FlinkBatchTransf
     public CheckResult checkConfig() {
         return CheckConfigUtil.checkAllExists(config, 
Plugin.RESULT_TABLE_NAME);
     }
-
-    @Override
-    public void prepare(FlinkEnvironment env) {
-    }
 }
diff --git 
a/seatunnel-transforms/seatunnel-transforms-spark/seatunnel-transform-spark-json/src/main/scala/org/apache/seatunnel/spark/transform/Json.scala
 
b/seatunnel-transforms/seatunnel-transforms-spark/seatunnel-transform-spark-json/src/main/scala/org/apache/seatunnel/spark/transform/Json.scala
index 6e4855b..c96f13b 100644
--- 
a/seatunnel-transforms/seatunnel-transforms-spark/seatunnel-transform-spark-json/src/main/scala/org/apache/seatunnel/spark/transform/Json.scala
+++ 
b/seatunnel-transforms/seatunnel-transforms-spark/seatunnel-transform-spark-json/src/main/scala/org/apache/seatunnel/spark/transform/Json.scala
@@ -16,7 +16,7 @@
  */
 package org.apache.seatunnel.spark.transform
 
-import org.apache.seatunnel.common.config.{CheckResult, Common, 
ConfigRuntimeException}
+import org.apache.seatunnel.common.config.{Common, ConfigRuntimeException}
 import org.apache.seatunnel.shade.com.typesafe.config.ConfigFactory
 import org.apache.seatunnel.spark.{BaseSparkTransform, SparkEnvironment}
 import org.apache.spark.sql.functions._
@@ -91,8 +91,6 @@ class Json extends BaseSparkTransform {
     }
   }
 
-  override def checkConfig(): CheckResult = CheckResult.success()
-
   override def prepare(env: SparkEnvironment): Unit = {
     val defaultConfig = ConfigFactory.parseMap(
       Map(
diff --git 
a/seatunnel-transforms/seatunnel-transforms-spark/seatunnel-transform-spark-sql/src/main/scala/org/apache/seatunnel/spark/transform/Sql.scala
 
b/seatunnel-transforms/seatunnel-transforms-spark/seatunnel-transform-spark-sql/src/main/scala/org/apache/seatunnel/spark/transform/Sql.scala
index 917c117..0bfa79f 100644
--- 
a/seatunnel-transforms/seatunnel-transforms-spark/seatunnel-transform-spark-sql/src/main/scala/org/apache/seatunnel/spark/transform/Sql.scala
+++ 
b/seatunnel-transforms/seatunnel-transforms-spark/seatunnel-transform-spark-sql/src/main/scala/org/apache/seatunnel/spark/transform/Sql.scala
@@ -31,5 +31,4 @@ class Sql extends BaseSparkTransform {
     checkAllExists(config, "sql")
   }
 
-  override def prepare(env: SparkEnvironment): Unit = {}
 }

Reply via email to