This is an automated email from the ASF dual-hosted git repository.

wuchunfu pushed a commit to branch dev
in repository https://gitbox.apache.org/repos/asf/incubator-seatunnel.git


The following commit(s) were added to refs/heads/dev by this push:
     new a7ff0f8  [Refactor][connector-spark][elastic] Refactored embedded 
config params and default values (#1492)
a7ff0f8 is described below

commit a7ff0f8805a063a7c7a33f256f82f28235f8e1b1
Author: mans2singh <[email protected]>
AuthorDate: Tue Mar 15 21:50:34 2022 -0400

    [Refactor][connector-spark][elastic] Refactored embedded config params and 
default values (#1492)
---
 .../scala/org/apache/seatunnel/spark/Config.scala  | 57 ++++++++++++++++++++++
 .../seatunnel/spark/sink/Elasticsearch.scala       | 15 +++---
 .../seatunnel/spark/source/Elasticsearch.scala     |  8 +--
 3 files changed, 69 insertions(+), 11 deletions(-)

diff --git 
a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-elasticsearch/src/main/scala/org/apache/seatunnel/spark/Config.scala
 
b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-elasticsearch/src/main/scala/org/apache/seatunnel/spark/Config.scala
new file mode 100644
index 0000000..5afbee8
--- /dev/null
+++ 
b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-elasticsearch/src/main/scala/org/apache/seatunnel/spark/Config.scala
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.seatunnel.spark
+
+/**
+ * Configurations for Elasticsearch source and sink
+ */
+object Config extends Serializable {
+  /**
+   * Elasticsearch hosts configuration
+   */
+  val HOSTS = "hosts"
+
+  /**
+   * Elasticsearch index configuration
+   */
+  val INDEX = "index"
+
+  /**
+   * Elasticsearch index time format configuration
+   */
+  val INDEX_TIME_FORMAT = "index_time_format"
+
+  /**
+   * Elasticsearch index type configuration
+   */
+  val INDEX_TYPE = "index_type"
+
+  /**
+   * Elasticsearch default index
+   */
+  val DEFAULT_INDEX = "seatunnel"
+
+  /**
+   * Elasticsearch default index type
+   */
+  val DEFAULT_INDEX_TYPE = "_doc"
+
+  /**
+   * Elasticsearch default index time format
+   */
+  val DEFAULT_INDEX_TIME_FORMAT = "yyyy.MM.dd"
+}
diff --git 
a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-elasticsearch/src/main/scala/org/apache/seatunnel/spark/sink/Elasticsearch.scala
 
b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-elasticsearch/src/main/scala/org/apache/seatunnel/spark/sink/Elasticsearch.scala
index dc22f2b..71b06ed 100644
--- 
a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-elasticsearch/src/main/scala/org/apache/seatunnel/spark/sink/Elasticsearch.scala
+++ 
b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-elasticsearch/src/main/scala/org/apache/seatunnel/spark/sink/Elasticsearch.scala
@@ -20,6 +20,7 @@ import 
org.apache.seatunnel.common.config.CheckConfigUtil.checkAllExists
 import org.apache.seatunnel.common.config.CheckResult
 import org.apache.seatunnel.common.utils.StringTemplate
 import org.apache.seatunnel.shade.com.typesafe.config.ConfigFactory
+import org.apache.seatunnel.spark.Config.{HOSTS, INDEX, INDEX_TYPE, 
INDEX_TIME_FORMAT, DEFAULT_INDEX_TIME_FORMAT, DEFAULT_INDEX, DEFAULT_INDEX_TYPE}
 import org.apache.seatunnel.spark.SparkEnvironment
 import org.apache.seatunnel.spark.batch.SparkBatchSink
 import org.apache.spark.sql.{Dataset, Row}
@@ -37,20 +38,20 @@ class Elasticsearch extends SparkBatchSink {
 
   override def output(df: Dataset[Row], environment: SparkEnvironment): Unit = 
{
     val index =
-      StringTemplate.substitute(config.getString("index"), 
config.getString("index_time_format"))
-    df.saveToEs(index + "/" + config.getString("index_type"), this.esCfg)
+      StringTemplate.substitute(config.getString(INDEX), 
config.getString(INDEX_TIME_FORMAT))
+    df.saveToEs(index + "/" + config.getString(INDEX_TYPE), this.esCfg)
   }
 
   override def checkConfig(): CheckResult = {
-    checkAllExists(config, "hosts")
+    checkAllExists(config, HOSTS)
   }
 
   override def prepare(environment: SparkEnvironment): Unit = {
     val defaultConfig = ConfigFactory.parseMap(
       Map(
-        "index" -> "seatunnel",
-        "index_type" -> "_doc",
-        "index_time_format" -> "yyyy.MM.dd"))
+        INDEX -> DEFAULT_INDEX,
+        INDEX_TYPE -> DEFAULT_INDEX_TYPE,
+        INDEX_TIME_FORMAT -> DEFAULT_INDEX_TIME_FORMAT))
     config = config.withFallback(defaultConfig)
 
     config
@@ -64,7 +65,7 @@ class Elasticsearch extends SparkBatchSink {
         }
       })
 
-    esCfg += ("es.nodes" -> config.getStringList("hosts").mkString(","))
+    esCfg += ("es.nodes" -> config.getStringList(HOSTS).mkString(","))
 
     LOGGER.info("Output ElasticSearch Params:")
     for (entry <- esCfg) {
diff --git 
a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-elasticsearch/src/main/scala/org/apache/seatunnel/spark/source/Elasticsearch.scala
 
b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-elasticsearch/src/main/scala/org/apache/seatunnel/spark/source/Elasticsearch.scala
index 06fc6e9..289fee8 100644
--- 
a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-elasticsearch/src/main/scala/org/apache/seatunnel/spark/source/Elasticsearch.scala
+++ 
b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-elasticsearch/src/main/scala/org/apache/seatunnel/spark/source/Elasticsearch.scala
@@ -17,9 +17,9 @@
 package org.apache.seatunnel.spark.source
 
 import scala.collection.JavaConversions._
-
 import org.apache.seatunnel.common.config.{CheckResult, TypesafeConfigUtils}
 import org.apache.seatunnel.common.config.CheckConfigUtil.checkAllExists
+import org.apache.seatunnel.spark.Config.{HOSTS, INDEX}
 import org.apache.seatunnel.spark.SparkEnvironment
 import org.apache.seatunnel.spark.batch.SparkBatchSource
 import org.apache.spark.sql.{Dataset, Row}
@@ -44,7 +44,7 @@ class Elasticsearch extends SparkBatchSource {
         })
     }
 
-    esCfg += ("es.nodes" -> config.getStringList("hosts").mkString(","))
+    esCfg += ("es.nodes" -> config.getStringList(HOSTS).mkString(","))
 
     LOGGER.info("Input ElasticSearch Params:")
     for (entry <- esCfg) {
@@ -54,7 +54,7 @@ class Elasticsearch extends SparkBatchSource {
   }
 
   override def getData(env: SparkEnvironment): Dataset[Row] = {
-    val index = config.getString("index")
+    val index = config.getString(INDEX)
 
     env.getSparkSession.read
       .format("org.elasticsearch.spark.sql")
@@ -63,7 +63,7 @@ class Elasticsearch extends SparkBatchSource {
   }
 
   override def checkConfig(): CheckResult = {
-    checkAllExists(config, "hosts", "index")
+    checkAllExists(config, HOSTS, INDEX)
   }
 
 }

Reply via email to