This is an automated email from the ASF dual-hosted git repository.

kirs pushed a commit to branch dev
in repository https://gitbox.apache.org/repos/asf/incubator-seatunnel.git


The following commit(s) were added to refs/heads/dev by this push:
     new 402ead2  [Refactor][connector-spark][console] Refactored config 
literal params and default values (#1499)
402ead2 is described below

commit 402ead2a00212b8868b3c9ac5785167ff6cc3198
Author: mans2singh <[email protected]>
AuthorDate: Sat Mar 19 09:06:18 2022 -0400

    [Refactor][connector-spark][console] Refactored config literal params and 
default values (#1499)
    
    * [Refactor][connector-spark][console] Refactored config literal params and 
default values
    
    * [Refactor][connector-spark][console] Added missing lic comment
---
 .../scala/org/apache/seatunnel/spark/Config.scala  | 59 ++++++++++++++++++++++
 .../org/apache/seatunnel/spark/sink/Console.scala  | 19 +++----
 2 files changed, 69 insertions(+), 9 deletions(-)

diff --git 
a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-console/src/main/scala/org/apache/seatunnel/spark/Config.scala
 
b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-console/src/main/scala/org/apache/seatunnel/spark/Config.scala
new file mode 100644
index 0000000..3746ff3
--- /dev/null
+++ 
b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-console/src/main/scala/org/apache/seatunnel/spark/Config.scala
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.seatunnel.spark
+
+/**
+ * Console configuration parameters and defaults
+ */
+object Config {
+
+  /**
+   * The number of rows to show
+   */
+  val LIMIT = "limit"
+
+  /**
+   * The serializer (plain/json/schema)
+   */
+  val SERIALIZER = "serializer"
+
+  /**
+   * Default console show output
+   */
+  val PLAIN = "plain"
+
+  /**
+   * Convert dataframe to json and print
+   */
+  val JSON = "json"
+
+  /**
+   * Print the schema
+   */
+  val SCHEMA = "schema"
+
+  /**
+   * Default serializer
+   */
+  val DEFAULT_SERIALIZER = PLAIN
+
+  /**
+   * Default number of rows
+   */
+  val DEFAULT_LIMIT = 100
+
+}
diff --git 
a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-console/src/main/scala/org/apache/seatunnel/spark/sink/Console.scala
 
b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-console/src/main/scala/org/apache/seatunnel/spark/sink/Console.scala
index b0e6607..b77262b 100644
--- 
a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-console/src/main/scala/org/apache/seatunnel/spark/sink/Console.scala
+++ 
b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-console/src/main/scala/org/apache/seatunnel/spark/sink/Console.scala
@@ -18,6 +18,7 @@ package org.apache.seatunnel.spark.sink
 
 import org.apache.seatunnel.common.config.CheckResult
 import org.apache.seatunnel.shade.com.typesafe.config.ConfigFactory
+import org.apache.seatunnel.spark.Config.{LIMIT, SERIALIZER, PLAIN, JSON, 
SCHEMA, DEFAULT_SERIALIZER, DEFAULT_LIMIT}
 import org.apache.seatunnel.spark.SparkEnvironment
 import org.apache.seatunnel.spark.batch.SparkBatchSink
 import org.apache.spark.sql.{Dataset, Row}
@@ -27,16 +28,16 @@ import scala.collection.JavaConversions._
 class Console extends SparkBatchSink {
 
   override def output(df: Dataset[Row], env: SparkEnvironment): Unit = {
-    val limit = config.getInt("limit")
+    val limit = config.getInt(LIMIT)
 
-    config.getString("serializer") match {
-      case "plain" =>
+    config.getString(SERIALIZER) match {
+      case PLAIN =>
         if (limit == -1) {
           df.show(Int.MaxValue, truncate = false)
         } else if (limit > 0) {
           df.show(limit, truncate = false)
         }
-      case "json" =>
+      case JSON =>
         if (limit == -1) {
           // scalastyle:off
           df.toJSON.take(Int.MaxValue).foreach(s => println(s))
@@ -46,24 +47,24 @@ class Console extends SparkBatchSink {
           df.toJSON.take(limit).foreach(s => println(s))
           // scalastyle:on
         }
-      case "schema" =>
+      case SCHEMA =>
         df.printSchema()
     }
   }
 
   override def checkConfig(): CheckResult = {
-    if (!config.hasPath("limit") || (config.hasPath("limit") && 
config.getInt("limit") >= -1)) {
+    if (!config.hasPath(LIMIT) || (config.hasPath(LIMIT) && 
config.getInt(LIMIT) >= -1)) {
       CheckResult.success()
     } else {
-      CheckResult.error("please specify [limit] as Number[-1, " + Int.MaxValue 
+ "]")
+      CheckResult.error("Please specify [" + LIMIT + "] as Number[-1, " + 
Int.MaxValue + "]")
     }
   }
 
   override def prepare(env: SparkEnvironment): Unit = {
     val defaultConfig = ConfigFactory.parseMap(
       Map(
-        "limit" -> 100,
-        "serializer" -> "plain" // plain | json
+        LIMIT -> DEFAULT_LIMIT,
+        SERIALIZER -> DEFAULT_SERIALIZER // plain | json
       ))
     config = config.withFallback(defaultConfig)
   }

Reply via email to