This is an automated email from the ASF dual-hosted git repository.

kirs pushed a commit to branch dev
in repository https://gitbox.apache.org/repos/asf/incubator-seatunnel.git


The following commit(s) were added to refs/heads/dev by this push:
     new f97ca44  [Bug][Config] Support json format config file (#1500)
f97ca44 is described below

commit f97ca44268f8ab06be9ce7be56a280f62bbf4270
Author: Simon <[email protected]>
AuthorDate: Sat Mar 19 20:51:52 2022 +0800

    [Bug][Config] Support json format config file (#1500)
---
 .../com/typesafe/config/impl/ConfigParser.java     |  2 +-
 .../apache/seatunnel/config/JsonFormatTest.java    | 51 +++++++++++++++
 .../src/test/resources/json/spark.batch.conf       | 72 ++++++++++++++++++++++
 .../src/test/resources/json/spark.batch.json       | 20 ++++++
 4 files changed, 144 insertions(+), 1 deletion(-)

diff --git 
a/seatunnel-config/seatunnel-config-shade/src/main/java/org/apache/seatunnel/shade/com/typesafe/config/impl/ConfigParser.java
 
b/seatunnel-config/seatunnel-config-shade/src/main/java/org/apache/seatunnel/shade/com/typesafe/config/impl/ConfigParser.java
index 6ff0ed1..b6f1d7b 100644
--- 
a/seatunnel-config/seatunnel-config-shade/src/main/java/org/apache/seatunnel/shade/com/typesafe/config/impl/ConfigParser.java
+++ 
b/seatunnel-config/seatunnel-config-shade/src/main/java/org/apache/seatunnel/shade/com/typesafe/config/impl/ConfigParser.java
@@ -111,7 +111,7 @@ final class ConfigParser {
 
                 Path path = pathStack.peekFirst();
 
-                if (path != null
+                if (path != null && !ConfigSyntax.JSON.equals(flavor)
                     && ("source".equals(path.first())
                     || "transform".equals(path.first())
                     || "sink".equals(path.first()))) {
diff --git 
a/seatunnel-config/seatunnel-config-shade/src/test/java/org/apache/seatunnel/config/JsonFormatTest.java
 
b/seatunnel-config/seatunnel-config-shade/src/test/java/org/apache/seatunnel/config/JsonFormatTest.java
new file mode 100644
index 0000000..d16c798
--- /dev/null
+++ 
b/seatunnel-config/seatunnel-config-shade/src/test/java/org/apache/seatunnel/config/JsonFormatTest.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.seatunnel.config;
+
+import org.apache.seatunnel.config.utils.FileUtils;
+
+import org.apache.seatunnel.shade.com.typesafe.config.Config;
+import org.apache.seatunnel.shade.com.typesafe.config.ConfigFactory;
+import org.apache.seatunnel.shade.com.typesafe.config.ConfigResolveOptions;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+public class JsonFormatTest {
+
+    @Test
+    public void testJsonFormat() {
+
+        Config json = ConfigFactory
+                
.parseFile(FileUtils.getFileFromResources("json/spark.batch.json"))
+                .resolveWith(ConfigFactory.systemProperties(),
+                        
ConfigResolveOptions.defaults().setAllowUnresolved(true));
+
+        Config config = ConfigFactory
+                
.parseFile(FileUtils.getFileFromResources("json/spark.batch.conf"))
+                .resolveWith(ConfigFactory.systemProperties(),
+                        
ConfigResolveOptions.defaults().setAllowUnresolved(true));
+
+        Assert.assertEquals(config.atPath("transform"), 
json.atPath("transform"));
+        Assert.assertEquals(config.atPath("sink"), json.atPath("sink"));
+        Assert.assertEquals(config.atPath("source"), json.atPath("source"));
+        Assert.assertEquals(config.atPath("env"), json.atPath("env"));
+
+    }
+
+}
diff --git 
a/seatunnel-config/seatunnel-config-shade/src/test/resources/json/spark.batch.conf
 
b/seatunnel-config/seatunnel-config-shade/src/test/resources/json/spark.batch.conf
new file mode 100644
index 0000000..27ad42b
--- /dev/null
+++ 
b/seatunnel-config/seatunnel-config-shade/src/test/resources/json/spark.batch.conf
@@ -0,0 +1,72 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+######
+###### This config file is a demonstration of batch processing in SeaTunnel 
config
+######
+
+env {
+  # You can set spark configuration here
+  # see available properties defined by spark: 
https://spark.apache.org/docs/latest/configuration.html#available-properties
+  spark.app.name = "SeaTunnel"
+  spark.executor.instances = 2
+  spark.executor.cores = 1
+  spark.executor.memory = "1g"
+}
+
+source {
+  # This is an example input plugin **only for testing and demonstrating the feature 
input plugin**
+  Fake {
+    result_table_name = "my_dataset"
+  }
+
+  # You can also use other input plugins, such as hdfs
+  # hdfs {
+  #   result_table_name = "accesslog"
+  #   path = "hdfs://hadoop-cluster-01/nginx/accesslog"
+  #   format = "json"
+  # }
+
+  # If you would like to get more information about how to configure seatunnel 
and see full list of input plugins,
+  # please go to 
https://seatunnel.apache.org/docs/spark/configuration/source-plugins/Fake
+}
+
+transform {
+  # split data by specific delimiter
+
+  # you can also use other transform plugins, such as sql
+  # sql {
+  #   sql = "select * from accesslog where request_time > 1000"
+  # }
+
+  # If you would like to get more information about how to configure seatunnel 
and see full list of transform plugins,
+  # please go to 
https://seatunnel.apache.org/docs/spark/configuration/transform-plugins/Split
+}
+
+sink {
+  # choose stdout output plugin to output data to console
+  Console {}
+
+  # you can also use other output plugins, such as hdfs
+  # hdfs {
+  #   path = "hdfs://hadoop-cluster-01/nginx/accesslog_processed"
+  #   save_mode = "append"
+  # }
+
+  # If you would like to get more information about how to configure seatunnel 
and see full list of output plugins,
+  # please go to 
https://seatunnel.apache.org/docs/spark/configuration/sink-plugins/Console
+}
diff --git 
a/seatunnel-config/seatunnel-config-shade/src/test/resources/json/spark.batch.json
 
b/seatunnel-config/seatunnel-config-shade/src/test/resources/json/spark.batch.json
new file mode 100644
index 0000000..f0f68ae
--- /dev/null
+++ 
b/seatunnel-config/seatunnel-config-shade/src/test/resources/json/spark.batch.json
@@ -0,0 +1,20 @@
+{
+  "env" : {
+    "spark.app.name" : "SeaTunnel",
+    "spark.executor.cores" : 1,
+    "spark.executor.instances" : 2,
+    "spark.executor.memory" : "1g"
+  },
+  "sink" : [
+    {
+      "plugin_name" : "Console"
+    }
+  ],
+  "source" : [
+    {
+      "plugin_name" : "Fake",
+      "result_table_name" : "my_dataset"
+    }
+  ],
+  "transform" : []
+}

Reply via email to