This is an automated email from the ASF dual-hosted git repository.

benjobs pushed a commit to branch dev
in repository https://gitbox.apache.org/repos/asf/incubator-streampark.git


The following commit(s) were added to refs/heads/dev by this push:
     new 74c097c2b [Improve] StreamPark Platform configuration files 
improvements (#3679)
74c097c2b is described below

commit 74c097c2b6550a106a657d5d07b6d045c7c6be01
Author: benjobs <[email protected]>
AuthorDate: Thu Apr 25 18:40:14 2024 +0800

    [Improve] StreamPark Platform configuration files improvements (#3679)
    
    * [Improve] StreamPark Platform configuration files improvements
    
    * [Improve] springboot testcase improve
    
    * [Improve] yarnUtils bug fixed
    
    * [Improve] minor improve
    
    ---------
    
    Co-authored-by: benjobs <[email protected]>
---
 .../streampark/common/util/HadoopConfigUtils.scala |  24 ++-
 .../streampark/common/util/HadoopUtils.scala       |  76 +++------
 .../apache/streampark/common/util/YarnUtils.scala  |   2 +-
 .../console/StreamParkConsoleBootstrap.java        |   9 +-
 .../console/base/config/SpringProperties.java      | 171 +++++++++++++++++++++
 .../system/authentication/SsoShiroPlugin.java      |   2 +
 .../console/system/controller/SsoController.java   |   2 +-
 .../src/main/resources/application-h2.yml          |  30 ----
 .../src/main/resources/application-mysql.yml       |  23 ---
 .../src/main/resources/application-pgsql.yml       |  23 ---
 .../src/main/resources/application-sso.yml         |  26 ----
 .../src/main/resources/application.yml             | 147 ------------------
 .../src/main/resources/config.yaml                 |  96 ++++++++++++
 .../console/SpringIntegrationTestBase.java         |  27 +++-
 .../streampark/console/SpringUnitTestBase.java     |  29 +++-
 15 files changed, 370 insertions(+), 317 deletions(-)

diff --git 
a/streampark-common/src/main/scala/org/apache/streampark/common/util/HadoopConfigUtils.scala
 
b/streampark-common/src/main/scala/org/apache/streampark/common/util/HadoopConfigUtils.scala
index 396a8467e..893167932 100644
--- 
a/streampark-common/src/main/scala/org/apache/streampark/common/util/HadoopConfigUtils.scala
+++ 
b/streampark-common/src/main/scala/org/apache/streampark/common/util/HadoopConfigUtils.scala
@@ -16,6 +16,8 @@
  */
 package org.apache.streampark.common.util
 
+import org.apache.streampark.common.conf.{CommonConfig, InternalConfigHolder}
+import org.apache.streampark.common.conf.ConfigKeys._
 import org.apache.streampark.common.fs.LfsOperator
 
 import org.apache.commons.io.{FileUtils => ApacheFileUtils}
@@ -32,10 +34,28 @@ import scala.util.{Failure, Success, Try}
 /** Hadoop client configuration tools mainly for flink use. */
 object HadoopConfigUtils {
 
-  private[this] val HADOOP_CLIENT_CONF_FILES: Array[String] =
+  private[this] lazy val kerberosConf: Map[String, String] =
+    System.getProperties.filter(_._1.startsWith("security.kerberos")).toMap
+
+  lazy val hadoopUserName: String =
+    InternalConfigHolder.get(CommonConfig.STREAMPARK_HADOOP_USER_NAME)
+
+  lazy val kerberosDebug = kerberosConf.getOrElse(KEY_SECURITY_KERBEROS_DEBUG, 
"false")
+
+  lazy val kerberosEnable =
+    kerberosConf.getOrElse(KEY_SECURITY_KERBEROS_ENABLE, "false").toBoolean
+
+  lazy val kerberosPrincipal =
+    kerberosConf.getOrElse(KEY_SECURITY_KERBEROS_PRINCIPAL, "").trim
+
+  val kerberosKeytab = kerberosConf.getOrElse(KEY_SECURITY_KERBEROS_KEYTAB, 
"").trim
+
+  val kerberosKrb5 = kerberosConf.getOrElse(KEY_SECURITY_KERBEROS_KRB5_CONF, 
"")
+
+  private val HADOOP_CLIENT_CONF_FILES: Array[String] =
     Array("core-site.xml", "hdfs-site.xml", "yarn-site.xml")
 
-  private[this] val HIVE_CLIENT_CONF_FILES: Array[String] =
+  private val HIVE_CLIENT_CONF_FILES: Array[String] =
     Array("core-site.xml", "hdfs-site.xml", "hive-site.xml")
 
   /** Get Hadoop configuration directory path from system. */
diff --git 
a/streampark-common/src/main/scala/org/apache/streampark/common/util/HadoopUtils.scala
 
b/streampark-common/src/main/scala/org/apache/streampark/common/util/HadoopUtils.scala
index 93d0573a8..f9f587154 100644
--- 
a/streampark-common/src/main/scala/org/apache/streampark/common/util/HadoopUtils.scala
+++ 
b/streampark-common/src/main/scala/org/apache/streampark/common/util/HadoopUtils.scala
@@ -17,7 +17,7 @@
 
 package org.apache.streampark.common.util
 
-import org.apache.streampark.common.conf.{CommonConfig, ConfigKeys, 
InternalConfigHolder}
+import org.apache.streampark.common.conf.{CommonConfig, InternalConfigHolder}
 import org.apache.streampark.common.conf.ConfigKeys._
 
 import org.apache.commons.collections.CollectionUtils
@@ -39,7 +39,7 @@ import java.util
 import java.util.{Timer, TimerTask}
 import java.util.concurrent._
 
-import scala.collection.convert.ImplicitConversions._
+import scala.collection.JavaConversions._
 import scala.util.{Failure, Success, Try}
 
 object HadoopUtils extends Logger {
@@ -58,47 +58,15 @@ object HadoopUtils extends Logger {
 
   private[this] var tgt: KerberosTicket = _
 
-  lazy val hadoopUserName: String =
-    InternalConfigHolder.get(CommonConfig.STREAMPARK_HADOOP_USER_NAME)
-
-  private[this] lazy val kerberosConf: Map[String, String] =
-    SystemPropertyUtils.get(ConfigKeys.KEY_APP_HOME, null) match {
-      case null =>
-        getClass.getResourceAsStream("/kerberos.yml") match {
-          case x if x != null => PropertiesUtils.fromYamlFile(x)
-          case _ => null
-        }
-      case f =>
-        val file = new File(s"$f/conf/kerberos.yml")
-        if (file.exists() && file.isFile) {
-          PropertiesUtils.fromYamlFile(file.getAbsolutePath)
-        } else null
-    }
-
-  private[this] lazy val kerberosDebug =
-    kerberosConf.getOrElse(KEY_SECURITY_KERBEROS_DEBUG, "false")
-
-  private[this] lazy val kerberosEnable =
-    kerberosConf.getOrElse(KEY_SECURITY_KERBEROS_ENABLE, "false").toBoolean
-
-  private[this] lazy val kerberosPrincipal =
-    kerberosConf.getOrElse(KEY_SECURITY_KERBEROS_PRINCIPAL, "").trim
-
-  private[this] lazy val kerberosKeytab =
-    kerberosConf.getOrElse(KEY_SECURITY_KERBEROS_KEYTAB, "").trim
-
-  private[this] lazy val kerberosKrb5 =
-    kerberosConf.getOrElse(KEY_SECURITY_KERBEROS_KRB5_CONF, "")
-
   private[this] lazy val configurationCache: util.Map[String, Configuration] =
     new ConcurrentHashMap[String, Configuration]()
 
   def getUgi(): UserGroupInformation = {
     if (ugi == null) {
-      ugi = if (kerberosEnable) {
+      ugi = if (HadoopConfigUtils.kerberosEnable) {
         getKerberosUGI()
       } else {
-        UserGroupInformation.createRemoteUser(hadoopUserName)
+        UserGroupInformation.createRemoteUser(HadoopConfigUtils.hadoopUserName)
       }
     }
     ugi
@@ -121,7 +89,7 @@ object HadoopUtils extends Logger {
         val end = value.getEndTime.getTime
         ((end - start) * 0.90f).toLong
       case _ =>
-        logWarn("Get kerberos tgtRefreshTime failed, try get kerberos.ttl. ")
+        logWarn("get kerberos tgtRefreshTime failed, try get kerberos.ttl. ")
         val timeUnit = 
DateUtils.getTimeUnit(InternalConfigHolder.get(CommonConfig.KERBEROS_TTL))
         timeUnit._2 match {
           case TimeUnit.SECONDS => timeUnit._1 * 1000
@@ -137,10 +105,7 @@ object HadoopUtils extends Logger {
 
   def getConfigurationFromHadoopConfDir(confDir: String = hadoopConfDir): 
Configuration = {
     if (!configurationCache.containsKey(confDir)) {
-      if (!FileUtils.exists(confDir)) {
-        throw new ExceptionInInitializerError(
-          s"[StreamPark] hadoop conf file " + confDir + " is not exist!")
-      }
+      FileUtils.exists(confDir)
       val hadoopConfDir = new File(confDir)
       val confName = List("hdfs-default.xml", "core-site.xml", 
"hdfs-site.xml", "yarn-site.xml")
       val files =
@@ -199,29 +164,32 @@ object HadoopUtils extends Logger {
   }
 
   private[this] def getKerberosUGI(): UserGroupInformation = {
-    logInfo("Kerberos login starting....")
+    logInfo("kerberos login starting....")
 
     require(
-      kerberosPrincipal.nonEmpty && kerberosKeytab.nonEmpty,
-      s"$KEY_SECURITY_KERBEROS_PRINCIPAL and $KEY_SECURITY_KERBEROS_KEYTAB 
must not be empty")
+      HadoopConfigUtils.kerberosPrincipal.nonEmpty && 
HadoopConfigUtils.kerberosKeytab.nonEmpty,
+      s"$KEY_SECURITY_KERBEROS_PRINCIPAL and $KEY_SECURITY_KERBEROS_KEYTAB 
must not be empty"
+    )
 
     System.setProperty("javax.security.auth.useSubjectCredsOnly", "false")
 
-    if (kerberosKrb5.nonEmpty) {
-      System.setProperty("java.security.krb5.conf", kerberosKrb5)
-      System.setProperty("java.security.krb5.conf.path", kerberosKrb5)
+    if (HadoopConfigUtils.kerberosKrb5.nonEmpty) {
+      System.setProperty("java.security.krb5.conf", 
HadoopConfigUtils.kerberosKrb5)
+      System.setProperty("java.security.krb5.conf.path", 
HadoopConfigUtils.kerberosKrb5)
     }
 
-    System.setProperty("sun.security.spnego.debug", kerberosDebug)
-    System.setProperty("sun.security.krb5.debug", kerberosDebug)
+    System.setProperty("sun.security.spnego.debug", 
HadoopConfigUtils.kerberosDebug)
+    System.setProperty("sun.security.krb5.debug", 
HadoopConfigUtils.kerberosDebug)
     hadoopConf.set(KEY_HADOOP_SECURITY_AUTHENTICATION, KEY_KERBEROS)
 
     Try {
       UserGroupInformation.setConfiguration(hadoopConf)
       val ugi =
-        
UserGroupInformation.loginUserFromKeytabAndReturnUGI(kerberosPrincipal, 
kerberosKeytab)
+        UserGroupInformation.loginUserFromKeytabAndReturnUGI(
+          HadoopConfigUtils.kerberosPrincipal,
+          HadoopConfigUtils.kerberosKeytab)
       UserGroupInformation.setLoginUser(ugi)
-      logInfo("Kerberos authentication successful")
+      logInfo("kerberos authentication successful")
       ugi
     } match {
       case Success(ugi) => ugi
@@ -239,9 +207,7 @@ object HadoopUtils extends Logger {
         })
       } match {
         case Success(fs) =>
-          val enableString = 
kerberosConf.getOrElse(KEY_SECURITY_KERBEROS_ENABLE, "false")
-          val kerberosEnable = 
Try(enableString.trim.toBoolean).getOrElse(false)
-          if (kerberosEnable) {
+          if (HadoopConfigUtils.kerberosEnable) {
             // reLogin...
             val timer = new Timer()
             timer.schedule(
@@ -287,7 +253,7 @@ object HadoopUtils extends Logger {
     val tmpDir = FileUtils.createTempDir()
     val fs = FileSystem.get(new Configuration)
     val sourcePath = fs.makeQualified(new Path(jarOnHdfs))
-    if (!fs.exists(sourcePath)) throw new IOException(s"Jar file: $jarOnHdfs 
doesn't exist.")
+    if (!fs.exists(sourcePath)) throw new IOException(s"jar file: $jarOnHdfs 
doesn't exist.")
     val destPath = new Path(tmpDir.getAbsolutePath + "/" + sourcePath.getName)
     fs.copyToLocalFile(sourcePath, destPath)
     new File(destPath.toString).getAbsolutePath
diff --git 
a/streampark-common/src/main/scala/org/apache/streampark/common/util/YarnUtils.scala
 
b/streampark-common/src/main/scala/org/apache/streampark/common/util/YarnUtils.scala
index f0464cfc3..51e2bf606 100644
--- 
a/streampark-common/src/main/scala/org/apache/streampark/common/util/YarnUtils.scala
+++ 
b/streampark-common/src/main/scala/org/apache/streampark/common/util/YarnUtils.scala
@@ -291,7 +291,7 @@ object YarnUtils extends Logger {
       val url =
         if (!hasYarnHttpSimpleAuth) reqUrl
         else {
-          s"$reqUrl?user.name=${HadoopUtils.hadoopUserName}"
+          s"$reqUrl?user.name=${HadoopConfigUtils.hadoopUserName}"
         }
       HttpClientUtils.httpGetRequest(url, config)
     }
diff --git 
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/StreamParkConsoleBootstrap.java
 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/StreamParkConsoleBootstrap.java
index 9401e64cd..fd818f5a3 100644
--- 
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/StreamParkConsoleBootstrap.java
+++ 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/StreamParkConsoleBootstrap.java
@@ -17,6 +17,8 @@
 
 package org.apache.streampark.console;
 
+import org.apache.streampark.console.base.config.SpringProperties;
+
 import lombok.extern.slf4j.Slf4j;
 import org.springframework.boot.autoconfigure.SpringBootApplication;
 import org.springframework.boot.builder.SpringApplicationBuilder;
@@ -46,7 +48,10 @@ import 
org.springframework.scheduling.annotation.EnableScheduling;
 @EnableScheduling
 public class StreamParkConsoleBootstrap {
 
-  public static void main(String[] args) {
-    new SpringApplicationBuilder(StreamParkConsoleBootstrap.class).run(args);
+  public static void main(String[] args) throws Exception {
+    new SpringApplicationBuilder()
+        .properties(SpringProperties.get())
+        .sources(StreamParkConsoleBootstrap.class)
+        .run(args);
   }
 }
diff --git 
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/config/SpringProperties.java
 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/config/SpringProperties.java
new file mode 100644
index 000000000..9d86b0cdd
--- /dev/null
+++ 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/config/SpringProperties.java
@@ -0,0 +1,171 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.streampark.console.base.config;
+
+import org.apache.streampark.common.util.PropertiesUtils;
+import org.apache.streampark.common.util.SystemPropertyUtils;
+import org.apache.streampark.console.base.util.WebUtils;
+
+import org.apache.commons.lang3.StringUtils;
+
+import com.google.common.collect.Maps;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.InputStream;
+import java.util.Map;
+import java.util.Properties;
+
+public class SpringProperties {
+
+  private static final Logger log = 
LoggerFactory.getLogger(SpringProperties.class);
+
+  public static Properties get() {
+    // 1) get spring config
+    Properties springConfig = getSpringConfig();
+    // 2) get user config
+    Properties userConfig = getUserConfig();
+    // 3) merge config
+    mergeConfig(userConfig, springConfig);
+    // 4) datasource
+    dataSourceConfig(userConfig, springConfig);
+    // 5) system.setProperties
+    springConfig.forEach((k, v) -> SystemPropertyUtils.set(k.toString(), 
v.toString()));
+    return springConfig;
+  }
+
+  private static void dataSourceConfig(Properties userConfig, Properties 
springConfig) {
+    String dialect = userConfig.getProperty("datasource.dialect", 
"").toString();
+    if (StringUtils.isBlank(dialect)) {
+      throw new ExceptionInInitializerError(
+          "datasource.dialect is required, please check config.yaml");
+    }
+    switch (dialect.toLowerCase()) {
+      case "mysql":
+        try {
+          Class.forName("com.mysql.cj.jdbc.Driver");
+          springConfig.put("spring.datasource.driver-class-name", 
"com.mysql.cj.jdbc.Driver");
+        } catch (ClassNotFoundException e) {
+          try {
+            Class.forName("com.mysql.jdbc.Driver");
+            springConfig.put("spring.datasource.driver-class-name", 
"com.mysql.jdbc.Driver");
+          } catch (ClassNotFoundException e1) {
+            throw new ExceptionInInitializerError(
+                "datasource.dialect is mysql, \"com.mysql.cj.jdbc.Driver\" and 
\"com.mysql.jdbc.Driver\" classes not found, Please ensure that the MySQL 
Connector/J can be found under $streampark/lib,\n"
+                    + "Notice: The MySQL Connector/J is incompatible with the 
Apache 2.0 license, You need to download and put it into $streampark/lib");
+          }
+        }
+        break;
+      case "postgresql":
+      case "pgsql":
+        springConfig.put("spring.datasource.driver-class-name", 
"org.postgresql.Driver");
+        break;
+      case "h2":
+        springConfig.put("spring.datasource.driver-class-name", 
"org.h2.Driver");
+        springConfig.put("spring.datasource.username", "sa");
+        springConfig.put("spring.datasource.password", "sa");
+        springConfig.put(
+            "spring.datasource.url",
+            
"jdbc:h2:mem:streampark;MODE=MySQL;DB_CLOSE_DELAY=-1;DATABASE_TO_LOWER=true;INIT=runscript
 from 'classpath:db/schema-h2.sql'");
+        springConfig.put("spring.sql.init.data-locations", 
"classpath:db/data-h2.sql");
+        springConfig.put("spring.sql.init.continue-on-error", "true");
+        springConfig.put("spring.sql.init.username", "sa");
+        springConfig.put("spring.sql.init.password", "sa");
+        springConfig.put("spring.sql.init.mode", "always");
+        break;
+      default:
+        throw new UnsupportedOperationException("Unsupported datasource 
dialect: " + dialect);
+    }
+  }
+
+  private static void mergeConfig(Properties userConfig, Properties 
springConfig) {
+    Map<String, String> configMapping = Maps.newHashMap();
+    configMapping.put("datasource.username", "spring.datasource.username");
+    configMapping.put("datasource.password", "spring.datasource.password");
+    configMapping.put("datasource.url", "spring.datasource.url");
+
+    userConfig.forEach(
+        (k, v) -> {
+          String key = configMapping.get(k);
+          if (key != null) {
+            springConfig.put(key, v);
+          } else {
+            springConfig.put(k, v);
+          }
+        });
+  }
+
+  private static boolean useOldConfig() {
+    String appHome = WebUtils.getAppHome();
+    if (appHome == null) {
+      return false;
+    }
+    File file = new File(appHome + "/conf/application.yml");
+    return file.exists();
+  }
+
+  private static Properties getUserConfig() {
+    String appHome = WebUtils.getAppHome();
+    Properties properties = new Properties();
+    if (appHome != null) {
+      File file = new File(appHome + "/conf/config.yaml");
+      if (file.exists() && file.isFile()) {
+        Map<String, String> config = 
PropertiesUtils.fromYamlFileAsJava(file.getAbsolutePath());
+        properties.putAll(config);
+        return properties;
+      }
+      throw new ExceptionInInitializerError(file.getAbsolutePath() + " not 
found, please check.");
+    } else {
+      InputStream inputStream =
+          
SpringProperties.class.getClassLoader().getResourceAsStream("config.yaml");
+      Map<String, String> config = 
PropertiesUtils.fromYamlFileAsJava(inputStream);
+      properties.putAll(config);
+      return properties;
+    }
+  }
+
+  private static Properties getSpringConfig() {
+    Properties config = new Properties();
+    // basic
+    config.put("spring.application.name", "Apache StreamPark");
+    config.put("spring.main.banner-mode", "false");
+    config.put("spring.devtools.restart.enabled", "false");
+    config.put("spring.aop.proxy-target-class", "true");
+    config.put("spring.messages.encoding", "utf-8");
+    config.put("spring.main.allow-circular-references", "true");
+    config.put("spring.mvc.converters.preferred-json-mapper", "jackson");
+
+    // jackson
+    config.put("spring.jackson.date-format", "yyyy-MM-dd HH:mm:ss");
+    config.put("spring.jackson.time-zone", "GMT+8");
+    config.put("spring.jackson.deserialization.fail-on-unknown-properties", 
"false");
+
+    // multipart
+    config.put("spring.servlet.multipart.enabled", "true");
+    config.put("spring.servlet.multipart.max-file-size", "-1");
+    config.put("spring.servlet.multipart.max-request-size", "-1");
+
+    // swagger-ui
+    config.put("springdoc.api-docs.enabled", "true");
+
+    // metrics
+    config.put("management.health.ldap.enabled", "false");
+    return config;
+  }
+}
diff --git 
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/authentication/SsoShiroPlugin.java
 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/authentication/SsoShiroPlugin.java
index 94ab68c46..6aea4680e 100644
--- 
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/authentication/SsoShiroPlugin.java
+++ 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/authentication/SsoShiroPlugin.java
@@ -39,7 +39,9 @@ import java.util.LinkedHashMap;
 @Slf4j
 /** Plugin for {@link ShiroConfig.java} to load SSO config if enabled */
 public class SsoShiroPlugin {
+
   @Autowired private Config ssoConfig;
+
   @Autowired private ShiroService shiroService;
 
   @Value("${sso.enable:#{false}}")
diff --git 
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/controller/SsoController.java
 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/controller/SsoController.java
index caa46f76e..1b1ca4abd 100644
--- 
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/controller/SsoController.java
+++ 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/controller/SsoController.java
@@ -49,7 +49,7 @@ public class SsoController {
 
   @Autowired private Authenticator authenticator;
 
-  @Value("${pac4j.properties.principalNameAttribute:#{null}}")
+  @Value("${sso.properties.principalNameAttribute:#{null}}")
   private String principalNameAttribute;
 
   @Value("${sso.enable:#{false}}")
diff --git 
a/streampark-console/streampark-console-service/src/main/resources/application-h2.yml
 
b/streampark-console/streampark-console-service/src/main/resources/application-h2.yml
deleted file mode 100644
index 1a469c8ab..000000000
--- 
a/streampark-console/streampark-console-service/src/main/resources/application-h2.yml
+++ /dev/null
@@ -1,30 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-spring:
-  datasource:
-    driver-class-name: org.h2.Driver
-    url: 
jdbc:h2:mem:streampark;MODE=MySQL;DB_CLOSE_DELAY=-1;DATABASE_TO_LOWER=true;INIT=runscript
 from 'classpath:db/schema-h2.sql'
-    username: sa
-    password: sa
-  sql:
-    init:
-      data-locations: classpath:db/data-h2.sql
-      continue-on-error: true
-      username: sa
-      password: sa
-      mode: always
diff --git 
a/streampark-console/streampark-console-service/src/main/resources/application-mysql.yml
 
b/streampark-console/streampark-console-service/src/main/resources/application-mysql.yml
deleted file mode 100644
index 7c91f7c8c..000000000
--- 
a/streampark-console/streampark-console-service/src/main/resources/application-mysql.yml
+++ /dev/null
@@ -1,23 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-spring:
-  datasource:
-    username: root
-    password: streampark
-    driver-class-name: com.mysql.cj.jdbc.Driver
-    url: 
jdbc:mysql://localhost:3306/streampark?useSSL=false&useUnicode=true&characterEncoding=UTF-8&allowPublicKeyRetrieval=false&useJDBCCompliantTimezoneShift=true&useLegacyDatetimeCode=false&serverTimezone=GMT%2B8
diff --git 
a/streampark-console/streampark-console-service/src/main/resources/application-pgsql.yml
 
b/streampark-console/streampark-console-service/src/main/resources/application-pgsql.yml
deleted file mode 100644
index 9c71b81c5..000000000
--- 
a/streampark-console/streampark-console-service/src/main/resources/application-pgsql.yml
+++ /dev/null
@@ -1,23 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-spring:
-  datasource:
-    username: postgres
-    password: streampark
-    driver-class-name: org.postgresql.Driver
-    url: jdbc:postgresql://localhost:5432/streampark?stringtype=unspecified
diff --git 
a/streampark-console/streampark-console-service/src/main/resources/application-sso.yml
 
b/streampark-console/streampark-console-service/src/main/resources/application-sso.yml
deleted file mode 100644
index beff0ad5b..000000000
--- 
a/streampark-console/streampark-console-service/src/main/resources/application-sso.yml
+++ /dev/null
@@ -1,26 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-pac4j:
-  callbackUrl: http://localhost:10000/callback
-  # Put all parameters under `properties`
-  # Check supported sso config parameters for different authentication clients 
from the below link
-  # 
https://github.com/pac4j/pac4j/blob/master/documentation/docs/config-module.md
-  properties:
-    principalNameAttribute:
-    # Optional, change by authentication client
-    # Please replace and fill in your client config below when enabled SSO
diff --git 
a/streampark-console/streampark-console-service/src/main/resources/application.yml
 
b/streampark-console/streampark-console-service/src/main/resources/application.yml
deleted file mode 100644
index ca1f94245..000000000
--- 
a/streampark-console/streampark-console-service/src/main/resources/application.yml
+++ /dev/null
@@ -1,147 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-server:
-  port: 10000
-  undertow:
-    buffer-size: 1024
-    direct-buffers: true
-    threads:
-      io: 4
-      worker: 20
-
-logging:
-  level:
-    root: info
-
-knife4j:
-  enable: true
-  basic:
-    # basic authentication, used to access swagger-ui and doc
-    enable: false
-    username: admin
-    password: streampark
-
-springdoc:
-  api-docs:
-    enabled: true
-  swagger-ui:
-    path: /swagger-ui.html
-  packages-to-scan: org.apache.streampark.console
-
-spring:
-  profiles:
-    active: h2 #[h2,pgsql,mysql]
-    # Please uncomment the below config if enable sso
-    # include: sso
-  application.name: StreamPark
-  devtools.restart.enabled: false
-  mvc.pathmatch.matching-strategy: ant_path_matcher
-  servlet:
-    multipart:
-      enabled: true
-      max-file-size: 500MB
-      max-request-size: 500MB
-  aop.proxy-target-class: true
-  messages.encoding: utf-8
-  jackson:
-    date-format: yyyy-MM-dd HH:mm:ss
-    time-zone: GMT+8
-    deserialization:
-      fail-on-unknown-properties: false
-  main:
-    allow-circular-references: true
-    banner-mode: off
-  mvc:
-    converters:
-      preferred-json-mapper: jackson
-
-streampark:
-  proxy:
-    # knox process address 
https://cdpsit02.example.cn:8443/gateway/cdp-proxy/yarn
-    yarn-url:
-    # lark alert proxy,default https://open.feishu.cn
-    lark-url:
-  yarn:
-    # default simple, or kerberos
-    http-auth: simple
-
-  # HADOOP_USER_NAME
-  hadoop-user-name: hdfs
-  # local workspace, used to store source code and build dir etc.
-  workspace:
-    local: /opt/streampark_workspace
-    remote: hdfs://hdfscluster/streampark   # support hdfs:///streampark/ 、 
/streampark 、hdfs://host:ip/streampark/
-
-  # remote docker register namespace for streampark
-  docker:
-    # instantiating DockerHttpClient
-    http-client:
-      max-connections: 10000
-      connection-timeout-sec: 10000
-      response-timeout-sec: 12000
-      docker-host: ""
-
-  # flink-k8s tracking configuration
-  flink-k8s:
-    tracking:
-      silent-state-keep-sec: 10
-      polling-task-timeout-sec:
-        job-status: 120
-        cluster-metric: 120
-      polling-interval-sec:
-        job-status: 2
-        cluster-metric: 3
-    # If you need to specify an ingress controller, you can use this.
-    ingress:
-      class: nginx
-
-  # packer garbage resources collection configuration
-  packer-gc:
-    # maximum retention time for temporary build resources
-    max-resource-expired-hours: 120
-    # gc task running interval hours
-    exec-cron: 0 0 0/6 * * ?
-  # application backup clean configuration
-  backup-clean:
-    # maximum retention number of backup
-    max-backup-num: 5
-    # default running once a day
-    exec-cron: 0 0 1 * * ?
-
-  shiro:
-    # token timeout, unit second
-    jwtTimeOut: 86400
-    # backend authentication-free resources url
-    anonUrl: >
-
-ldap:
-  # Is ldap enabled? If so, please modify the urls
-  enable: false
-  ## AD server IP, default port 389
-  urls: ldap://99.99.99.99:389
-  ## Login Account
-  base-dn: dc=streampark,dc=com
-  username: cn=Manager,dc=streampark,dc=com
-  password: streampark
-  user:
-    identity-attribute: uid
-    email-attribute: mail
-
-sso:
-  # If turn to true, please provide the sso properties the application-sso.yml
-  enable: false
diff --git 
a/streampark-console/streampark-console-service/src/main/resources/config.yaml 
b/streampark-console/streampark-console-service/src/main/resources/config.yaml
new file mode 100644
index 000000000..e97991827
--- /dev/null
+++ 
b/streampark-console/streampark-console-service/src/main/resources/config.yaml
@@ -0,0 +1,96 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+logging:
+    level:
+        root: info
+
+server:
+    port: 10000
+    session:
+        # The user's login session has a validity period. If it exceeds this 
time, the user will be automatically logged out
+        # unit: s|m|h|d, s: second, m:minute, h:hour, d: day
+        ttl: 2h # unit[s|m|h|d], e.g: 24h, 2d....
+    undertow: # see: 
https://github.com/undertow-io/undertow/blob/master/core/src/main/java/io/undertow/Undertow.java
+        buffer-size: 1024
+        direct-buffers: true
+        threads:
+            io: 16
+            worker: 256
+
+# system database, default h2, mysql|pgsql|h2
+datasource:
+    dialect: h2  #h2, mysql, pgsql
+    # if datasource.dialect is mysql or pgsql, you need to configure the 
following connection information
+    # mysql/postgresql connect user
+    username:
+    # mysql/postgresql connect password
+    password:
+    # mysql/postgresql connect jdbcURL
+    # mysql example: datasource.url: 
jdbc:mysql://localhost:3306/streampark?useUnicode=true&characterEncoding=UTF-8&useJDBCCompliantTimezoneShift=true&useLegacyDatetimeCode=false&serverTimezone=GMT%2B8
+    # postgresql example: 
jdbc:postgresql://localhost:5432/streampark?stringtype=unspecified
+    url:
+
+streampark:
+    workspace:
+        # Local workspace, storage directory of cloned projects and compiled 
projects. Do not set it under $APP_HOME; set it to a directory outside of $APP_HOME.
+        local: /tmp/streampark
+        # The root hdfs path of the jars, the same as yarn.provided.lib.dirs 
for flink on yarn-application and the same as --jars for spark on yarn
+        remote: hdfs:///streampark/
+    proxy:
+        # lark proxy address, default https://open.feishu.cn
+        lark-url:
+        # hadoop yarn proxy path, e.g: knox process address 
https://streampark.com:8443/proxy/yarn
+        yarn-url:
+    yarn:
+        # flink on yarn or spark on yarn: when monitoring job status from yarn, 
it is necessary to set hadoop.http.authentication.type
+        http-auth: 'simple'  # default simple, or kerberos
+    # flink on yarn or spark on yarn, HADOOP_USER_NAME
+    hadoop-user-name: hdfs
+
+# flink on yarn or spark on yarn: when the hadoop cluster enables kerberos 
authentication, it is necessary to set the Kerberos authentication parameters.
+security:
+    kerberos:
+        login:
+            debug: false
+            enable: false
+            keytab:
+            krb5:
+            principal:
+        ttl: 2h # unit [s|m|h|d]
+
+# sign streampark with ldap.
+ldap:
+    base-dn: dc=streampark,dc=com  # Login Account
+    enable: false  # ldap enabled
+    username: cn=Manager,dc=streampark,dc=com
+    password: streampark
+    urls: ldap://99.99.99.99:389 # AD server IP, default port 389
+    user:
+        email-attribute: mail
+        identity-attribute: uid
+
+sso:
+    enable: false
+    callbackUrl: http://localhost:10000/callback
+    # Put all parameters under `properties`
+    # Check supported sso config parameters for different authentication 
clients from the below link
+    # 
https://github.com/pac4j/pac4j/blob/master/documentation/docs/config-module.md
+    properties:
+        principalNameAttribute:
+        # Optional, change by authentication client
+        # Please replace and fill in your client config below when enabled SSO
diff --git 
a/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/SpringIntegrationTestBase.java
 
b/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/SpringIntegrationTestBase.java
index c7fd1088e..41ce7b3aa 100644
--- 
a/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/SpringIntegrationTestBase.java
+++ 
b/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/SpringIntegrationTestBase.java
@@ -36,7 +36,6 @@ import 
org.springframework.boot.test.autoconfigure.web.reactive.AutoConfigureWeb
 import org.springframework.boot.test.context.SpringBootTest;
 import org.springframework.scheduling.annotation.EnableScheduling;
 import org.springframework.test.context.ActiveProfiles;
-import org.springframework.test.context.TestPropertySource;
 import org.springframework.test.context.junit.jupiter.SpringExtension;
 
 import java.io.File;
@@ -58,11 +57,33 @@ import static java.util.Objects.requireNonNull;
 @ActiveProfiles("integration-test")
 @AutoConfigureTestEntityManager
 @AutoConfigureWebTestClient(timeout = "60000")
-@TestPropertySource(locations = {"classpath:application-integration-test.yml"})
 @ExtendWith({MockitoExtension.class, SpringExtension.class})
 @SpringBootTest(
     classes = StreamParkConsoleBootstrap.class,
-    webEnvironment = SpringBootTest.WebEnvironment.DEFINED_PORT)
+    webEnvironment = SpringBootTest.WebEnvironment.DEFINED_PORT,
+    properties = {
+      "server.port=10000",
+      "spring.application.name=Apache StreamPark",
+      "spring.main.banner-mode=false",
+      "spring.aop.proxy-target-class=true",
+      "spring.messages.encoding=utf-8",
+      "spring.main.allow-circular-references=true",
+      "spring.mvc.converters.preferred-json-mapper=jackson",
+      "spring.jackson.date-format=yyyy-MM-dd HH:mm:ss",
+      "spring.jackson.time-zone=GMT+8",
+      "spring.jackson.deserialization.fail-on-unknown-properties=false",
+      "spring.mvc.pathmatch.matching-strategy=ant_path_matcher",
+      "datasource.dialect=h2",
+      "spring.datasource.driver-class-name=org.h2.Driver",
+      "spring.datasource.username=sa",
+      "spring.datasource.password=sa",
+      
"spring.datasource.url=jdbc:h2:mem:streampark;MODE=MySQL;DB_CLOSE_DELAY=-1;DATABASE_TO_LOWER=true;INIT=runscript
 from 'classpath:db/schema-h2.sql'",
+      "spring.sql.init.data-locations=classpath:db/data-h2.sql",
+      "spring.sql.init.continue-on-error=true",
+      "spring.sql.init.username=sa",
+      "spring.sql.init.password=sa",
+      "spring.sql.init.mode=always"
+    })
 public abstract class SpringIntegrationTestBase {
   protected static final Logger LOG = 
LoggerFactory.getLogger(SpringIntegrationTestBase.class);
 
diff --git 
a/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/SpringUnitTestBase.java
 
b/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/SpringUnitTestBase.java
index 88f5284d8..cfbcfec7d 100644
--- 
a/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/SpringUnitTestBase.java
+++ 
b/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/SpringUnitTestBase.java
@@ -36,7 +36,6 @@ import 
org.springframework.boot.test.autoconfigure.web.reactive.AutoConfigureWeb
 import org.springframework.boot.test.context.SpringBootTest;
 import org.springframework.scheduling.annotation.EnableScheduling;
 import org.springframework.test.context.ActiveProfiles;
-import org.springframework.test.context.TestPropertySource;
 import org.springframework.test.context.junit.jupiter.SpringExtension;
 
 import java.io.File;
@@ -50,11 +49,33 @@ import java.nio.file.Path;
 @ActiveProfiles("test")
 @AutoConfigureTestEntityManager
 @AutoConfigureWebTestClient(timeout = "60000")
-@TestPropertySource(locations = {"classpath:application-test.yml"})
-@ExtendWith({MockitoExtension.class, SpringExtension.class})
 @SpringBootTest(
     classes = StreamParkConsoleBootstrap.class,
-    webEnvironment = SpringBootTest.WebEnvironment.DEFINED_PORT)
+    webEnvironment = SpringBootTest.WebEnvironment.DEFINED_PORT,
+    properties = {
+      "server.port=10000",
+      "spring.application.name=Apache StreamPark",
+      "spring.main.banner-mode=false",
+      "spring.aop.proxy-target-class=true",
+      "spring.messages.encoding=utf-8",
+      "spring.main.allow-circular-references=true",
+      "spring.mvc.converters.preferred-json-mapper=jackson",
+      "spring.jackson.date-format=yyyy-MM-dd HH:mm:ss",
+      "spring.jackson.time-zone=GMT+8",
+      "spring.jackson.deserialization.fail-on-unknown-properties=false",
+      "spring.mvc.pathmatch.matching-strategy=ant_path_matcher",
+      "datasource.dialect=h2",
+      "spring.datasource.driver-class-name=org.h2.Driver",
+      "spring.datasource.username=sa",
+      "spring.datasource.password=sa",
+      
"spring.datasource.url=jdbc:h2:mem:streampark;MODE=MySQL;DB_CLOSE_DELAY=-1;DATABASE_TO_LOWER=true;INIT=runscript
 from 'classpath:db/schema-h2.sql'",
+      "spring.sql.init.data-locations=classpath:db/data-h2.sql",
+      "spring.sql.init.continue-on-error=true",
+      "spring.sql.init.username=sa",
+      "spring.sql.init.password=sa",
+      "spring.sql.init.mode=always"
+    })
+@ExtendWith({MockitoExtension.class, SpringExtension.class})
 public abstract class SpringUnitTestBase {
 
   protected static final Logger LOG = 
LoggerFactory.getLogger(SpringUnitTestBase.class);


Reply via email to