This is an automated email from the ASF dual-hosted git repository.
benjobs pushed a commit to branch dev-2.1.4
in repository https://gitbox.apache.org/repos/asf/incubator-streampark.git
The following commit(s) were added to refs/heads/dev-2.1.4 by this push:
new bae7f8d75 [Improve] config.yaml format improvement
bae7f8d75 is described below
commit bae7f8d751914f3f83e86f831b17453064e6326b
Author: benjobs <[email protected]>
AuthorDate: Sat Apr 6 00:38:21 2024 +0800
[Improve] config.yaml format improvement
---
.../src/main/resources/config.yaml | 123 +++++++++++----------
1 file changed, 67 insertions(+), 56 deletions(-)
diff --git
a/streampark-console/streampark-console-service/src/main/resources/config.yaml
b/streampark-console/streampark-console-service/src/main/resources/config.yaml
index b52179568..5be2ac8ff 100644
---
a/streampark-console/streampark-console-service/src/main/resources/config.yaml
+++
b/streampark-console/streampark-console-service/src/main/resources/config.yaml
@@ -15,66 +15,77 @@
# limitations under the License.
#
-# logging level
-logging.level.root: info
-# server port
-server.port: 10000
-# The user's login session has a validity period. If it exceeds this time, the
user will be automatically logout
-# unit: s|m|h|d, s: second, m:minute, h:hour, d: day
-server.session.ttl: 2h # unit[s|m|h|d], e.g: 24h, 2d....
+logging:
+ level:
+ root: info
-# see:
https://github.com/undertow-io/undertow/blob/master/core/src/main/java/io/undertow/Undertow.java
-server.undertow.direct-buffers: true
-server.undertow.buffer-size: 1024
-server.undertow.threads.io: 16
-server.undertow.threads.worker: 256
+server:
+ port: 10000
+ session:
+ # The user's login session has a validity period. If it exceeds this time,
the user will be automatically logged out
+ # unit: s|m|h|d, s: second, m:minute, h:hour, d: day
+ ttl: 2h # unit[s|m|h|d], e.g: 24h, 2d....
+ undertow: # see:
https://github.com/undertow-io/undertow/blob/master/core/src/main/java/io/undertow/Undertow.java
+ buffer-size: 1024
+ direct-buffers: true
+ threads:
+ io: 16
+ worker: 256
# system database, default h2, mysql|pgsql|h2
-datasource.dialect: h2 # h2, pgsql
-#if datasource.dialect is mysql or pgsql, you need to configure the following
connection information
-# mysql/postgresql connect access user
-datasource.username:
-# mysql/postgresql connect access password
-datasource.password:
-# mysql/postgresql connection address
-# mysql jdbc url example: datasource.url:
jdbc:mysql://localhost:3306/streampark?useUnicode=true&characterEncoding=UTF-8&useJDBCCompliantTimezoneShift=true&useLegacyDatetimeCode=false&serverTimezone=GMT%2B8
-# postgresql jdbc url example:
jdbc:postgresql://localhost:5432/streampark?stringtype=unspecified
-datasource.url:
-#---------------------------------------------------------------------------------
+datasource:
+ dialect: h2 # h2, mysql, pgsql
+ # if datasource.dialect is mysql or pgsql, you need to configure the
following connection information
+ # mysql/postgresql connect access user
+ username:
+ # mysql/postgresql connect access password
+ password:
+ # mysql/postgresql connection address
+ # mysql jdbc url example: datasource.url:
jdbc:mysql://localhost:3306/streampark?useUnicode=true&characterEncoding=UTF-8&useJDBCCompliantTimezoneShift=true&useLegacyDatetimeCode=false&serverTimezone=GMT%2B8
+ # postgresql jdbc url example:
jdbc:postgresql://localhost:5432/streampark?stringtype=unspecified
+ url:
-# Local workspace, storage of clone projects and compiled projects,Do not set
under $APP_HOME. Set it to a directory outside of $APP_HOME.
-streampark.workspace.local: /tmp/streampark
-# The root hdfs path of the jars, Same as yarn.provided.lib.dirs for flink on
yarn-application
-# and Same as --jars for spark on yarn
-streampark.workspace.remote: hdfs:///streampark/
-# hadoop yarn proxy path, e.g: knox process address
https://streampark.com:8443/proxy/yarn
-streampark.proxy.yarn-url:
-# lark proxy address, default https://open.feishu.cn
-streampark.proxy.lark-url:
-# flink on yarn or spark on yarn, monitoring job status from yarn, it is
necessary to set hadoop.http.authentication.type
-streampark.yarn.http-auth: simple # default simple, or kerberos
-# flink on yarn or spark on yarn, it is necessary to set
-streampark.hadoop-user-name: hdfs
-# flink on k8s ingress setting, If an ingress controller is specified in the
configuration, the ingress class
-# kubernetes.io/ingress.class must be specified when creating the ingress,
since there are often
-# multiple ingress controllers in a production environment.
-streampark.flink-k8s.ingress.class: nginx
-
-# sign streampark with ldap.
-ldap.enable: false # ldap enabled
-ldap.urls: ldap://99.99.99.99:389 #AD server IP, default port 389
-ldap.base-dn: dc=streampark,dc=com # Login Account
-ldap.username: cn=Manager,dc=streampark,dc=com
-ldap.password: streampark
-ldap.user.identity-attribute: uid
-ldap.user.email-attribute: mail
+streampark:
+ workspace:
+ # Local workspace, storage of clone projects and compiled projects. Do not
set under $APP_HOME. Set it to a directory outside of $APP_HOME.
+ local: /tmp/streampark
+ # The root hdfs path of the jars, Same as yarn.provided.lib.dirs for flink
on yarn-application
+ # and Same as --jars for spark on yarn
+ remote: hdfs:///streampark/
+ proxy:
+ # lark proxy address, default https://open.feishu.cn
+ lark-url:
+ # hadoop yarn proxy path, e.g: knox process address
https://streampark.com:8443/proxy/yarn
+ yarn-url:
+ yarn:
+ # flink on yarn or spark on yarn, monitoring job status from yarn, it is
necessary to set hadoop.http.authentication.type
+ http-auth: 'simple' # default simple, or kerberos
+ # flink on yarn or spark on yarn, HADOOP_USER_NAME
+ hadoop-user-name: hdfs
+ # flink on k8s ingress setting. If an ingress controller is specified in the
configuration, the ingress class
+ # kubernetes.io/ingress.class must be specified when creating the ingress,
since there are often
+ # multiple ingress controllers in a production environment.
+ flink-k8s.ingress.class: nginx
# flink on yarn or spark on yarn, when the hadoop cluster enable kerberos
authentication,
# it is necessary to set up Kerberos authentication related parameters.
-security.kerberos.login.enable: false
-security.kerberos.login.debug: false
-# kerberos principal path
-security.kerberos.login.principal:
-security.kerberos.login.krb5:
-security.kerberos.login.keytab:
-security.kerberos.ttl: 2h # unit [s|m|h|d]
+security:
+ kerberos:
+ login:
+ debug: false
+ enable: false
+ keytab:
+ krb5:
+ principal:
+ ttl: 2h # unit [s|m|h|d]
+
+# sign streampark with ldap.
+ldap:
+ base-dn: dc=streampark,dc=com # Login Account
+ enable: false # ldap enabled
+ username: cn=Manager,dc=streampark,dc=com
+ password: streampark
+ urls: ldap://99.99.99.99:389 #AD server IP, default port 389
+ user:
+ email-attribute: mail
+ identity-attribute: uid