This is an automated email from the ASF dual-hosted git repository.

benjobs pushed a commit to branch dev
in repository https://gitbox.apache.org/repos/asf/incubator-streampark.git


The following commit(s) were added to refs/heads/dev by this push:
     new 11bfb7217 [Improve] spark-app create-page field order adjustment 
(#4088)
11bfb7217 is described below

commit 11bfb7217ab422e1beccab2b11a442f419ed01fe
Author: benjobs <[email protected]>
AuthorDate: Wed Sep 25 11:39:03 2024 +0800

    [Improve] spark-app create-page field order adjustment (#4088)
    
    * [Improve] spark-app state improvements
    
    * [Improve] spark-app add-page bug fixed.
    
    * [Improve] license issue fixed
---
 .../src/locales/lang/en/flink/app.ts               | 26 +++----
 .../src/locales/lang/en/flink/variable.ts          |  2 +-
 .../src/locales/lang/en/setting/flinkCluster.ts    |  6 +-
 .../src/locales/lang/en/spark/app.ts               | 62 ++++++++---------
 .../src/locales/lang/zh-CN/flink/app.ts            | 15 ++--
 .../src/locales/lang/zh-CN/setting/flinkCluster.ts |  6 +-
 .../src/locales/lang/zh-CN/spark/app.ts            | 13 ++--
 .../src/views/flink/app/data/detail.data.ts        |  2 +-
 .../src/views/flink/app/hooks/useApp.tsx           |  8 +--
 .../flink/app/hooks/useCreateAndEditSchema.ts      |  2 +-
 .../src/views/flink/app/hooks/useCreateSchema.ts   |  8 +--
 .../src/views/flink/app/styles/Add.less            |  6 +-
 .../src/views/spark/app/components/AppForm.vue     | 12 ++--
 .../src/views/spark/app/create.vue                 |  3 +
 .../src/views/spark/app/data/detail.data.ts        |  2 +-
 .../src/views/spark/app/edit.vue                   | 18 +++--
 .../src/views/spark/app/hooks/useAppFormSchema.tsx | 79 ++++++++++++----------
 .../src/views/spark/app/hooks/useSparkAction.tsx   |  8 +--
 .../src/views/spark/app/styles/spark.less          | 31 +++++++++
 19 files changed, 172 insertions(+), 137 deletions(-)

diff --git 
a/streampark-console/streampark-console-webapp/src/locales/lang/en/flink/app.ts 
b/streampark-console/streampark-console-webapp/src/locales/lang/en/flink/app.ts
index 6e0c82bb8..20e479d26 100644
--- 
a/streampark-console/streampark-console-webapp/src/locales/lang/en/flink/app.ts
+++ 
b/streampark-console/streampark-console-webapp/src/locales/lang/en/flink/app.ts
@@ -16,7 +16,7 @@
  */
 export default {
   id: 'ID',
-  appName: 'Application Name',
+  appName: 'Job Name',
   searchName: 'Name',
   tags: 'Tags',
   owner: 'Owner',
@@ -26,14 +26,14 @@ export default {
   runStatus: 'Run Status',
   releaseBuild: 'Release Status',
   jobType: 'Job Type',
-  developmentMode: 'Development Mode',
-  executionMode: 'Execution Mode',
+  appType: 'Application Type',
+  executionMode: 'Deploy Mode',
   historyVersion: 'History Version',
   resource: 'Resource',
   resourcePlaceHolder: 'Please choose resource',
   selectAppPlaceHolder: 'choose app jar',
   dependency: 'Dependency',
-  appConf: 'Application Conf',
+  appConf: 'Configurations',
   resolveOrder: 'resolveOrder',
   parallelism: 'Parallelism',
   restartSize: 'Fault Restart Size',
@@ -255,10 +255,10 @@ export default {
     terminated: 'TERMINATED',
   },
   addAppTips: {
-    developmentModePlaceholder: 'Please select job type',
-    developmentModeIsRequiredMessage: 'Job Type is required',
-    executionModePlaceholder: 'Please select execution mode',
-    executionModeIsRequiredMessage: 'Execution Mode is required',
+    jobTypePlaceholder: 'Please select job type',
+    jobTypeIsRequiredMessage: 'Job Type is required',
+    executionModePlaceholder: 'Please select deploy mode',
+    executionModeIsRequiredMessage: 'Deploy Mode is required',
     hadoopEnvInitMessage:
       'Hadoop environment initialization failed, please check the environment 
settings',
     resourceFromMessage: 'resource from is required',
@@ -273,16 +273,16 @@ export default {
     useSysHadoopConf: 'Use System Hadoop Conf',
     flinkVersionIsRequiredMessage: 'Flink Version is required',
     appNamePlaceholder: 'Please enter application name',
-    appNameIsRequiredMessage: 'The application name is required',
-    appNameNotUniqueMessage: 'The application name already exists, must be 
unique. ',
+    appNameIsRequiredMessage: 'The job name is required',
+    appNameNotUniqueMessage: 'The job name already exists, must be unique. ',
     appNameExistsInYarnMessage:
-      'The application name already exists in YARN, cannot be repeated. Please 
check',
+      'The job name already exists in YARN, cannot be repeated. Please check',
     appNameExistsInK8sMessage:
-      'The application name already exists in Kubernetes,cannot be repeated. 
Please check',
+      'The job name already exists in Kubernetes,cannot be repeated. Please 
check',
     appNameValid: 'The job name is invalid',
     appNameRole: 'The job name must follow these rules: ',
     appNameNotValid:
-      'The application name is invalid, must be (Chinese or English or "-" or 
"_"), two consecutive spaces cannot appear.Please check',
+      'The job name is invalid, must be (Chinese or English or "-" or "_"), 
two consecutive spaces cannot appear.Please check',
     K8sSessionClusterIdRole: 'The Kubernetes clusterId must follow the 
following rules:',
     appNameK8sClusterIdRole:
       'The current deployment mode is kubernetes application mode, and the job 
name will be used as the clusterId in kubernetes. Therefore, the job name must 
follow the following rules:',
diff --git 
a/streampark-console/streampark-console-webapp/src/locales/lang/en/flink/variable.ts
 
b/streampark-console/streampark-console-webapp/src/locales/lang/en/flink/variable.ts
index f00403480..7b7c7467f 100644
--- 
a/streampark-console/streampark-console-webapp/src/locales/lang/en/flink/variable.ts
+++ 
b/streampark-console/streampark-console-webapp/src/locales/lang/en/flink/variable.ts
@@ -51,7 +51,7 @@ export default {
   },
   depend: {
     title: 'Variable Depend Apps',
-    jobName: 'Application Name',
+    jobName: 'Job Name',
     nickName: 'Owner',
     headerTitle: 'Variable " {0} " used list',
   },
diff --git 
a/streampark-console/streampark-console-webapp/src/locales/lang/en/setting/flinkCluster.ts
 
b/streampark-console/streampark-console-webapp/src/locales/lang/en/setting/flinkCluster.ts
index 0f054b559..c28bf96a2 100644
--- 
a/streampark-console/streampark-console-webapp/src/locales/lang/en/setting/flinkCluster.ts
+++ 
b/streampark-console/streampark-console-webapp/src/locales/lang/en/setting/flinkCluster.ts
@@ -27,7 +27,7 @@ export default {
     address: 'Cluster URL',
     runState: 'Run State',
     internal: 'internal cluster',
-    executionMode: 'Execution Mode',
+    executionMode: 'Deploy Mode',
     versionId: 'Flink Version',
     addExisting: 'existing cluster',
     addNew: 'new cluster',
@@ -50,7 +50,7 @@ export default {
   placeholder: {
     addType: 'Please select cluster Add Type',
     clusterName: 'Please enter cluster name',
-    executionMode: 'Please enter execution mode',
+    executionMode: 'Please enter deploy mode',
     versionId: 'Please select flink version',
     addressRemoteMode: 'Please enter jobManager URL',
     addressNoRemoteMode: 'Please enter cluster address,  e.g: 
http://host:port',
@@ -67,7 +67,7 @@ export default {
   },
   required: {
     address: 'Cluster address is required',
-    executionMode: 'Execution Mode is required',
+    executionMode: 'Deploy Mode is required',
     clusterId: 'Yarn Session Cluster is required',
     versionId: 'Flink Version is required',
     flinkImage: 'Flink Base Docker Image is required',
diff --git 
a/streampark-console/streampark-console-webapp/src/locales/lang/en/spark/app.ts 
b/streampark-console/streampark-console-webapp/src/locales/lang/en/spark/app.ts
index 58bc169c7..306e67d56 100644
--- 
a/streampark-console/streampark-console-webapp/src/locales/lang/en/spark/app.ts
+++ 
b/streampark-console/streampark-console-webapp/src/locales/lang/en/spark/app.ts
@@ -25,29 +25,29 @@ export default {
     VCore: 'Total VCores Used',
   },
   runState: {
-    added: 'Added',
-    new: 'New',
-    saving: 'Saving',
-    starting: 'Starting',
-    submitted: 'Submitted',
-    accept: 'Accepted',
-    running: 'Running',
-    finished: 'Finished',
-    failed: 'Job Failed',
-    lost: 'Job Lost',
-    mapping: 'Mapping',
-    other: 'Other',
-    revoked: 'Revoked',
-    stopping: 'Stopping',
-    success: 'Succeeded',
-    killed: 'Killed',
+    added: 'ADDED',
+    new: 'NEW',
+    saving: 'SAVING',
+    starting: 'STARTING',
+    submitted: 'SUBMITTED',
+    accept: 'ACCEPTED',
+    running: 'RUNNING',
+    finished: 'FINISHED',
+    failed: 'FAILED',
+    lost: 'LOST',
+    mapping: 'MAPPING',
+    other: 'OTHER',
+    revoked: 'REVOKED',
+    stopping: 'CANCELING',
+    success: 'SUCCEEDED',
+    killed: 'KILLED',
   },
   releaseState: {
-    releasing: 'Releasing',
-    failed: 'Release Failed',
-    success: 'Release Successful',
-    waiting: 'Waiting to Release',
-    pending: 'Pending Rollback',
+    failed: 'FAILED',
+    success: 'SUCCESS',
+    waiting: 'WAITING',
+    releasing: 'RELEASING',
+    pending: 'PENDING',
   },
   id: 'Applications ID',
   appName: 'Job Name',
@@ -60,14 +60,14 @@ export default {
   runStatus: 'Run Status',
   releaseBuild: 'Release Status',
   jobType: 'Job Type',
-  developmentMode: 'development Mode',
-  executionMode: 'Execution Mode',
+  appType: 'Application Type',
+  executionMode: 'Deploy Mode',
   historyVersion: 'History Version',
   resource: 'Spark App',
   resourcePlaceHolder: 'Select Resource',
   selectAppPlaceHolder: 'Select Applications',
   dependency: 'Job Dependency',
-  appConf: 'Applications Configuration',
+  appConf: 'Configurations',
   resolveOrder: 'Class Loading Order',
   parallelism: 'Parallelism',
   restartSize: '(On Failure) Restart Count',
@@ -91,7 +91,7 @@ export default {
   mainClass: 'Main Entry Class',
   project: 'Project',
   module: 'Module',
-  appType: 'Job Type',
+  appType: 'Application Type',
   programArgs: 'Program Arguments',
   programJar: 'Program Jar File',
   dynamicProperties: 'Dynamic Parameters',
@@ -232,10 +232,10 @@ export default {
   },
 
   addAppTips: {
-    developmentModePlaceholder: 'Please select development mode',
-    developmentModeIsRequiredMessage: 'Development mode is required',
-    executionModePlaceholder: 'Please select execution mode',
-    executionModeIsRequiredMessage: 'Execution mode is required',
+    jobTypePlaceholder: 'Please select job type',
+    jobTypeIsRequiredMessage: 'Job type is required',
+    executionModePlaceholder: 'Please select deploy mode',
+    executionModeIsRequiredMessage: 'Deploy mode is required',
     hadoopEnvInitMessage: 'Hadoop environment check failed, please check 
configuration',
     resourceFromMessage: 'Resource source is required',
     mainClassPlaceholder: 'Please enter the main entry class',
@@ -243,8 +243,8 @@ export default {
     projectPlaceholder: 'Please select a project',
     projectIsRequiredMessage: 'Project is required',
     projectModulePlaceholder: 'Please select the project module',
-    appTypePlaceholder: 'Please select job type',
-    appTypeIsRequiredMessage: 'Job type is required',
+    appTypePlaceholder: 'Please select application type',
+    appTypeIsRequiredMessage: 'Application type is required',
     programJarIsRequiredMessage: 'Program jar file is required',
     useSysHadoopConf: 'Use system Hadoop configuration',
     sparkVersionIsRequiredMessage: 'Spark version is required',
diff --git 
a/streampark-console/streampark-console-webapp/src/locales/lang/zh-CN/flink/app.ts
 
b/streampark-console/streampark-console-webapp/src/locales/lang/zh-CN/flink/app.ts
index 86be877c7..f8bd4d15b 100644
--- 
a/streampark-console/streampark-console-webapp/src/locales/lang/zh-CN/flink/app.ts
+++ 
b/streampark-console/streampark-console-webapp/src/locales/lang/zh-CN/flink/app.ts
@@ -26,8 +26,7 @@ export default {
   runStatus: '运行状态',
   releaseBuild: '发布状态',
   jobType: '作业类型',
-  developmentMode: '作业模式',
-  executionMode: '执行模式',
+  executionMode: '部署模式',
   historyVersion: '历史版本',
   resource: '资源',
   resourcePlaceHolder: '从选择资源',
@@ -243,17 +242,17 @@ export default {
     terminated: '终止',
   },
   releaseState: {
+    releasing: '发布中',
     failed: '发布失败',
     success: '发布成功',
     waiting: '待发布',
-    releasing: '发布中',
-    pending: '待回滚',
+    pending: '待重启',
   },
   addAppTips: {
-    developmentModePlaceholder: '请选择开发模式',
-    developmentModeIsRequiredMessage: '开发模式必填',
-    executionModePlaceholder: '请选择执行模式',
-    executionModeIsRequiredMessage: '执行模式必填',
+    jobTypePlaceholder: '请选择作业类型',
+    jobTypeIsRequiredMessage: '作业类型必填',
+    executionModePlaceholder: '请选择部署模式',
+    executionModeIsRequiredMessage: '部署模式必填',
     hadoopEnvInitMessage: 'hadoop环境检查失败, 请检查配置',
     resourceFromMessage: '资源来源必填',
     mainClassPlaceholder: '请输入程序入口类',
diff --git 
a/streampark-console/streampark-console-webapp/src/locales/lang/zh-CN/setting/flinkCluster.ts
 
b/streampark-console/streampark-console-webapp/src/locales/lang/zh-CN/setting/flinkCluster.ts
index d27416e19..77bec2d26 100644
--- 
a/streampark-console/streampark-console-webapp/src/locales/lang/zh-CN/setting/flinkCluster.ts
+++ 
b/streampark-console/streampark-console-webapp/src/locales/lang/zh-CN/setting/flinkCluster.ts
@@ -26,7 +26,7 @@ export default {
     clusterName: '集群名称',
     address: '集群URL',
     runState: '运行状态',
-    executionMode: '执行模式',
+    executionMode: '部署模式',
     versionId: 'Flink版本',
     addType: '添加类型',
     addExisting: '已有集群',
@@ -49,7 +49,7 @@ export default {
   placeholder: {
     addType: '请选择集群添加类型',
     clusterName: '请输入集群名称',
-    executionMode: '请选择执行模式',
+    executionMode: '请选择部署模式',
     versionId: '请选择Flink版本',
     addressRemoteMode: '请输入Flink 集群JobManager URL访问地址',
     addressNoRemoteMode: '请输入集群地址,例如:http://host:port',
@@ -66,7 +66,7 @@ export default {
   },
   required: {
     address: '必须填写集群地址',
-    executionMode: '执行模式必填',
+    executionMode: '部署模式必填',
     clusterId: 'Yarn Session Cluster 为必填项',
     versionId: 'Flink 版本必选',
     flinkImage: 'link基础docker镜像是必填的',
diff --git 
a/streampark-console/streampark-console-webapp/src/locales/lang/zh-CN/spark/app.ts
 
b/streampark-console/streampark-console-webapp/src/locales/lang/zh-CN/spark/app.ts
index 495c04a85..a00276e3b 100644
--- 
a/streampark-console/streampark-console-webapp/src/locales/lang/zh-CN/spark/app.ts
+++ 
b/streampark-console/streampark-console-webapp/src/locales/lang/zh-CN/spark/app.ts
@@ -47,7 +47,7 @@ export default {
     failed: '发布失败',
     success: '发布成功',
     waiting: '待发布',
-    pending: '待回滚',
+    pending: '待重启',
   },
   id: '应用ID',
   appName: '应用名称',
@@ -60,8 +60,7 @@ export default {
   runStatus: '运行状态',
   releaseBuild: '发布状态',
   jobType: '作业类型',
-  developmentMode: '开发模式',
-  executionMode: '执行模式',
+  executionMode: '部署模式',
   historyVersion: '历史版本',
   resource: '资源',
   resourcePlaceHolder: '从选择资源',
@@ -231,10 +230,10 @@ export default {
   },
 
   addAppTips: {
-    developmentModePlaceholder: '请选择开发模式',
-    developmentModeIsRequiredMessage: '开发模式必填',
-    executionModePlaceholder: '请选择执行模式',
-    executionModeIsRequiredMessage: '执行模式必填',
+    jobTypePlaceholder: '请选择作业类型',
+    jobTypeIsRequiredMessage: '作业类型必填',
+    executionModePlaceholder: '请选择部署模式',
+    executionModeIsRequiredMessage: '部署模式必填',
     hadoopEnvInitMessage: 'hadoop环境检查失败, 请检查配置',
     resourceFromMessage: '资源来源必填',
     mainClassPlaceholder: '请输入程序入口类',
diff --git 
a/streampark-console/streampark-console-webapp/src/views/flink/app/data/detail.data.ts
 
b/streampark-console/streampark-console-webapp/src/views/flink/app/data/detail.data.ts
index 7e71e2a69..b1efbea8d 100644
--- 
a/streampark-console/streampark-console-webapp/src/views/flink/app/data/detail.data.ts
+++ 
b/streampark-console/streampark-console-webapp/src/views/flink/app/data/detail.data.ts
@@ -36,7 +36,7 @@ export const getDescSchema = (): DescItem[] => {
     { field: 'jobName', label: t('flink.app.appName') },
     {
       field: 'jobType',
-      label: t('flink.app.developmentMode'),
+      label: t('flink.app.jobType'),
       render: (curVal) =>
         h(
           'div',
diff --git 
a/streampark-console/streampark-console-webapp/src/views/flink/app/hooks/useApp.tsx
 
b/streampark-console/streampark-console-webapp/src/views/flink/app/hooks/useApp.tsx
index 22257b8a5..2dbd257c2 100644
--- 
a/streampark-console/streampark-console-webapp/src/views/flink/app/hooks/useApp.tsx
+++ 
b/streampark-console/streampark-console-webapp/src/views/flink/app/hooks/useApp.tsx
@@ -228,7 +228,7 @@ export const useFlinkApplication = (openStartModal: Fn) => {
         return (
           <Form class="!pt-50px">
             <Form.Item
-              label="Application Name"
+              label="Job Name"
               labelCol={{ lg: { span: 7 }, sm: { span: 7 } }}
               wrapperCol={{ lg: { span: 16 }, sm: { span: 4 } }}
               validateStatus={unref(validateStatus)}
@@ -237,7 +237,7 @@ export const useFlinkApplication = (openStartModal: Fn) => {
             >
               <Input
                 type="text"
-                placeholder="New Application Name"
+                placeholder="New Job Name"
                 onInput={(e) => {
                   copyAppName = e.target.value;
                 }}
@@ -252,7 +252,7 @@ export const useFlinkApplication = (openStartModal: Fn) => {
         //1) check empty
         if (copyAppName == null) {
           validateStatus.value = 'error';
-          help = 'Sorry, Application Name cannot be empty';
+          help = 'Sorry, Job Name cannot be empty';
           return Promise.reject('copy application error');
         }
         //2) check name
@@ -323,7 +323,7 @@ export const useFlinkApplication = (openStartModal: Fn) => {
             wrapperCol={{ lg: { span: 16 }, sm: { span: 4 } }}
             v-model:model={formValue}
           >
-            <Form.Item label="Application Name">
+            <Form.Item label="Job Name">
               <Alert message={app.jobName} type="info" />
             </Form.Item>
             {[
diff --git 
a/streampark-console/streampark-console-webapp/src/views/flink/app/hooks/useCreateAndEditSchema.ts
 
b/streampark-console/streampark-console-webapp/src/views/flink/app/hooks/useCreateAndEditSchema.ts
index c464b76aa..456fb34b5 100644
--- 
a/streampark-console/streampark-console-webapp/src/views/flink/app/hooks/useCreateAndEditSchema.ts
+++ 
b/streampark-console/streampark-console-webapp/src/views/flink/app/hooks/useCreateAndEditSchema.ts
@@ -498,7 +498,7 @@ export const useCreateAndEditSchema = (
     return [
       {
         field: 'jobType',
-        label: t('flink.app.developmentMode'),
+        label: t('flink.app.jobType'),
         component: 'Input',
         render: ({ model }) => {
           if (model.jobType == JobTypeEnum.JAR) {
diff --git 
a/streampark-console/streampark-console-webapp/src/views/flink/app/hooks/useCreateSchema.ts
 
b/streampark-console/streampark-console-webapp/src/views/flink/app/hooks/useCreateSchema.ts
index ccadab692..45fac9934 100644
--- 
a/streampark-console/streampark-console-webapp/src/views/flink/app/hooks/useCreateSchema.ts
+++ 
b/streampark-console/streampark-console-webapp/src/views/flink/app/hooks/useCreateSchema.ts
@@ -101,11 +101,11 @@ export const useCreateSchema = (dependencyRef: Ref) => {
     return [
       {
         field: 'jobType',
-        label: t('flink.app.developmentMode'),
+        label: t('flink.app.jobType'),
         component: 'Select',
         componentProps: ({ formModel }) => {
           return {
-            placeholder: t('flink.app.addAppTips.developmentModePlaceholder'),
+            placeholder: t('flink.app.addAppTips.jobTypePlaceholder'),
             options: getJobTypeOptions(),
             onChange: (value) => {
               if (value != JobTypeEnum.SQL) {
@@ -115,9 +115,7 @@ export const useCreateSchema = (dependencyRef: Ref) => {
           };
         },
         defaultValue: String(JobTypeEnum.SQL),
-        rules: [
-          { required: true, message: 
t('flink.app.addAppTips.developmentModeIsRequiredMessage') },
-        ],
+        rules: [{ required: true, message: 
t('flink.app.addAppTips.jobTypeIsRequiredMessage') }],
       },
       ...getExecutionModeSchema.value,
       ...getFlinkClusterSchemas.value,
diff --git 
a/streampark-console/streampark-console-webapp/src/views/flink/app/styles/Add.less
 
b/streampark-console/streampark-console-webapp/src/views/flink/app/styles/Add.less
index 7cc9a6333..b50f998f2 100644
--- 
a/streampark-console/streampark-console-webapp/src/views/flink/app/styles/Add.less
+++ 
b/streampark-console/streampark-console-webapp/src/views/flink/app/styles/Add.less
@@ -103,8 +103,7 @@
 }
 
 .pod-template-tool,
-.flinksql-tool,
-.sparkSql-tool {
+.flinksql-tool {
   z-index: 99;
   float: right;
   margin-right: 5px;
@@ -112,8 +111,7 @@
   margin-top: 5px;
 }
 
-.flinksql-tool-item,
-.sparkSql-tool-item {
+.flinksql-tool-item {
   font-size: 12px;
   display: flex;
   align-items: center;
diff --git 
a/streampark-console/streampark-console-webapp/src/views/spark/app/components/AppForm.vue
 
b/streampark-console/streampark-console-webapp/src/views/spark/app/components/AppForm.vue
index fa598b933..23be7f979 100644
--- 
a/streampark-console/streampark-console-webapp/src/views/spark/app/components/AppForm.vue
+++ 
b/streampark-console/streampark-console-webapp/src/views/spark/app/components/AppForm.vue
@@ -101,13 +101,11 @@
       />
     </template>
     <template #args="{ model }">
-      <template v-if="model.args !== undefined">
-        <ProgramArgs
-          v-model:value="model.args"
-          :suggestions="suggestions"
-          @preview="(value) => openReviewDrawer(true, { value, suggestions })"
-        />
-      </template>
+      <ProgramArgs
+        v-model:value="model.args"
+        :suggestions="suggestions"
+        @preview="(value) => openReviewDrawer(true, { value, suggestions })"
+      />
     </template>
     <template #formFooter>
       <div class="flex items-center w-full justify-center">
diff --git 
a/streampark-console/streampark-console-webapp/src/views/spark/app/create.vue 
b/streampark-console/streampark-console-webapp/src/views/spark/app/create.vue
index 4d783d94b..684556f62 100644
--- 
a/streampark-console/streampark-console-webapp/src/views/spark/app/create.vue
+++ 
b/streampark-console/streampark-console-webapp/src/views/spark/app/create.vue
@@ -155,3 +155,6 @@
     />
   </PageWrapper>
 </template>
+<style lang="less">
+  @import url('./styles/spark.less');
+</style>
diff --git 
a/streampark-console/streampark-console-webapp/src/views/spark/app/data/detail.data.ts
 
b/streampark-console/streampark-console-webapp/src/views/spark/app/data/detail.data.ts
index 8ee123011..314ab3889 100644
--- 
a/streampark-console/streampark-console-webapp/src/views/spark/app/data/detail.data.ts
+++ 
b/streampark-console/streampark-console-webapp/src/views/spark/app/data/detail.data.ts
@@ -37,7 +37,7 @@ export const getDescSchema = (): DescItem[] => {
     { field: 'appName', label: t('spark.app.appName') },
     {
       field: 'jobType',
-      label: t('spark.app.developmentMode'),
+      label: t('spark.app.jobType'),
       render: (curVal) => sparkJobTypeMap[+curVal]?.label || '-',
     },
     {
diff --git 
a/streampark-console/streampark-console-webapp/src/views/spark/app/edit.vue 
b/streampark-console/streampark-console-webapp/src/views/spark/app/edit.vue
index 76c90b6ed..7599dff50 100644
--- a/streampark-console/streampark-console-webapp/src/views/spark/app/edit.vue
+++ b/streampark-console/streampark-console-webapp/src/views/spark/app/edit.vue
@@ -64,7 +64,7 @@
       configOverride,
     });
     sparkApp.value = res;
-    nextTick(() => {
+    await nextTick(() => {
       if (res.sparkSql) 
appFormRef.value?.sparkSql?.setContent(decodeByBase64(res.sparkSql));
     });
     return res;
@@ -90,11 +90,11 @@
       hadoopUser: values.hadoopUser,
       description: values.description,
     };
-    handleUpdateAction(params);
+    await handleUpdateAction(params);
   }
   /* spark sql mode */
   async function handleSQLMode(values: Recordable) {
-    handleUpdateAction({
+    await handleUpdateAction({
       jobType: JobTypeEnum.SQL,
       executionMode: values.executionMode,
       appType: AppTypeEnum.APACHE_SPARK,
@@ -117,7 +117,7 @@
   /* Submit to create */
   async function handleAppSubmit(formValue: Recordable) {
     const { configOverride } = formValue;
-    if (configOverride != null && configOverride !== undefined && 
configOverride.trim() != '') {
+    if (configOverride != null && configOverride.trim() != '') {
       formValue.config = encryptByBase64(configOverride);
     } else {
       formValue.config = null;
@@ -133,9 +133,9 @@
           throw new Error(access);
         }
       }
-      handleSQLMode(formValue);
+      await handleSQLMode(formValue);
     } else {
-      handleCustomJobMode(formValue);
+      await handleCustomJobMode(formValue);
     }
   }
   /* send create request */
@@ -143,7 +143,7 @@
     const fetchParams: SparkApplication = {};
     for (const k in params) {
       const v = params[k];
-      if (v != null && v !== undefined) {
+      if (v != null) {
         fetchParams[k] = v;
       }
     }
@@ -182,3 +182,7 @@
     />
   </PageWrapper>
 </template>
+
+<style lang="less">
+  @import url('./styles/spark.less');
+</style>
diff --git 
a/streampark-console/streampark-console-webapp/src/views/spark/app/hooks/useAppFormSchema.tsx
 
b/streampark-console/streampark-console-webapp/src/views/spark/app/hooks/useAppFormSchema.tsx
index acade5001..17960169a 100644
--- 
a/streampark-console/streampark-console-webapp/src/views/spark/app/hooks/useAppFormSchema.tsx
+++ 
b/streampark-console/streampark-console-webapp/src/views/spark/app/hooks/useAppFormSchema.tsx
@@ -17,7 +17,7 @@
 import { computed, onMounted, ref, unref, type Ref } from 'vue';
 import type { FormSchema } from '/@/components/Form';
 import { useI18n } from '/@/hooks/web/useI18n';
-import { AppExistsStateEnum, JobTypeEnum } from '/@/enums/sparkEnum';
+import { AppExistsStateEnum, JobTypeEnum, ExecModeEnum } from 
'/@/enums/sparkEnum';
 import { ResourceFromEnum } from '/@/enums/flinkEnum';
 import type { SparkEnv } from '/@/api/spark/home.type';
 import type { RuleObject } from 'ant-design-vue/lib/form';
@@ -72,7 +72,7 @@ export function useSparkSchema(sparkEnvs: Ref<SparkEnv[]>) {
       return [
         {
           field: 'jobType',
-          label: t('spark.app.developmentMode'),
+          label: t('spark.app.jobType'),
           component: 'Input',
           render: ({ model }) => {
             const jobOptions = getJobTypeOptions();
@@ -91,11 +91,11 @@ export function useSparkSchema(sparkEnvs: Ref<SparkEnv[]>) {
       return [
         {
           field: 'jobType',
-          label: t('spark.app.developmentMode'),
+          label: t('spark.app.jobType'),
           component: 'Select',
           componentProps: ({ formModel }) => {
             return {
-              placeholder: 
t('spark.app.addAppTips.developmentModePlaceholder'),
+              placeholder: t('spark.app.addAppTips.jobTypePlaceholder'),
               options: getJobTypeOptions(),
               onChange: (value) => {
                 if (value != JobTypeEnum.SQL) {
@@ -108,7 +108,7 @@ export function useSparkSchema(sparkEnvs: Ref<SparkEnv[]>) {
           rules: [
             {
               required: true,
-              message: 
t('spark.app.addAppTips.developmentModeIsRequiredMessage'),
+              message: t('spark.app.addAppTips.jobTypeIsRequiredMessage'),
               type: 'number',
             },
           ],
@@ -196,6 +196,32 @@ export function useSparkSchema(sparkEnvs: Ref<SparkEnv[]>) 
{
           ];
         },
       },
+      {
+        field: 'args',
+        label: t('spark.app.programArgs'),
+        component: 'InputTextArea',
+        defaultValue: '',
+        slot: 'args',
+        ifShow: ({ values }) => [JobTypeEnum.JAR, 
JobTypeEnum.PYSPARK].includes(values?.jobType),
+      },
+      {
+        field: 'appProperties',
+        label: 'Spark Properties',
+        component: 'InputTextArea',
+        componentProps: {
+          rows: 4,
+          placeholder: '--conf, -c PROP=VALUE Arbitrary Spark configuration 
property.',
+        },
+      },
+      { field: 'configOverride', label: '', component: 'Input', show: false },
+      {
+        field: 'isSetConfig',
+        label: t('spark.app.appConf'),
+        component: 'Switch',
+        render({ model, field }) {
+          return renderIsSetConfig(model, field, registerConfDrawer, 
openConfDrawer);
+        },
+      },
       {
         field: 'tags',
         label: t('spark.app.tags'),
@@ -204,10 +230,21 @@ export function useSparkSchema(sparkEnvs: 
Ref<SparkEnv[]>) {
           placeholder: t('spark.app.addAppTips.tagsPlaceholder'),
         },
       },
+      {
+        field: 'hadoopUser',
+        label: t('spark.app.hadoopUser'),
+        component: 'Input',
+        ifShow: ({ values }) =>
+          values?.executionMode == ExecModeEnum.YARN_CLIENT ||
+          values?.executionMode == ExecModeEnum.YARN_CLUSTER,
+      },
       {
         field: 'yarnQueue',
         label: t('spark.app.yarnQueue'),
         component: 'Input',
+        ifShow: ({ values }) =>
+          values?.executionMode == ExecModeEnum.YARN_CLIENT ||
+          values?.executionMode == ExecModeEnum.YARN_CLUSTER,
         render: ({ model, field }) => {
           return (
             <div>
@@ -230,38 +267,6 @@ export function useSparkSchema(sparkEnvs: Ref<SparkEnv[]>) 
{
           );
         },
       },
-      { field: 'configOverride', label: '', component: 'Input', show: false },
-      {
-        field: 'isSetConfig',
-        label: t('spark.app.appConf'),
-        component: 'Switch',
-        render({ model, field }) {
-          return renderIsSetConfig(model, field, registerConfDrawer, 
openConfDrawer);
-        },
-      },
-      {
-        field: 'appProperties',
-        label: 'Application Properties',
-        component: 'InputTextArea',
-        componentProps: {
-          rows: 4,
-          placeholder:
-            '$key=$value,If there are multiple parameters,you can new line 
enter them (-D <arg>)',
-        },
-      },
-      {
-        field: 'args',
-        label: t('spark.app.programArgs'),
-        component: 'InputTextArea',
-        defaultValue: '',
-        slot: 'args',
-        ifShow: ({ values }) => [JobTypeEnum.JAR, 
JobTypeEnum.PYSPARK].includes(values?.jobType),
-      },
-      {
-        field: 'hadoopUser',
-        label: t('spark.app.hadoopUser'),
-        component: 'Input',
-      },
       {
         field: 'description',
         label: t('common.description'),
diff --git 
a/streampark-console/streampark-console-webapp/src/views/spark/app/hooks/useSparkAction.tsx
 
b/streampark-console/streampark-console-webapp/src/views/spark/app/hooks/useSparkAction.tsx
index f69fe39ba..e7a665d5d 100644
--- 
a/streampark-console/streampark-console-webapp/src/views/spark/app/hooks/useSparkAction.tsx
+++ 
b/streampark-console/streampark-console-webapp/src/views/spark/app/hooks/useSparkAction.tsx
@@ -257,7 +257,7 @@ export const useSparkAction = (optionApps: Recordable) => {
         return (
           <Form class="!pt-50px">
             <Form.Item
-              label="Application Name"
+              label="Job Name"
               labelCol={{ lg: { span: 7 }, sm: { span: 7 } }}
               wrapperCol={{ lg: { span: 16 }, sm: { span: 4 } }}
               validateStatus={unref(validateStatus)}
@@ -266,7 +266,7 @@ export const useSparkAction = (optionApps: Recordable) => {
             >
               <Input
                 type="text"
-                placeholder="New Application Name"
+                placeholder="New Job Name"
                 onInput={(e) => {
                   copyAppName = e.target.value;
                 }}
@@ -281,7 +281,7 @@ export const useSparkAction = (optionApps: Recordable) => {
         //1) check empty
         if (copyAppName == null) {
           validateStatus.value = 'error';
-          help = 'Sorry, Application Name cannot be empty';
+          help = 'Sorry, Job Name cannot be empty';
           return Promise.reject('copy application error');
         }
         //2) check name
@@ -352,7 +352,7 @@ export const useSparkAction = (optionApps: Recordable) => {
             wrapperCol={{ lg: { span: 16 }, sm: { span: 4 } }}
             v-model:model={formValue}
           >
-            <Form.Item label="Application Name">
+            <Form.Item label="Job Name">
               <Alert message={app.appName} type="info" />
             </Form.Item>
             {[ExecModeEnum.YARN_CLIENT, 
ExecModeEnum.YARN_CLUSTER].includes(app.executionMode) && (
diff --git 
a/streampark-console/streampark-console-webapp/src/views/spark/app/styles/spark.less
 
b/streampark-console/streampark-console-webapp/src/views/spark/app/styles/spark.less
new file mode 100644
index 000000000..6045ff03a
--- /dev/null
+++ 
b/streampark-console/streampark-console-webapp/src/views/spark/app/styles/spark.less
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+.sparksql-tool,
+.sparkSql-tool {
+  z-index: 99;
+  float: right;
+  margin-right: 5px;
+  cursor: pointer;
+  margin-top: 5px;
+}
+
+.sparkSql-tool-item {
+  font-size: 12px;
+  display: flex;
+  align-items: center;
+}


Reply via email to