This is an automated email from the ASF dual-hosted git repository.
kriszu pushed a commit to branch dev
in repository https://gitbox.apache.org/repos/asf/incubator-streampark.git
The following commit(s) were added to refs/heads/dev by this push:
new 65f20f55a [Improve] job name validator improvement (#3797)
65f20f55a is described below
commit 65f20f55a6320391abe739f0d0f43b425c459ed3
Author: benjobs <[email protected]>
AuthorDate: Sun Jun 23 08:41:55 2024 +0800
[Improve] job name validator improvement (#3797)
---
README.md | 10 +-
.../main/assembly/script/schema/mysql-schema.sql | 186 ++++++++++-----------
.../main/assembly/script/schema/pgsql-schema.sql | 29 ++--
.../main/assembly/script/upgrade/mysql/2.2.0.sql | 3 -
.../main/assembly/script/upgrade/pgsql/2.2.0.sql | 3 -
.../console/core/entity/AppBuildPipeline.java | 4 +-
.../console/core/entity/Application.java | 2 -
.../console/core/entity/SparkApplication.java | 2 -
.../console/core/enums/FlinkAppStateEnum.java | 3 -
.../console/core/enums/SparkAppStateEnum.java | 3 -
.../console/core/mapper/ApplicationMapper.java | 2 -
.../core/mapper/SparkApplicationMapper.java | 2 -
.../application/ApplicationManageService.java | 7 -
.../application/SparkApplicationManageService.java | 7 -
.../impl/ApplicationManageServiceImpl.java | 4 -
.../impl/SparkApplicationManageServiceImpl.java | 4 -
.../core/service/impl/AppBuildPipeServiceImpl.java | 8 +-
.../service/impl/SparkAppBuildPipeServiceImpl.java | 8 +-
.../console/core/watcher/FlinkAppHttpWatcher.java | 18 +-
.../core/watcher/FlinkK8sChangeEventListener.java | 14 +-
.../src/main/resources/db/schema-h2.sql | 2 -
.../resources/mapper/core/ApplicationMapper.xml | 16 --
.../mapper/core/SparkApplicationMapper.xml | 16 --
.../src/enums/flinkEnum.ts | 2 -
.../src/locales/lang/en/flink/app.ts | 12 +-
.../src/locales/lang/zh-CN/flink/app.ts | 39 +++--
.../src/locales/lang/zh-CN/setting/flinkCluster.ts | 14 +-
.../src/locales/lang/zh-CN/setting/system.ts | 2 +-
.../src/views/flink/app/components/State.less | 11 --
.../flink/app/hooks/useCreateAndEditSchema.ts | 52 ++++--
.../src/views/flink/app/hooks/useFlinkRender.tsx | 40 +++++
.../client/tool/FlinkSessionClientHelper.scala | 1 -
.../streampark/flink/packer/PackerResourceGC.scala | 2 +-
.../flink/packer/docker/DockerRetriever.scala | 10 +-
.../docker/FlinkDockerfileTemplateTrait.scala | 2 +-
.../flink/packer/pipeline/BuildPipeline.scala | 4 +-
.../{PipeError.scala => PipelineError.scala} | 0
.../{PipeSnapshot.scala => PipelineSnapshot.scala} | 2 +-
.../{PipeWatcher.scala => PipelineWatcher.scala} | 12 +-
.../flink/core/FlinkStreamTableTrait.scala | 1 -
.../streampark/flink/core/FlinkTableTrait.scala | 4 -
41 files changed, 266 insertions(+), 297 deletions(-)
diff --git a/README.md b/README.md
index 683b9c874..1e4b54ed2 100644
--- a/README.md
+++ b/README.md
@@ -43,9 +43,9 @@
## 🚀 Abstract
----
-<h4>Apache StreamPark is a stream processing development framework and
professional management platform. </h4>
+<h4>Apache StreamPark™ is a stream processing development framework and
application management platform. </h4>
-> Apache StreamPark is a streaming application development framework. Aimed at
ease building and managing streaming applications, StreamPark provides
development framework for writing stream processing application with Apache
Flink and Apache Spark, More other engines will be supported in the future.
Also, StreamPark is a professional management platform for streaming
application, including application development, debugging, interactive query,
deployment, operation, maintenance, etc. It [...]
+> Apache StreamPark™ is a streaming application development framework. Aimed
at ease building and managing streaming applications, StreamPark provides
development framework for writing stream processing application with Apache
Flink and Apache Spark, More other engines will be supported in the future.
Also, StreamPark is a professional management platform for streaming
application, including application development, debugging, interactive query,
deployment, operation, maintenance, etc. I [...]
* Apache Flink & Apache Spark application development scaffold
* Support multiple versions of Apache Flink & Apache Spark
@@ -54,8 +54,6 @@
* Support catalog、olap、streaming-warehouse etc.
* ...
-
-
## 🚀 QuickStart
- [Start with Docker](docker/README.md)
@@ -79,7 +77,7 @@ Download address for run-directly software package:
https://streampark.apache.or
## 💋 Our users
-Various companies and organizations use StreamPark for research, production
and commercial products. Are you using this project? [Welcome to add your
company](https://github.com/apache/incubator-streampark/issues/163)!
+Various companies and organizations use Apache StreamPark™ for research,
production and commercial products. Are you using this project? [Welcome to add
your company](https://github.com/apache/incubator-streampark/issues/163)!

@@ -100,7 +98,7 @@ We welcome your suggestions, comments (including
criticisms), comments and contr
### 📤 Subscribe Mailing Lists
Mail List is the most recognized form of communication in Apache community.
See how to [Join the Mailing
Lists](https://streampark.apache.org/community/contribution_guide/mailing_lists)
-Thank you to all the people who already contributed to StreamPark!
+Thank you to all the people who already contributed to Apache StreamPark™!
[](https://github.com/apache/incubator-streampark/graphs/contributors)
diff --git
a/streampark-console/streampark-console-service/src/main/assembly/script/schema/mysql-schema.sql
b/streampark-console/streampark-console-service/src/main/assembly/script/schema/mysql-schema.sql
index a7b2c020e..41495bc71 100644
---
a/streampark-console/streampark-console-service/src/main/assembly/script/schema/mysql-schema.sql
+++
b/streampark-console/streampark-console-service/src/main/assembly/script/schema/mysql-schema.sql
@@ -102,7 +102,6 @@ create table `t_flink_app` (
`ingress_template` text collate utf8mb4_general_ci,
`default_mode_ingress` text collate utf8mb4_general_ci,
`tags` varchar(500) default null,
- `probing` tinyint default 0,
`hadoop_user` varchar(64) collate utf8mb4_general_ci default null,
primary key (`id`) using btree,
key `inx_job_type` (`job_type`) using btree,
@@ -523,22 +522,22 @@ create table `t_yarn_queue` (
-- ----------------------------
drop table if exists `t_resource`;
create table if not exists `t_resource` (
-`id` bigint not null auto_increment ,
-`resource_name` varchar(128) not null comment 'The name of the resource',
-`resource_type` int not null comment '0:app 1:common 2:connector 3:format
4:udf',
-`resource_path` varchar(255) default null,
-`resource` text comment 'resource content, including jars and poms',
-`engine_type` int not null comment 'compute engine type, 0:apache flink
1:apache spark',
-`main_class` varchar(255) default null,
-`description` text default null comment 'More detailed description of
resource',
-`creator_id` bigint not null comment 'user id of creator',
-`connector_required_options` text default null,
-`connector_optional_options` text default null,
-`team_id` bigint not null comment 'team id',
-`create_time` datetime default null comment 'create time',
-`modify_time` datetime not null default current_timestamp comment 'modify
time',
- primary key (`id`) using btree,
- unique key `un_team_vcode_inx` (`team_id`,`resource_name`) using btree
+ `id` bigint not null auto_increment ,
+ `resource_name` varchar(128) not null comment 'The name of the resource',
+ `resource_type` int not null comment '0:app 1:common 2:connector 3:format
4:udf',
+ `resource_path` varchar(255) default null,
+ `resource` text comment 'resource content, including jars and poms',
+ `engine_type` int not null comment 'compute engine type, 0:apache flink
1:apache spark',
+ `main_class` varchar(255) default null,
+ `description` text default null comment 'More detailed description of
resource',
+ `creator_id` bigint not null comment 'user id of creator',
+ `connector_required_options` text default null,
+ `connector_optional_options` text default null,
+ `team_id` bigint not null comment 'team id',
+ `create_time` datetime default null comment 'create time',
+ `modify_time` datetime not null default current_timestamp comment 'modify
time',
+ primary key (`id`) using btree,
+ unique key `un_team_vcode_inx` (`team_id`,`resource_name`) using btree
) engine=innodb auto_increment=100000 default charset=utf8mb4
collate=utf8mb4_general_ci;
@@ -547,17 +546,17 @@ create table if not exists `t_resource` (
-- ----------------------------
drop table if exists `t_spark_env`;
create table `t_spark_env` (
- `id` bigint not null auto_increment comment
'id',
- `spark_name` varchar(128) collate
utf8mb4_general_ci not null comment 'spark instance name',
- `spark_home` varchar(255) collate
utf8mb4_general_ci not null comment 'spark home path',
- `version` varchar(64) collate
utf8mb4_general_ci not null comment 'spark version',
- `scala_version` varchar(64) collate
utf8mb4_general_ci not null comment 'scala version of spark',
- `spark_conf` text collate utf8mb4_general_ci
not null comment 'spark-conf',
- `is_default` tinyint not null default 0 comment
'whether default version or not',
- `description` varchar(255) collate
utf8mb4_general_ci default null comment 'description',
- `create_time` datetime default null comment
'create time',
- primary key (`id`) using btree,
- unique key `un_env_name` (`spark_name`) using
btree
+ `id` bigint not null auto_increment comment 'id',
+ `spark_name` varchar(128) collate utf8mb4_general_ci not null comment 'spark
instance name',
+ `spark_home` varchar(255) collate utf8mb4_general_ci not null comment 'spark
home path',
+ `version` varchar(64) collate utf8mb4_general_ci not null comment 'spark
version',
+ `scala_version` varchar(64) collate utf8mb4_general_ci not null comment
'scala version of spark',
+ `spark_conf` text collate utf8mb4_general_ci not null comment 'spark-conf',
+ `is_default` tinyint not null default 0 comment 'whether default version or
not',
+ `description` varchar(255) collate utf8mb4_general_ci default null comment
'description',
+ `create_time` datetime default null comment 'create time',
+primary key (`id`) using btree,
+unique key `un_env_name` (`spark_name`) using btree
) engine=innodb auto_increment=100000 default charset=utf8mb4
collate=utf8mb4_general_ci;
@@ -566,72 +565,71 @@ create table `t_spark_env` (
-- ----------------------------
drop table if exists `t_spark_app`;
create table `t_spark_app` (
- `id` bigint not null auto_increment,
- `team_id` bigint not null,
- `job_type` tinyint default null,
- `execution_mode` tinyint default null,
- `resource_from` tinyint default null,
- `project_id` bigint default null,
- `job_name` varchar(255) collate
utf8mb4_general_ci default null,
- `module` varchar(255) collate
utf8mb4_general_ci default null,
- `jar` varchar(255) collate utf8mb4_general_ci
default null,
- `jar_check_sum` bigint default null,
- `main_class` varchar(255) collate
utf8mb4_general_ci default null,
- `args` text collate utf8mb4_general_ci,
- `options` text collate utf8mb4_general_ci,
- `hot_params` text collate utf8mb4_general_ci,
- `user_id` bigint default null,
- `app_id` varchar(64) collate utf8mb4_general_ci
default null,
- `app_type` tinyint default null,
- `duration` bigint default null,
- `job_id` varchar(64) collate utf8mb4_general_ci
default null,
- `job_manager_url` varchar(255) collate
utf8mb4_general_ci default null,
- `version_id` bigint default null,
- `cluster_id` varchar(45) collate
utf8mb4_general_ci default null,
- `k8s_name` varchar(63) collate
utf8mb4_general_ci default null,
- `k8s_namespace` varchar(63) collate
utf8mb4_general_ci default null,
- `spark_image` varchar(128) collate
utf8mb4_general_ci default null,
- `state` int default null,
- `restart_size` int default null,
- `restart_count` int default null,
- `cp_threshold` int default null,
- `cp_max_failure_interval` int default null,
- `cp_failure_rate_interval` int default null,
- `cp_failure_action` tinyint default null,
- `dynamic_properties` text collate
utf8mb4_general_ci,
- `description` varchar(255) collate
utf8mb4_general_ci default null,
- `resolve_order` tinyint default null,
- `k8s_rest_exposed_type` tinyint default null,
- `jm_memory` int default null,
- `tm_memory` int default null,
- `total_task` int default null,
- `total_tm` int default null,
- `total_slot` int default null,
- `available_slot` int default null,
- `option_state` tinyint default null,
- `tracking` tinyint default null,
- `create_time` datetime default null comment
'create time',
- `modify_time` datetime default null comment
'modify time',
- `option_time` datetime default null,
- `release` tinyint default 1,
- `build` tinyint default 1,
- `start_time` datetime default null,
- `end_time` datetime default null,
- `alert_id` bigint default null,
- `k8s_pod_template` text collate
utf8mb4_general_ci,
- `k8s_jm_pod_template` text collate
utf8mb4_general_ci,
- `k8s_tm_pod_template` text collate
utf8mb4_general_ci,
- `k8s_hadoop_integration` tinyint default 0,
- `spark_cluster_id` bigint default null,
- `ingress_template` text collate
utf8mb4_general_ci,
- `default_mode_ingress` text collate
utf8mb4_general_ci,
- `tags` varchar(500) default null,
- `probing` tinyint default 0,
- `hadoop_user` varchar(64) collate
utf8mb4_general_ci default null,
- primary key (`id`) using btree,
- key `inx_job_type` (`job_type`) using btree,
- key `inx_track` (`tracking`) using btree,
- index `inx_team` (`team_id`) using btree
+ `id` bigint not null auto_increment,
+ `team_id` bigint not null,
+ `job_type` tinyint default null,
+ `execution_mode` tinyint default null,
+ `resource_from` tinyint default null,
+ `project_id` bigint default null,
+ `job_name` varchar(255) collate utf8mb4_general_ci default null,
+ `module` varchar(255) collate utf8mb4_general_ci default null,
+ `jar` varchar(255) collate utf8mb4_general_ci default null,
+ `jar_check_sum` bigint default null,
+ `main_class` varchar(255) collate utf8mb4_general_ci default null,
+ `args` text collate utf8mb4_general_ci,
+ `options` text collate utf8mb4_general_ci,
+ `hot_params` text collate utf8mb4_general_ci,
+ `user_id` bigint default null,
+ `app_id` varchar(64) collate utf8mb4_general_ci default null,
+ `app_type` tinyint default null,
+ `duration` bigint default null,
+ `job_id` varchar(64) collate utf8mb4_general_ci default null,
+ `job_manager_url` varchar(255) collate utf8mb4_general_ci default null,
+ `version_id` bigint default null,
+ `cluster_id` varchar(45) collate utf8mb4_general_ci default null,
+ `k8s_name` varchar(63) collate utf8mb4_general_ci default null,
+ `k8s_namespace` varchar(63) collate utf8mb4_general_ci default null,
+ `spark_image` varchar(128) collate utf8mb4_general_ci default null,
+ `state` int default null,
+ `restart_size` int default null,
+ `restart_count` int default null,
+ `cp_threshold` int default null,
+ `cp_max_failure_interval` int default null,
+ `cp_failure_rate_interval` int default null,
+ `cp_failure_action` tinyint default null,
+ `dynamic_properties` text collate utf8mb4_general_ci,
+ `description` varchar(255) collate utf8mb4_general_ci default null,
+ `resolve_order` tinyint default null,
+ `k8s_rest_exposed_type` tinyint default null,
+ `jm_memory` int default null,
+ `tm_memory` int default null,
+ `total_task` int default null,
+ `total_tm` int default null,
+ `total_slot` int default null,
+ `available_slot` int default null,
+ `option_state` tinyint default null,
+ `tracking` tinyint default null,
+ `create_time` datetime default null comment 'create time',
+ `modify_time` datetime default null comment 'modify time',
+ `option_time` datetime default null,
+ `release` tinyint default 1,
+ `build` tinyint default 1,
+ `start_time` datetime default null,
+ `end_time` datetime default null,
+ `alert_id` bigint default null,
+ `k8s_pod_template` text collate utf8mb4_general_ci,
+ `k8s_jm_pod_template` text collate utf8mb4_general_ci,
+ `k8s_tm_pod_template` text collate utf8mb4_general_ci,
+ `k8s_hadoop_integration` tinyint default 0,
+ `spark_cluster_id` bigint default null,
+ `ingress_template` text collate utf8mb4_general_ci,
+ `default_mode_ingress` text collate utf8mb4_general_ci,
+ `tags` varchar(500) default null,
+ `hadoop_user` varchar(64) collate utf8mb4_general_ci default null,
+ primary key (`id`) using btree,
+ key `inx_job_type` (`job_type`) using btree,
+ key `inx_track` (`tracking`) using btree,
+ index `inx_team` (`team_id`) using btree
) engine=innodb auto_increment=100000 default charset=utf8mb4
collate=utf8mb4_general_ci;
set foreign_key_checks = 1;
diff --git
a/streampark-console/streampark-console-service/src/main/assembly/script/schema/pgsql-schema.sql
b/streampark-console/streampark-console-service/src/main/assembly/script/schema/pgsql-schema.sql
index cb310abe5..0b1f0ece9 100644
---
a/streampark-console/streampark-console-service/src/main/assembly/script/schema/pgsql-schema.sql
+++
b/streampark-console/streampark-console-service/src/main/assembly/script/schema/pgsql-schema.sql
@@ -250,7 +250,6 @@ create table "public"."t_flink_app" (
"ingress_template" text collate "pg_catalog"."default",
"default_mode_ingress" text collate "pg_catalog"."default",
"tags" varchar(500) collate "pg_catalog"."default",
- "probing" boolean default false,
"hadoop_user" varchar(63) collate "pg_catalog"."default"
)
;
@@ -619,20 +618,20 @@ create sequence "public"."streampark_t_resource_id_seq"
increment 1 start 10000 cache 1 minvalue 10000 maxvalue
9223372036854775807;
create table "public"."t_resource" (
- "id" int8 not null default
nextval('streampark_t_resource_id_seq'::regclass),
- "resource_name" varchar(128) collate
"pg_catalog"."default" not null,
- "resource_type" int4,
- "resource_path" varchar(255) default
null,
- "resource" text collate
"pg_catalog"."default",
- "engine_type" int4,
- "main_class" varchar(255) collate
"pg_catalog"."default",
- "description" text collate
"pg_catalog"."default" default null,
- "creator_id" int8 not null,
- "connector_required_options" text
default null,
- "connector_optional_options" text
default null,
- "team_id" int8 not null,
- "create_time" timestamp(6),
- "modify_time" timestamp(6)
+ "id" int8 not null default
nextval('streampark_t_resource_id_seq'::regclass),
+ "resource_name" varchar(128) collate "pg_catalog"."default" not null,
+ "resource_type" int4,
+ "resource_path" varchar(255) default null,
+ "resource" text collate "pg_catalog"."default",
+ "engine_type" int4,
+ "main_class" varchar(255) collate "pg_catalog"."default",
+ "description" text collate "pg_catalog"."default" default null,
+ "creator_id" int8 not null,
+ "connector_required_options" text default null,
+ "connector_optional_options" text default null,
+ "team_id" int8 not null,
+ "create_time" timestamp(6),
+ "modify_time" timestamp(6)
)
;
comment on column "public"."t_resource"."id" is 'Resource id';
diff --git
a/streampark-console/streampark-console-service/src/main/assembly/script/upgrade/mysql/2.2.0.sql
b/streampark-console/streampark-console-service/src/main/assembly/script/upgrade/mysql/2.2.0.sql
index 355f1b0de..47b8b5c1c 100644
---
a/streampark-console/streampark-console-service/src/main/assembly/script/upgrade/mysql/2.2.0.sql
+++
b/streampark-console/streampark-console-service/src/main/assembly/script/upgrade/mysql/2.2.0.sql
@@ -43,9 +43,6 @@ unique key `un_team_vcode_inx` (`team_id`,`resource_name`)
using btree
alter table `t_flink_sql`
add column `team_resource` varchar(64) default null;
-alter table `t_flink_app`
- add column `probing` tinyint default 0;
-
alter table `t_flink_app`
add column `hadoop_user` varchar(64) default null;
diff --git
a/streampark-console/streampark-console-service/src/main/assembly/script/upgrade/pgsql/2.2.0.sql
b/streampark-console/streampark-console-service/src/main/assembly/script/upgrade/pgsql/2.2.0.sql
index a2add60cf..8406feaba 100644
---
a/streampark-console/streampark-console-service/src/main/assembly/script/upgrade/pgsql/2.2.0.sql
+++
b/streampark-console/streampark-console-service/src/main/assembly/script/upgrade/pgsql/2.2.0.sql
@@ -59,9 +59,6 @@ create index "un_team_dname_inx" on "public"."t_resource"
using btree (
alter table "public"."t_flink_sql"
add column "team_resource" varchar(64) default null;
-alter table "public"."t_flink_app"
-add column "probing" boolean default false;
-
alter table "public"."t_flink_cluster"
add column "job_manager_url" varchar(150) collate "pg_catalog"."default",
add column "start_time" timestamp(6) collate "pg_catalog"."default",
diff --git
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/AppBuildPipeline.java
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/AppBuildPipeline.java
index 9314a0714..a8bd32ee7 100644
---
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/AppBuildPipeline.java
+++
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/AppBuildPipeline.java
@@ -22,7 +22,7 @@ import org.apache.streampark.console.base.util.JacksonUtils;
import org.apache.streampark.flink.packer.pipeline.BuildPipeline;
import org.apache.streampark.flink.packer.pipeline.BuildResult;
import org.apache.streampark.flink.packer.pipeline.PipeError;
-import org.apache.streampark.flink.packer.pipeline.PipeSnapshot;
+import org.apache.streampark.flink.packer.pipeline.PipelineSnapshot;
import org.apache.streampark.flink.packer.pipeline.PipelineStatusEnum;
import org.apache.streampark.flink.packer.pipeline.PipelineStepStatusEnum;
import org.apache.streampark.flink.packer.pipeline.PipelineTypeEnum;
@@ -253,7 +253,7 @@ public class AppBuildPipeline {
}
/** Create object from PipeSnapshot */
- public static AppBuildPipeline fromPipeSnapshot(@Nonnull PipeSnapshot
snapshot) {
+ public static AppBuildPipeline fromPipeSnapshot(@Nonnull PipelineSnapshot
snapshot) {
return new AppBuildPipeline()
.setPipeType(snapshot.pipeType())
.setPipeStatus(snapshot.pipeStatus())
diff --git
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/Application.java
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/Application.java
index 4e491487b..856cd2e1a 100644
---
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/Application.java
+++
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/Application.java
@@ -216,8 +216,6 @@ public class Application implements Serializable {
@TableField(updateStrategy = FieldStrategy.IGNORED)
private String tags;
- private Boolean probing = false;
-
/** running job */
private transient JobsOverview.Task overview;
diff --git
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/SparkApplication.java
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/SparkApplication.java
index eb8b6688d..76866e4ce 100644
---
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/SparkApplication.java
+++
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/SparkApplication.java
@@ -212,8 +212,6 @@ public class SparkApplication implements Serializable {
private String tags;
- private Boolean probing = false;
-
/** running job */
private transient JobsOverview.Task overview;
diff --git
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/FlinkAppStateEnum.java
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/FlinkAppStateEnum.java
index 6c3e8d447..72496957e 100644
---
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/FlinkAppStateEnum.java
+++
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/FlinkAppStateEnum.java
@@ -100,9 +100,6 @@ public enum FlinkAppStateEnum {
/** Job SUCCEEDED on yarn. */
SUCCEEDED(20),
- /** Job auto Health probe */
- PROBING(21),
-
/** Has killed in Yarn. */
KILLED(-9);
diff --git
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/SparkAppStateEnum.java
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/SparkAppStateEnum.java
index 12bf74018..8e7a83816 100644
---
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/SparkAppStateEnum.java
+++
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/SparkAppStateEnum.java
@@ -96,9 +96,6 @@ public enum SparkAppStateEnum {
/** Job SUCCEEDED on yarn. */
SUCCEEDED(20),
- /** Job auto Health probe */
- PROBING(21),
-
/** Has killed in Yarn. */
KILLED(-9);
diff --git
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/mapper/ApplicationMapper.java
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/mapper/ApplicationMapper.java
index 970ddc734..f928dcc84 100644
---
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/mapper/ApplicationMapper.java
+++
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/mapper/ApplicationMapper.java
@@ -37,8 +37,6 @@ public interface ApplicationMapper extends
BaseMapper<Application> {
List<Application> selectAppsByTeamId(@Param("teamId") Long teamId);
- List<Application> selectProbeApps();
-
boolean mapping(@Param("app") Application appParam);
List<String> selectRecentK8sNamespaces(@Param("limitSize") Integer limit);
diff --git
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/mapper/SparkApplicationMapper.java
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/mapper/SparkApplicationMapper.java
index e260facac..674e2fc98 100644
---
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/mapper/SparkApplicationMapper.java
+++
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/mapper/SparkApplicationMapper.java
@@ -38,8 +38,6 @@ public interface SparkApplicationMapper extends
BaseMapper<SparkApplication> {
void persistMetrics(@Param("app") SparkApplication application);
- List<SparkApplication> selectProbeApps();
-
boolean mapping(@Param("app") SparkApplication appParam);
List<String> selectRecentK8sNamespaces(@Param("limitSize") Integer limit);
diff --git
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/ApplicationManageService.java
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/ApplicationManageService.java
index a5263fee5..1a1a2fa85 100644
---
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/ApplicationManageService.java
+++
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/ApplicationManageService.java
@@ -164,11 +164,4 @@ public interface ApplicationManageService extends
IService<Application> {
*/
List<Application> listByTeamIdAndExecutionModes(
Long teamId, Collection<FlinkExecutionMode> executionModeEnums);
-
- /**
- * Retrieves a list of applications be probing or need to probe.
- *
- * @return a list of applications be probing or need to probe.
- */
- List<Application> listProbeApps();
}
diff --git
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/SparkApplicationManageService.java
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/SparkApplicationManageService.java
index 346c5d69e..d57b7c9d3 100644
---
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/SparkApplicationManageService.java
+++
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/SparkApplicationManageService.java
@@ -164,11 +164,4 @@ public interface SparkApplicationManageService extends
IService<SparkApplication
*/
List<SparkApplication> listByTeamIdAndExecutionModes(
Long teamId, Collection<SparkExecutionMode> executionModeEnums);
-
- /**
- * Retrieves a list of applications be probing or need to probe.
- *
- * @return a list of applications be probing or need to probe.
- */
- List<SparkApplication> listProbeApps();
}
diff --git
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationManageServiceImpl.java
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationManageServiceImpl.java
index be9f534d2..5407cd44b 100644
---
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationManageServiceImpl.java
+++
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationManageServiceImpl.java
@@ -680,10 +680,6 @@ public class ApplicationManageServiceImpl extends
ServiceImpl<ApplicationMapper,
.collect(Collectors.toSet())));
}
- public List<Application> listProbeApps() {
- return this.baseMapper.selectProbeApps();
- }
-
@Override
public boolean checkBuildAndUpdate(Application appParam) {
boolean build = appParam.getBuild();
diff --git
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/SparkApplicationManageServiceImpl.java
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/SparkApplicationManageServiceImpl.java
index 985efb328..930d24271 100644
---
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/SparkApplicationManageServiceImpl.java
+++
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/SparkApplicationManageServiceImpl.java
@@ -606,10 +606,6 @@ public class SparkApplicationManageServiceImpl
.collect(Collectors.toSet())));
}
- public List<SparkApplication> listProbeApps() {
- return this.baseMapper.selectProbeApps();
- }
-
@Override
public boolean checkBuildAndUpdate(SparkApplication appParam) {
boolean build = appParam.getBuild();
diff --git
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/AppBuildPipeServiceImpl.java
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/AppBuildPipeServiceImpl.java
index b0f19d1f8..d7f1b0f9a 100644
---
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/AppBuildPipeServiceImpl.java
+++
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/AppBuildPipeServiceImpl.java
@@ -73,8 +73,8 @@ import
org.apache.streampark.flink.packer.pipeline.FlinkK8sApplicationBuildReque
import org.apache.streampark.flink.packer.pipeline.FlinkK8sSessionBuildRequest;
import
org.apache.streampark.flink.packer.pipeline.FlinkRemotePerJobBuildRequest;
import
org.apache.streampark.flink.packer.pipeline.FlinkYarnApplicationBuildRequest;
-import org.apache.streampark.flink.packer.pipeline.PipeSnapshot;
import org.apache.streampark.flink.packer.pipeline.PipeWatcher;
+import org.apache.streampark.flink.packer.pipeline.PipelineSnapshot;
import org.apache.streampark.flink.packer.pipeline.PipelineStatusEnum;
import org.apache.streampark.flink.packer.pipeline.PipelineTypeEnum;
import
org.apache.streampark.flink.packer.pipeline.impl.FlinkK8sApplicationBuildPipeline;
@@ -207,7 +207,7 @@ public class AppBuildPipeServiceImpl
pipeline.registerWatcher(
new PipeWatcher() {
@Override
- public void onStart(PipeSnapshot snapshot) {
+ public void onStart(PipelineSnapshot snapshot) {
AppBuildPipeline buildPipeline =
AppBuildPipeline.fromPipeSnapshot(snapshot).setAppId(app.getId());
saveEntity(buildPipeline);
@@ -285,14 +285,14 @@ public class AppBuildPipeServiceImpl
}
@Override
- public void onStepStateChange(PipeSnapshot snapshot) {
+ public void onStepStateChange(PipelineSnapshot snapshot) {
AppBuildPipeline buildPipeline =
AppBuildPipeline.fromPipeSnapshot(snapshot).setAppId(app.getId());
saveEntity(buildPipeline);
}
@Override
- public void onFinish(PipeSnapshot snapshot, BuildResult result) {
+ public void onFinish(PipelineSnapshot snapshot, BuildResult result) {
AppBuildPipeline buildPipeline =
AppBuildPipeline.fromPipeSnapshot(snapshot)
.setAppId(app.getId())
diff --git
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/SparkAppBuildPipeServiceImpl.java
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/SparkAppBuildPipeServiceImpl.java
index 620e4e4a5..22f3d6050 100644
---
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/SparkAppBuildPipeServiceImpl.java
+++
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/SparkAppBuildPipeServiceImpl.java
@@ -61,8 +61,8 @@ import org.apache.streampark.flink.packer.maven.Artifact;
import org.apache.streampark.flink.packer.maven.DependencyInfo;
import org.apache.streampark.flink.packer.pipeline.BuildPipeline;
import org.apache.streampark.flink.packer.pipeline.BuildResult;
-import org.apache.streampark.flink.packer.pipeline.PipeSnapshot;
import org.apache.streampark.flink.packer.pipeline.PipeWatcher;
+import org.apache.streampark.flink.packer.pipeline.PipelineSnapshot;
import org.apache.streampark.flink.packer.pipeline.PipelineStatusEnum;
import
org.apache.streampark.flink.packer.pipeline.SparkYarnApplicationBuildRequest;
import
org.apache.streampark.flink.packer.pipeline.impl.SparkYarnApplicationBuildPipeline;
@@ -180,7 +180,7 @@ public class SparkAppBuildPipeServiceImpl
pipeline.registerWatcher(
new PipeWatcher() {
@Override
- public void onStart(PipeSnapshot snapshot) {
+ public void onStart(PipelineSnapshot snapshot) {
AppBuildPipeline buildPipeline =
AppBuildPipeline.fromPipeSnapshot(snapshot).setAppId(app.getId());
saveEntity(buildPipeline);
@@ -258,14 +258,14 @@ public class SparkAppBuildPipeServiceImpl
}
@Override
- public void onStepStateChange(PipeSnapshot snapshot) {
+ public void onStepStateChange(PipelineSnapshot snapshot) {
AppBuildPipeline buildPipeline =
AppBuildPipeline.fromPipeSnapshot(snapshot).setAppId(app.getId());
saveEntity(buildPipeline);
}
@Override
- public void onFinish(PipeSnapshot snapshot, BuildResult result) {
+ public void onFinish(PipelineSnapshot snapshot, BuildResult result) {
AppBuildPipeline buildPipeline =
AppBuildPipeline.fromPipeSnapshot(snapshot)
.setAppId(app.getId())
diff --git
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/watcher/FlinkAppHttpWatcher.java
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/watcher/FlinkAppHttpWatcher.java
index be6e19611..b0664d0be 100644
---
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/watcher/FlinkAppHttpWatcher.java
+++
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/watcher/FlinkAppHttpWatcher.java
@@ -292,20 +292,24 @@ public class FlinkAppHttpWatcher {
FlinkExecutionMode execMode = application.getFlinkExecutionMode();
if (FlinkExecutionMode.YARN_APPLICATION.equals(execMode)
|| FlinkExecutionMode.YARN_PER_JOB.equals(execMode)) {
- optional =
- jobsOverview.getJobs().size() > 1
- ? jobsOverview.getJobs().stream()
- .filter(a -> StringUtils.equals(application.getJobId(),
a.getId()))
- .findFirst()
- : jobsOverview.getJobs().stream().findFirst();
+ if (jobsOverview.getJobs() != null) {
+ optional =
+ jobsOverview.getJobs().size() > 1
+ ? jobsOverview.getJobs().stream()
+ .filter(a -> StringUtils.equals(application.getJobId(),
a.getId()))
+ .findFirst()
+ : jobsOverview.getJobs().stream().findFirst();
+ } else {
+ optional = Optional.empty();
+ }
} else {
optional =
jobsOverview.getJobs().stream()
.filter(x -> x.getId().equals(application.getJobId()))
.findFirst();
}
- if (optional.isPresent()) {
+ if (optional.isPresent()) {
JobsOverview.Job jobOverview = optional.get();
FlinkAppStateEnum currentState =
FlinkAppStateEnum.of(jobOverview.getState());
diff --git
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/watcher/FlinkK8sChangeEventListener.java
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/watcher/FlinkK8sChangeEventListener.java
index d2e7b7539..332186513 100644
---
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/watcher/FlinkK8sChangeEventListener.java
+++
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/watcher/FlinkK8sChangeEventListener.java
@@ -25,7 +25,6 @@ import
org.apache.streampark.console.core.enums.FlinkAppStateEnum;
import org.apache.streampark.console.core.enums.OptionStateEnum;
import org.apache.streampark.console.core.metrics.flink.CheckPoints;
import org.apache.streampark.console.core.service.alert.AlertService;
-import
org.apache.streampark.console.core.service.application.ApplicationInfoService;
import
org.apache.streampark.console.core.service.application.ApplicationManageService;
import org.apache.streampark.flink.kubernetes.enums.FlinkJobState;
import org.apache.streampark.flink.kubernetes.enums.FlinkK8sExecuteMode;
@@ -66,15 +65,13 @@ public class FlinkK8sChangeEventListener {
@Lazy @Autowired private ApplicationManageService applicationManageService;
- @Autowired private ApplicationInfoService applicationInfoService;
-
@Lazy @Autowired private AlertService alertService;
@Lazy @Autowired private FlinkCheckpointProcessor checkpointProcessor;
@Qualifier("streamparkNotifyExecutor")
@Autowired
- private Executor executorService;
+ private Executor executor;
/**
* Catch FlinkJobStatusChangeEvent then storage it persistently to db.
Actually update
@@ -101,14 +98,7 @@ public class FlinkK8sChangeEventListener {
|| FlinkAppStateEnum.LOST == state
|| FlinkAppStateEnum.RESTARTING == state
|| FlinkAppStateEnum.FINISHED == state) {
- executorService.execute(
- () -> {
- if (app.getProbing()) {
- log.info("application with id {} is probing, don't send alert",
app.getId());
- return;
- }
- alertService.alert(app.getAlertId(), AlertTemplate.of(app, state));
- });
+ executor.execute(() -> alertService.alert(app.getAlertId(),
AlertTemplate.of(app, state)));
}
}
diff --git
a/streampark-console/streampark-console-service/src/main/resources/db/schema-h2.sql
b/streampark-console/streampark-console-service/src/main/resources/db/schema-h2.sql
index 3354234b6..e3fca631f 100644
---
a/streampark-console/streampark-console-service/src/main/resources/db/schema-h2.sql
+++
b/streampark-console/streampark-console-service/src/main/resources/db/schema-h2.sql
@@ -95,7 +95,6 @@ create table if not exists `t_flink_app` (
`ingress_template` text ,
`default_mode_ingress` text ,
`tags` varchar(500) default null,
- `probing` tinyint default 0,
`hadoop_user` varchar(500) default null,
primary key(`id`)
);
@@ -573,7 +572,6 @@ create table if not exists `t_spark_app` (
`ingress_template` text ,
`default_mode_ingress` text ,
`tags` varchar(500) default null,
- `probing` tinyint default 0,
`hadoop_user` varchar(500) default null,
primary key(`id`)
);
diff --git
a/streampark-console/streampark-console-service/src/main/resources/mapper/core/ApplicationMapper.xml
b/streampark-console/streampark-console-service/src/main/resources/mapper/core/ApplicationMapper.xml
index 5fc3f338c..154518f29 100644
---
a/streampark-console/streampark-console-service/src/main/resources/mapper/core/ApplicationMapper.xml
+++
b/streampark-console/streampark-console-service/src/main/resources/mapper/core/ApplicationMapper.xml
@@ -75,7 +75,6 @@
<result column="rest_url" jdbcType="VARCHAR" property="restUrl"/>
<result column="rest_port" jdbcType="INTEGER" property="restPort"/>
<result column="tags" jdbcType="VARCHAR" property="tags"/>
- <result column="probing" jdbcType="INTEGER" property="probing"/>
</resultMap>
<update id="resetOptionState">
@@ -265,21 +264,6 @@
where t.team_id=#{teamId}
</select>
- <select id="selectProbeApps"
resultType="org.apache.streampark.console.core.entity.Application">
- select
- t.*,
- u.username,
- case
- when trim(u.nick_name) = ''
- then u.username
- else u.nick_name
- end as nick_name
- from t_flink_app t
- inner join t_user u
- on t.user_id = u.user_id
- where t.probing = true or (t.tracking = 1 and t.state = 13)
- </select>
-
<update id="mapping"
parameterType="org.apache.streampark.console.core.entity.Application">
update t_flink_app
<set>
diff --git
a/streampark-console/streampark-console-service/src/main/resources/mapper/core/SparkApplicationMapper.xml
b/streampark-console/streampark-console-service/src/main/resources/mapper/core/SparkApplicationMapper.xml
index bb161aa31..80217cf9d 100644
---
a/streampark-console/streampark-console-service/src/main/resources/mapper/core/SparkApplicationMapper.xml
+++
b/streampark-console/streampark-console-service/src/main/resources/mapper/core/SparkApplicationMapper.xml
@@ -76,7 +76,6 @@
<result column="rest_url" jdbcType="VARCHAR" property="restUrl"/>
<result column="rest_port" jdbcType="INTEGER" property="restPort"/>
<result column="tags" jdbcType="VARCHAR" property="tags"/>
- <result column="probing" jdbcType="INTEGER" property="probing"/>
</resultMap>
<update id="resetOptionState">
@@ -190,21 +189,6 @@
where t.team_id=#{teamId}
</select>
- <select id="selectProbeApps"
resultType="org.apache.streampark.console.core.entity.SparkApplication">
- select
- t.*,
- u.username,
- case
- when trim(u.nick_name) = ''
- then u.username
- else u.nick_name
- end as nick_name
- from t_spark_app t
- inner join t_user u
- on t.user_id = u.user_id
- where t.probing = true or (t.tracking = 1 and t.state = 13)
- </select>
-
<update id="mapping"
parameterType="org.apache.streampark.console.core.entity.SparkApplication">
update t_spark_app
<set>
diff --git
a/streampark-console/streampark-console-webapp/src/enums/flinkEnum.ts
b/streampark-console/streampark-console-webapp/src/enums/flinkEnum.ts
index 6098378cd..e758aacac 100644
--- a/streampark-console/streampark-console-webapp/src/enums/flinkEnum.ts
+++ b/streampark-console/streampark-console-webapp/src/enums/flinkEnum.ts
@@ -115,8 +115,6 @@ export enum AppStateEnum {
POS_TERMINATED = 19,
/** job SUCCEEDED on yarn */
SUCCEEDED = 20,
- /** Job auto Health probe */
- PROBING = 21,
/** has killed in Yarn */
KILLED = -9,
}
diff --git
a/streampark-console/streampark-console-webapp/src/locales/lang/en/flink/app.ts
b/streampark-console/streampark-console-webapp/src/locales/lang/en/flink/app.ts
index ff29d9980..8888e8e0b 100644
---
a/streampark-console/streampark-console-webapp/src/locales/lang/en/flink/app.ts
+++
b/streampark-console/streampark-console-webapp/src/locales/lang/en/flink/app.ts
@@ -275,8 +275,18 @@ export default {
'The application name already exists in YARN, cannot be repeated. Please
check',
appNameExistsInK8sMessage:
'The application name already exists in Kubernetes,cannot be repeated.
Please check',
+ appNameValid: 'The job name is invalid',
+ appNameRole: 'The job name must follow these rules: ',
appNameNotValid:
'The application name is invalid, must be (Chinese or English or "-" or
"_"), two consecutive spaces cannot appear.Please check',
+ K8sSessionClusterIdRole: 'The Kubernetes clusterId must follow these
rules:',
+ appNameK8sClusterIdRole:
+ 'The current deployment mode is Kubernetes application mode, and the job
name will be used as the clusterId in Kubernetes. Therefore, the job name must
follow these rules:',
+ appNameK8sClusterIdRoleLength: 'must be no more than 45 characters',
+ appNameK8sClusterIdRoleRegexp:
'must only contain lowercase alphanumeric characters and "-". The
required format is [a-z]([-a-z0-9]*[a-z0-9])',
+ appNameRoleContent:
'must be (Chinese or English or "-" or "_"), two consecutive spaces
cannot appear. Please check',
flinkClusterIsRequiredMessage: 'Flink Cluster is required',
flinkSqlIsRequiredMessage: 'Flink SQL is required',
tagsPlaceholder: 'Please enter tags,if more than one, separate them with
commas(,)',
@@ -296,7 +306,7 @@ export default {
flinkImagePlaceholder:
'Please enter the tag of Flink base docker image, such as:
flink:1.13.0-scala_2.11-java8',
flinkImageIsRequiredMessage: 'Flink Base Docker Image is required',
- k8sRestExposedTypePlaceholder: 'kubernetes.rest-service.exposed.type',
+ k8sRestExposedTypePlaceholder: 'Kubernetes Rest-Service Exposed Type',
hadoopXmlConfigFileTips:
'Automatically copy configuration files from system environment
parameters',
dynamicPropertiesPlaceholder:
diff --git
a/streampark-console/streampark-console-webapp/src/locales/lang/zh-CN/flink/app.ts
b/streampark-console/streampark-console-webapp/src/locales/lang/zh-CN/flink/app.ts
index 2aee8d0cb..a3d55c7fa 100644
---
a/streampark-console/streampark-console-webapp/src/locales/lang/zh-CN/flink/app.ts
+++
b/streampark-console/streampark-console-webapp/src/locales/lang/zh-CN/flink/app.ts
@@ -47,10 +47,10 @@ export default {
yarnQueue: 'Yarn队列',
mavenPom: 'maven pom',
uploadJar: '上传依赖Jar文件',
- kubernetesNamespace: 'K8S命名空间',
- kubernetesClusterId: 'K8S ClusterId',
- flinkBaseDockerImage: 'Flink基础docker镜像',
- restServiceExposedType: 'K8S服务对外类型',
+ kubernetesNamespace: 'Kubernetes 命名空间',
+ kubernetesClusterId: 'Kubernetes ClusterId',
+ flinkBaseDockerImage: 'Flink 基础docker镜像',
+ restServiceExposedType: 'Rest-Service Exposed Type',
resourceFrom: '资源来源',
uploadJobJar: '上传jar文件',
selectJobJar: '选择jar文件',
@@ -71,7 +71,7 @@ export default {
hadoopUser: 'Hadoop User',
restoreModeTip: 'flink 1.15开始支持restore模式,一般情况下不用设置该参数',
release: {
- releaseTitle: '该应用程序的当前启动正在进行中.',
+ releaseTitle: '该作业正在启动中.',
releaseDesc: '您确定要强制进行另一次构建吗',
releaseFail: '发布作业失败',
releasing: '当前作业正在发布中',
@@ -153,11 +153,11 @@ export default {
errorLog: '错误日志',
errorSummary: '错误摘要',
errorStack: '错误堆栈',
- logTitle: '启动日志 : 应用名称 [ {0} ]',
+ logTitle: '启动日志 : 作业名称 [ {0} ]',
refreshTime: '上次刷新时间',
refresh: '刷新',
start: '启动作业',
- stop: '停止应用',
+ stop: '停止作业',
savepoint: '触发 Savepoint',
recheck: '关联的项目已更新,需要重新发布此作业',
changed: '作业已更新',
@@ -265,10 +265,19 @@ export default {
appNamePlaceholder: '请输入作业名称',
appNameIsRequiredMessage: '作业名称必填',
appNameNotUniqueMessage: '作业名称必须唯一, 输入的作业名称已经存在',
- appNameExistsInYarnMessage: '应用程序名称已经在YARN集群中存在,不能重复。请检查',
- appNameExistsInK8sMessage: '该应用程序名称已经在K8S集群中存在,不能重复。请检查',
+ appNameExistsInYarnMessage: '作业名称已经在YARN集群中存在,不能重复。请检查',
+ appNameExistsInK8sMessage: '该作业名称已经在 Kubernetes 集群中存在,不能重复。请检查',
+ appNameValid: '作业名称不合法',
+ appNameRole: '作业名称必须遵循以下规则:',
+ K8sSessionClusterIdRole: 'Kubernetes 集群ID必须遵循以下规则:',
+ appNameK8sClusterIdRole:
+ '当前部署模式是 Kubernetes Application 模式,会将作业名称作为 Kubernetes 的
clusterId,因此作业名称要遵循以下规则:',
+ appNameK8sClusterIdRoleLength: '不应超过 45 个字符',
+ appNameK8sClusterIdRoleRegexp:
+ '只能由小写字母、数字和 "-" 组成,必须满足正则格式 [a-z]([-a-z0-9]*[a-z0-9])',
+ appNameRoleContent: '字符必须是(中文 或 英文 或 "-" 或 "_"),不能出现两个连续的空格',
appNameNotValid:
- '应用程序名称无效。字符必须是(中文 或 英文 或 "-" 或 "_"),不能出现两个连续的空格,请检查',
+ '作业名称无效。字符必须是(中文 或 英文 或 "-" 或 "_"),不能出现两个连续的空格,请检查',
flinkClusterIsRequiredMessage: 'Flink集群必填',
flinkSqlIsRequiredMessage: 'Flink SQL必填',
tagsPlaceholder: '请输入标签,如果超过一个,用逗号(,)分隔',
@@ -279,15 +288,15 @@ export default {
totalMemoryOptionsPlaceholder: '请选择要设置的资源参数',
tmPlaceholder: '请选择要设置的资源参数',
yarnQueuePlaceholder: '请输入yarn队列标签名称',
- descriptionPlaceholder: '请输入此应用程序的描述',
- kubernetesNamespacePlaceholder: '请输入K8S命名空间, 如: default',
- kubernetesClusterIdPlaceholder: '请选择K8S ClusterId',
+ descriptionPlaceholder: '请输入此作业的描述',
+ kubernetesNamespacePlaceholder: '请输入 Kubernetes 命名空间, 如: default',
+ kubernetesClusterIdPlaceholder: '请选择 Kubernetes ClusterId',
kubernetesClusterIdRequire:
'小写字母、数字、“-”,并且必须以字母数字字符开头和结尾,并且不超过45个字符',
- kubernetesClusterIdIsRequiredMessage: 'K8S ClusterId必填',
+ kubernetesClusterIdIsRequiredMessage: 'Kubernetes ClusterId 必填',
flinkImagePlaceholder:
'请输入Flink基础docker镜像的标签,如:flink:1.13.0-scala_2.11-java8',
flinkImageIsRequiredMessage: 'Flink基础docker镜像是必填的',
- k8sRestExposedTypePlaceholder: 'K8S服务对外类型',
+ k8sRestExposedTypePlaceholder: 'Kubernetes Rest-Service Exposed Type',
hadoopXmlConfigFileTips: '从系统环境参数自动复制配置文件',
dynamicPropertiesPlaceholder: '$key=$value,如果有多个参数,可以换行输入(-D <arg>)',
},
diff --git
a/streampark-console/streampark-console-webapp/src/locales/lang/zh-CN/setting/flinkCluster.ts
b/streampark-console/streampark-console-webapp/src/locales/lang/zh-CN/setting/flinkCluster.ts
index 4e926d749..9cfdb9b84 100644
---
a/streampark-console/streampark-console-webapp/src/locales/lang/zh-CN/setting/flinkCluster.ts
+++
b/streampark-console/streampark-console-webapp/src/locales/lang/zh-CN/setting/flinkCluster.ts
@@ -32,12 +32,12 @@ export default {
addNew: '全新集群',
yarnQueue: 'Yarn队列',
yarnSessionClusterId: 'Yarn Session模式集群ID',
- k8sNamespace: 'k8s命名空间',
- k8sClusterId: 'k8s集群ID',
- serviceAccount: 'k8s命名空间绑定的服务账号',
- k8sConf: 'k8s环境Kube配置文件',
- flinkImage: 'Flink基础docker镜像',
- k8sRestExposedType: 'K8S服务对外类型',
+ k8sNamespace: 'Kubernetes 命名空间',
+ k8sClusterId: 'Kubernetes 集群 ID',
+ serviceAccount: 'Kubernetes 服务账号',
+ k8sConf: 'Kube 配置文件',
+ flinkImage: 'Flink 基础 Docker 镜像',
+ k8sRestExposedType: 'Kubernetes Rest-Service Exposed Type',
resolveOrder: '类加载顺序',
taskSlots: '任务槽数',
jmOptions: 'JM内存',
@@ -54,7 +54,7 @@ export default {
addressNoRemoteMode: '请输入集群地址,例如:http://host:port',
yarnSessionClusterId: '请输入Yarn Session模式集群ID',
k8sConf: '示例:~/.kube/config',
- flinkImage: '请输入Flink基础docker镜像的标签,如:flink:1.13.0-scala_2.11-java8',
+ flinkImage: '请输入 Flink 基础 docker 镜像的标签,如:flink:1.13.0-scala_2.11-java8',
k8sRestExposedType: 'kubernetes.rest-service.exposed.type',
resolveOrder: 'classloader.resolve-order',
taskSlots: '每个TaskManager的插槽数',
diff --git
a/streampark-console/streampark-console-webapp/src/locales/lang/zh-CN/setting/system.ts
b/streampark-console/streampark-console-webapp/src/locales/lang/zh-CN/setting/system.ts
index 5974b8909..9738766a0 100644
---
a/streampark-console/streampark-console-webapp/src/locales/lang/zh-CN/setting/system.ts
+++
b/streampark-console/streampark-console-webapp/src/locales/lang/zh-CN/setting/system.ts
@@ -30,7 +30,7 @@ export default {
name: '控制台配置',
},
ingressSetting: {
- name: 'k8s Ingress 配置',
+ name: 'Kubernetes Ingress 配置',
},
},
update: {
diff --git
a/streampark-console/streampark-console-webapp/src/views/flink/app/components/State.less
b/streampark-console/streampark-console-webapp/src/views/flink/app/components/State.less
index 895a612a0..e790359d9 100644
---
a/streampark-console/streampark-console-webapp/src/views/flink/app/components/State.less
+++
b/streampark-console/streampark-console-webapp/src/views/flink/app/components/State.less
@@ -143,14 +143,3 @@
}
}
-@keyframes probing-color {
- 0% {
- border-color: #2febc9;
- box-shadow: 0 0 1px #2febc9, inset 0 0 2px #2febc9;
- }
-
- 100% {
- border-color: #2febc9;
- box-shadow: 0 0 10px #2febc9, inset 0 0 5px #2febc9;
- }
-}
diff --git
a/streampark-console/streampark-console-webapp/src/views/flink/app/hooks/useCreateAndEditSchema.ts
b/streampark-console/streampark-console-webapp/src/views/flink/app/hooks/useCreateAndEditSchema.ts
index 25847a28f..d3263d2a1 100644
---
a/streampark-console/streampark-console-webapp/src/views/flink/app/hooks/useCreateAndEditSchema.ts
+++
b/streampark-console/streampark-console-webapp/src/views/flink/app/hooks/useCreateAndEditSchema.ts
@@ -29,6 +29,7 @@ import {
renderTotalMemory,
renderYarnQueue,
renderFlinkCluster,
+ renderJobName,
} from './useFlinkRender';
import { fetchCheckName } from '/@/api/flink/app';
@@ -279,26 +280,33 @@ export const useCreateAndEditSchema = (
});
/* Detect job name field */
- async function getJobNameCheck(_rule: RuleObject, value: StoreValue) {
+ async function getJobNameCheck(_rule: RuleObject, value: StoreValue, model:
Recordable) {
if (value === null || value === undefined || value === '') {
return
Promise.reject(t('flink.app.addAppTips.appNameIsRequiredMessage'));
- } else {
- const params = { jobName: value };
- if (edit?.appId) Object.assign(params, { id: edit.appId });
- const res = await fetchCheckName(params);
- switch (parseInt(res)) {
- case 0:
- return Promise.resolve();
- case 1:
- return
Promise.reject(t('flink.app.addAppTips.appNameNotUniqueMessage'));
- case 2:
- return
Promise.reject(t('flink.app.addAppTips.appNameExistsInYarnMessage'));
- case 3:
- return
Promise.reject(t('flink.app.addAppTips.appNameExistsInK8sMessage'));
- default:
- return Promise.reject(t('flink.app.addAppTips.appNameNotValid'));
+ }
+ if (model.executionMode == ExecModeEnum.KUBERNETES_APPLICATION) {
+ const regexp = /^(?=.{1,45}$)[a-z]([-a-z0-9]*[a-z0-9])$/;
+ if (!regexp.test(value)) {
+ return Promise.reject(t('flink.app.addAppTips.appNameValid'));
}
}
+ const params = { jobName: value };
+ if (edit?.appId) {
+ Object.assign(params, { id: edit.appId });
+ }
+ const res = await fetchCheckName(params);
+ switch (parseInt(res)) {
+ case 0:
+ return Promise.resolve();
+ case 1:
+ return
Promise.reject(t('flink.app.addAppTips.appNameNotUniqueMessage'));
+ case 2:
+ return
Promise.reject(t('flink.app.addAppTips.appNameExistsInYarnMessage'));
+ case 3:
+ return
Promise.reject(t('flink.app.addAppTips.appNameExistsInK8sMessage'));
+ default:
+ return Promise.reject(t('flink.app.addAppTips.appNameValid'));
+ }
}
const getFlinkFormOtherSchemas = computed((): FormSchema[] => {
@@ -313,8 +321,16 @@ export const useCreateAndEditSchema = (
label: t('flink.app.appName'),
component: 'Input',
componentProps: { placeholder:
t('flink.app.addAppTips.appNamePlaceholder') },
- dynamicRules: () => {
- return [{ required: true, trigger: 'blur', validator:
getJobNameCheck }];
+ render: (param) => renderJobName(param),
+ dynamicRules: ({ model }) => {
+ return [
+ {
+ required: true,
+ trigger: 'blur',
+ validator: (rule: RuleObject, value: StoreValue) =>
+ getJobNameCheck(rule, value, model),
+ },
+ ];
},
},
{
diff --git
a/streampark-console/streampark-console-webapp/src/views/flink/app/hooks/useFlinkRender.tsx
b/streampark-console/streampark-console-webapp/src/views/flink/app/hooks/useFlinkRender.tsx
index 45b22f1d5..62975b584 100644
---
a/streampark-console/streampark-console-webapp/src/views/flink/app/hooks/useFlinkRender.tsx
+++
b/streampark-console/streampark-console-webapp/src/views/flink/app/hooks/useFlinkRender.tsx
@@ -44,6 +44,7 @@ import {
FailoverStrategyEnum,
RestoreModeEnum,
ClusterStateEnum,
+ ExecModeEnum,
} from '/@/enums/flinkEnum';
import { useI18n } from '/@/hooks/web/useI18n';
import { fetchYarnQueueList } from '/@/api/setting/yarnQueue';
@@ -299,6 +300,45 @@ export const renderFlinkCluster = (clusters, { model,
field }: RenderCallbackPar
);
};
+export const renderJobName = ({ model, field }: RenderCallbackParams) => {
+ return (
+ <div>
+ <Input
+ name="jobName"
+ placeholder={t('flink.app.addAppTips.appNamePlaceholder')}
+ value={model[field]}
+ onInput={(e: ChangeEvent) => (model[field] = e?.target?.value)}
+ />
+ <p class="conf-desc mt-10px">
+ <span class="note-info">
+ <Tag color="#2db7f5" class="tag-note">
+ {t('flink.app.noteInfo.note')}
+ </Tag>
+ {model.executionMode == ExecModeEnum.KUBERNETES_APPLICATION && (
+ <span>
+ {t('flink.app.addAppTips.appNameK8sClusterIdRole')}
+ <div>
+ <Tag color="orange"> 1.</Tag>
+ {t('flink.app.addAppTips.appNameK8sClusterIdRoleLength')}
+ </div>
+ <div>
+ <Tag color="orange"> 2.</Tag>
+ {t('flink.app.addAppTips.appNameK8sClusterIdRoleRegexp')}
+ </div>
+ </span>
+ )}
+ {model.executionMode != ExecModeEnum.KUBERNETES_APPLICATION && (
+ <span>
+ <span>{t('flink.app.addAppTips.appNameRole')}</span>
+ <span>{t('flink.app.addAppTips.appNameRoleContent')}</span>
+ </span>
+ )}
+ </span>
+ </p>
+ </div>
+ );
+};
+
/* render memory option */
export const renderDynamicProperties = ({ model, field }:
RenderCallbackParams) => {
return (
diff --git
a/streampark-flink/streampark-flink-client/streampark-flink-client-core/src/main/scala/org/apache/streampark/flink/client/tool/FlinkSessionClientHelper.scala
b/streampark-flink/streampark-flink-client/streampark-flink-client-core/src/main/scala/org/apache/streampark/flink/client/tool/FlinkSessionClientHelper.scala
index b47a5555e..bd3d8b5f5 100644
---
a/streampark-flink/streampark-flink-client/streampark-flink-client-core/src/main/scala/org/apache/streampark/flink/client/tool/FlinkSessionClientHelper.scala
+++
b/streampark-flink/streampark-flink-client/streampark-flink-client-core/src/main/scala/org/apache/streampark/flink/client/tool/FlinkSessionClientHelper.scala
@@ -27,7 +27,6 @@ import
org.apache.hc.client5.http.entity.mime.MultipartEntityBuilder
import org.apache.hc.client5.http.fluent.Request
import org.apache.hc.core5.http.ContentType
import org.apache.hc.core5.http.io.entity.StringEntity
-import org.apache.hc.core5.util.Timeout
import org.json4s.DefaultFormats
import org.json4s.jackson.JsonMethods._
import org.json4s.jackson.Serialization
diff --git
a/streampark-flink/streampark-flink-packer/src/main/scala/org/apache/streampark/flink/packer/PackerResourceGC.scala
b/streampark-flink/streampark-flink-packer/src/main/scala/org/apache/streampark/flink/packer/PackerResourceGC.scala
index b73cf0e84..801641b8f 100644
---
a/streampark-flink/streampark-flink-packer/src/main/scala/org/apache/streampark/flink/packer/PackerResourceGC.scala
+++
b/streampark-flink/streampark-flink-packer/src/main/scala/org/apache/streampark/flink/packer/PackerResourceGC.scala
@@ -30,7 +30,7 @@ import scala.util.Try
/** Garbage resource collector during packing. */
object PackerResourceGC extends Logger {
- val appWorkspacePath: String = Workspace.local.APP_WORKSPACE
+ private val appWorkspacePath: String = Workspace.local.APP_WORKSPACE
/**
* Start a building legacy resources collection process.
diff --git
a/streampark-flink/streampark-flink-packer/src/main/scala/org/apache/streampark/flink/packer/docker/DockerRetriever.scala
b/streampark-flink/streampark-flink-packer/src/main/scala/org/apache/streampark/flink/packer/docker/DockerRetriever.scala
index 142bedcc3..d966a751e 100644
---
a/streampark-flink/streampark-flink-packer/src/main/scala/org/apache/streampark/flink/packer/docker/DockerRetriever.scala
+++
b/streampark-flink/streampark-flink-packer/src/main/scala/org/apache/streampark/flink/packer/docker/DockerRetriever.scala
@@ -42,7 +42,7 @@ object DockerRetriever {
* docker http client builder, use ApacheDockerHttpClient by default todo
support custom http
* client configuration parameters in unified configurations in the future
*/
- lazy val dockerHttpClientBuilder: ApacheDockerHttpClient.Builder =
+ private lazy val dockerHttpClientBuilder: ApacheDockerHttpClient.Builder =
new ApacheDockerHttpClient.Builder()
.dockerHost(dockerClientConf.getDockerHost)
.sslConfig(dockerClientConf.getSSLConfig)
@@ -59,10 +59,10 @@ object DockerRetriever {
}
/** set docker-host for kata */
- def setDockerHost(): Unit = {
- val dockerhost: String = InternalConfigHolder.get(CommonConfig.DOCKER_HOST)
- if (Utils.isNotEmpty(dockerhost)) {
- val dockerHostUri: URI = new URI(dockerhost)
+ private def setDockerHost(): Unit = {
+ val dockerHost: String = InternalConfigHolder.get(CommonConfig.DOCKER_HOST)
+ if (Utils.isNotEmpty(dockerHost)) {
+ val dockerHostUri: URI = new URI(dockerHost)
dockerHttpClientBuilder.dockerHost(dockerHostUri)
}
}
diff --git
a/streampark-flink/streampark-flink-packer/src/main/scala/org/apache/streampark/flink/packer/docker/FlinkDockerfileTemplateTrait.scala
b/streampark-flink/streampark-flink-packer/src/main/scala/org/apache/streampark/flink/packer/docker/FlinkDockerfileTemplateTrait.scala
index 319c862d4..1c412a62f 100644
---
a/streampark-flink/streampark-flink-packer/src/main/scala/org/apache/streampark/flink/packer/docker/FlinkDockerfileTemplateTrait.scala
+++
b/streampark-flink/streampark-flink-packer/src/main/scala/org/apache/streampark/flink/packer/docker/FlinkDockerfileTemplateTrait.scala
@@ -47,7 +47,7 @@ trait FlinkDockerfileTemplateTrait {
def innerMainJarPath: String = s"local:///opt/flink/usrlib/$mainJarName"
/** output dockerfile name */
- val DEFAULT_DOCKER_FILE_NAME = "Dockerfile"
+ protected val DEFAULT_DOCKER_FILE_NAME = "Dockerfile"
protected val FLINK_LIB_PATH = "lib"
protected val FLINK_HOME: String = "$FLINK_HOME"
diff --git
a/streampark-flink/streampark-flink-packer/src/main/scala/org/apache/streampark/flink/packer/pipeline/BuildPipeline.scala
b/streampark-flink/streampark-flink-packer/src/main/scala/org/apache/streampark/flink/packer/pipeline/BuildPipeline.scala
index 3317e101b..3e6ae7ae2 100644
---
a/streampark-flink/streampark-flink-packer/src/main/scala/org/apache/streampark/flink/packer/pipeline/BuildPipeline.scala
+++
b/streampark-flink/streampark-flink-packer/src/main/scala/org/apache/streampark/flink/packer/pipeline/BuildPipeline.scala
@@ -84,7 +84,7 @@ trait BuildPipeline extends BuildPipelineProcess with
BuildPipelineExpose with L
.toSeq: _*)
/** use to identify the log record that belongs to which pipeline instance */
- protected val logSuffix: String = s"appName=${offerBuildParam.appName}"
+ private val logSuffix: String = s"appName=${offerBuildParam.appName}"
protected var watcher: PipeWatcher = new SilentPipeWatcher
@@ -171,7 +171,7 @@ trait BuildPipeline extends BuildPipelineProcess with
BuildPipelineExpose with L
super.logError(s"[streampark-packer] $msg | $logSuffix", throwable)
/** intercept snapshot */
- def snapshot: PipeSnapshot = PipeSnapshot(
+ def snapshot: PipelineSnapshot = PipelineSnapshot(
offerBuildParam.appName,
pipeType,
getPipeStatus,
diff --git
a/streampark-flink/streampark-flink-packer/src/main/scala/org/apache/streampark/flink/packer/pipeline/PipeError.scala
b/streampark-flink/streampark-flink-packer/src/main/scala/org/apache/streampark/flink/packer/pipeline/PipelineError.scala
similarity index 100%
rename from
streampark-flink/streampark-flink-packer/src/main/scala/org/apache/streampark/flink/packer/pipeline/PipeError.scala
rename to
streampark-flink/streampark-flink-packer/src/main/scala/org/apache/streampark/flink/packer/pipeline/PipelineError.scala
diff --git
a/streampark-flink/streampark-flink-packer/src/main/scala/org/apache/streampark/flink/packer/pipeline/PipeSnapshot.scala
b/streampark-flink/streampark-flink-packer/src/main/scala/org/apache/streampark/flink/packer/pipeline/PipelineSnapshot.scala
similarity index 98%
rename from
streampark-flink/streampark-flink-packer/src/main/scala/org/apache/streampark/flink/packer/pipeline/PipeSnapshot.scala
rename to
streampark-flink/streampark-flink-packer/src/main/scala/org/apache/streampark/flink/packer/pipeline/PipelineSnapshot.scala
index dc159a4f5..e6326df21 100644
---
a/streampark-flink/streampark-flink-packer/src/main/scala/org/apache/streampark/flink/packer/pipeline/PipeSnapshot.scala
+++
b/streampark-flink/streampark-flink-packer/src/main/scala/org/apache/streampark/flink/packer/pipeline/PipelineSnapshot.scala
@@ -34,7 +34,7 @@ import scala.collection.JavaConverters._
* StepSeq -> (PipeStepStatus -> status update timestamp)
*/
-case class PipeSnapshot(
+case class PipelineSnapshot(
appName: String,
pipeType: PipelineTypeEnum,
pipeStatus: PipelineStatusEnum,
diff --git
a/streampark-flink/streampark-flink-packer/src/main/scala/org/apache/streampark/flink/packer/pipeline/PipeWatcher.scala
b/streampark-flink/streampark-flink-packer/src/main/scala/org/apache/streampark/flink/packer/pipeline/PipelineWatcher.scala
similarity index 76%
rename from
streampark-flink/streampark-flink-packer/src/main/scala/org/apache/streampark/flink/packer/pipeline/PipeWatcher.scala
rename to
streampark-flink/streampark-flink-packer/src/main/scala/org/apache/streampark/flink/packer/pipeline/PipelineWatcher.scala
index f99fe4134..14ea5d927 100644
---
a/streampark-flink/streampark-flink-packer/src/main/scala/org/apache/streampark/flink/packer/pipeline/PipeWatcher.scala
+++
b/streampark-flink/streampark-flink-packer/src/main/scala/org/apache/streampark/flink/packer/pipeline/PipelineWatcher.scala
@@ -22,25 +22,25 @@ trait PipeWatcher {
/** called when the pipeline is launched. */
@throws[Exception]
- def onStart(snapshot: PipeSnapshot): Unit
+ def onStart(snapshot: PipelineSnapshot): Unit
/** called when the any status of building step is changed. */
@throws[Exception]
- def onStepStateChange(snapshot: PipeSnapshot): Unit
+ def onStepStateChange(snapshot: PipelineSnapshot): Unit
/**
* called when the pipeline is finished, or you can get the results directly
from the
* BuildPipeline.launch() synchronously.
*/
@throws[Exception]
- def onFinish(snapshot: PipeSnapshot, result: BuildResult): Unit
+ def onFinish(snapshot: PipelineSnapshot, result: BuildResult): Unit
}
class SilentPipeWatcher extends PipeWatcher {
- override def onStart(snapshot: PipeSnapshot): Unit = {}
+ override def onStart(snapshot: PipelineSnapshot): Unit = {}
- override def onStepStateChange(snapshot: PipeSnapshot): Unit = {}
+ override def onStepStateChange(snapshot: PipelineSnapshot): Unit = {}
- override def onFinish(snapshot: PipeSnapshot, result: BuildResult): Unit = {}
+ override def onFinish(snapshot: PipelineSnapshot, result: BuildResult): Unit
= {}
}
diff --git
a/streampark-flink/streampark-flink-shims/streampark-flink-shims-base/src/main/scala/org/apache/streampark/flink/core/FlinkStreamTableTrait.scala
b/streampark-flink/streampark-flink-shims/streampark-flink-shims-base/src/main/scala/org/apache/streampark/flink/core/FlinkStreamTableTrait.scala
index 517ae1ae6..382038ed6 100644
---
a/streampark-flink/streampark-flink-shims/streampark-flink-shims-base/src/main/scala/org/apache/streampark/flink/core/FlinkStreamTableTrait.scala
+++
b/streampark-flink/streampark-flink-shims/streampark-flink-shims-base/src/main/scala/org/apache/streampark/flink/core/FlinkStreamTableTrait.scala
@@ -16,7 +16,6 @@
*/
package org.apache.streampark.flink.core
-import org.apache.streampark.common.conf.ConfigKeys._
import org.apache.streampark.common.util.Utils
import org.apache.streampark.flink.core.EnhancerImplicit._
diff --git
a/streampark-flink/streampark-flink-shims/streampark-flink-shims-base/src/main/scala/org/apache/streampark/flink/core/FlinkTableTrait.scala
b/streampark-flink/streampark-flink-shims/streampark-flink-shims-base/src/main/scala/org/apache/streampark/flink/core/FlinkTableTrait.scala
index 71b1a1c48..9fdc0271e 100644
---
a/streampark-flink/streampark-flink-shims/streampark-flink-shims-base/src/main/scala/org/apache/streampark/flink/core/FlinkTableTrait.scala
+++
b/streampark-flink/streampark-flink-shims/streampark-flink-shims-base/src/main/scala/org/apache/streampark/flink/core/FlinkTableTrait.scala
@@ -16,7 +16,6 @@
*/
package org.apache.streampark.flink.core
-import org.apache.streampark.common.conf.ConfigKeys._
import org.apache.streampark.common.util.Utils
import org.apache.streampark.flink.core.EnhancerImplicit._
@@ -47,9 +46,6 @@ abstract class FlinkTableTrait(val parameter: ParameterTool,
private val tableEn
def sql(sql: String = null): Unit = FlinkSqlExecutor.executeSql(sql,
parameter, this)
- private[flink] def sqlWithCallBack(sql: String = null)(implicit
- callback: Unit => String = null): Unit =
FlinkSqlExecutor.executeSql(sql, parameter, this)
-
override def fromValues(values: Expression*): Table =
tableEnv.fromValues(values)
override def fromValues(rowType: AbstractDataType[_], values: Expression*):
Table =