This is an automated email from the ASF dual-hosted git repository.

benjobs pushed a commit to branch dev
in repository https://gitbox.apache.org/repos/asf/incubator-streampark.git


The following commit(s) were added to refs/heads/dev by this push:
     new 8e1669b53 [Improve][Issue-2681] support team dependency management 
(#2706)
8e1669b53 is described below

commit 8e1669b539c4c488d6e639ccbfc62f2bf6248095
Author: zhoulii <[email protected]>
AuthorDate: Wed May 3 20:00:01 2023 +0800

    [Improve][Issue-2681] support team dependency management (#2706)
    
    * [Improve] upgrade dependency management to resource management
    * [Improve] add ddl & dml of resource management module in h2 sql script
    * [Improve] change DependencyMapper.xml to ResourceMapper.xml
    * [FIX] Address review comments
    
    ---------
    
    Co-authored-by: zhoulii <[email protected]>
---
 .../src/main/assembly/script/data/mysql-data.sql   |  14 +-
 .../src/main/assembly/script/data/pgsql-data.sql   |  14 +-
 .../main/assembly/script/schema/mysql-schema.sql   |  19 +++
 .../main/assembly/script/schema/pgsql-schema.sql   |  34 ++++
 .../main/assembly/script/upgrade/mysql/2.2.0.sql   |  62 +++++++
 .../main/assembly/script/upgrade/pgsql/2.2.0.sql   |  68 ++++++++
 .../core/controller/ResourceController.java        |  90 ++++++++++
 .../console/core/entity/Application.java           |   1 +
 .../streampark/console/core/entity/FlinkSql.java   |  13 +-
 .../streampark/console/core/entity/Resource.java   |  67 ++++++++
 .../streampark/console/core/enums/ChangedType.java |   7 +-
 .../console/core/enums/ResourceType.java           |  46 +++--
 .../console/core/mapper/ResourceMapper.java}       |  40 ++---
 .../console/core/service/ResourceService.java      |  74 ++++++++
 .../core/service/impl/AppBuildPipeServiceImpl.java |  43 ++++-
 .../core/service/impl/ApplicationServiceImpl.java  |   4 +-
 .../core/service/impl/ResourceServiceImpl.java     | 184 ++++++++++++++++++++
 .../src/main/resources/db/data-h2.sql              |  14 +-
 .../src/main/resources/db/schema-h2.sql            |  17 ++
 .../main/resources/mapper/core/FlinkSqlMapper.xml  |   1 +
 .../main/resources/mapper/core/ResourceMapper.xml  |  47 ++++++
 .../src/api/flink/app/app.type.ts                  |   1 +
 .../src/api/flink/resource/index.ts                |  79 +++++++++
 .../flink/resource/model/resourceModel.ts}         |  56 ++++---
 .../src/locales/lang/en/flink/app.ts               |   4 +
 .../src/locales/lang/en/flink/resource.ts          |  43 +++++
 .../src/locales/lang/en/menu.ts                    |   1 +
 .../src/locales/lang/zh-CN/flink/app.ts            |   4 +
 .../lang/{en/menu.ts => zh-CN/flink/resource.ts}   |  42 ++---
 .../src/locales/lang/zh-CN/menu.ts                 |   1 +
 .../src/views/flink/app/Add.vue                    |  16 +-
 .../src/views/flink/app/EditStreamPark.vue         |   8 +-
 .../flink/app/hooks/useCreateAndEditSchema.ts      |  23 +++
 .../src/views/flink/app/hooks/useCreateSchema.ts   |  11 +-
 .../src/views/flink/app/hooks/useFlinkRender.tsx   |  90 ++++++++++
 .../src/views/flink/app/utils/index.ts             |   7 +
 .../src/views/flink/resource/View.vue              | 186 +++++++++++++++++++++
 .../flink/resource/components/ResourceDrawer.vue   | 176 +++++++++++++++++++
 .../src/views/flink/resource/resource.data.ts      |  70 ++++++++
 .../flink/packer/maven/DependencyInfo.scala        |   3 +
 40 files changed, 1565 insertions(+), 115 deletions(-)

diff --git 
a/streampark-console/streampark-console-service/src/main/assembly/script/data/mysql-data.sql
 
b/streampark-console/streampark-console-service/src/main/assembly/script/data/mysql-data.sql
index 71001d79b..b4a7ca4a0 100644
--- 
a/streampark-console/streampark-console-service/src/main/assembly/script/data/mysql-data.sql
+++ 
b/streampark-console/streampark-console-service/src/main/assembly/script/data/mysql-data.sql
@@ -45,7 +45,7 @@ insert into `t_flink_project` values (100000, 100000, 
'streampark-quickstart', '
 -- ----------------------------
 -- Records of t_flink_sql
 -- ----------------------------
-insert into `t_flink_sql` values (100000, 100000, 
'eNqlUUtPhDAQvu+vmFs1AYIHT5s94AaVqGxSSPZIKgxrY2mxrdGfb4GS3c0+LnJo6Mz36syapkmZQpk8vKbQMMt2KOFmAe5rK4Nf3yhrhCwvA1/TTDaqO61UxmooSprlT1PDGkgKEKpmwvIOjWVdP3W2zpG+JfQFHjfU46xxrVvYZuWztye1khJrqzSBFRCfjUwSYQiqt1xJJvyPcbWJp9WPCXvUoUEn0ZAVufcs0nIUjYn2L4s++YiY75eBLr+2Dnl3GYKTWRyfQKYRRR2XZxXmNvu9yh9GHAmUO/sxyMRkGNly4c714RZ7zaWtLHsX+N9NjvVrWxm99jmyvEhpOUhujmIYFI5zkCOYzYIj11a7QH7Tyz+nE8bw',
 null, 1, 1, now());
+insert into `t_flink_sql` values (100000, 100000, 
'eNqlUUtPhDAQvu+vmFs1AYIHT5s94AaVqGxSSPZIKgxrY2mxrdGfb4GS3c0+LnJo6Mz36syapkmZQpk8vKbQMMt2KOFmAe5rK4Nf3yhrhCwvA1/TTDaqO61UxmooSprlT1PDGkgKEKpmwvIOjWVdP3W2zpG+JfQFHjfU46xxrVvYZuWztye1khJrqzSBFRCfjUwSYQiqt1xJJvyPcbWJp9WPCXvUoUEn0ZAVufcs0nIUjYn2L4s++YiY75eBLr+2Dnl3GYKTWRyfQKYRRR2XZxXmNvu9yh9GHAmUO/sxyMRkGNly4c714RZ7zaWtLHsX+N9NjvVrWxm99jmyvEhpOUhujmIYFI5zkCOYzYIj11a7QH7Tyz+nE8bw',
 null, null, 1, 1, now());
 
 -- ----------------------------
 -- Records of t_menu
@@ -62,6 +62,7 @@ insert into `t_menu` values (110600, 110000, 
'menu.memberManagement', '/system/m
 insert into `t_menu` values (120100, 120000, 'menu.project', '/flink/project', 
'flink/project/View', null, 'github', '0', 1, 1, now(), now());
 insert into `t_menu` values (120200, 120000, 'menu.application', '/flink/app', 
'flink/app/View', null, 'mobile', '0', 1, 2, now(), now());
 insert into `t_menu` values (120300, 120000, 'menu.variable', 
'/flink/variable', 'flink/variable/View', null, 'code', '0', 1, 3, now(), 
now());
+insert into `t_menu` values (120400, 120000, 'menu.resource', 
'/flink/resource', 'flink/resource/View', null, 'apartment', '0', 1, 3, now(), 
now());
 insert into `t_menu` values (130100, 130000, 'setting.system', 
'/setting/system', 'setting/System/index', null, 'database', '0', 1, 1, now(), 
now());
 insert into `t_menu` values (130200, 130000, 'setting.alarm', 
'/setting/alarm', 'setting/Alarm/index', null, 'alert', '0', 1, 2, now(), 
now());
 insert into `t_menu` values (130300, 130000, 'setting.flinkHome', 
'/setting/flinkHome', 'setting/FlinkHome/index', null, 'desktop', '0', 1, 3, 
now(), now());
@@ -120,6 +121,9 @@ insert into `t_menu` values (120304, 120300, 'depend apps', 
'/flink/variable/dep
 insert into `t_menu` values (120305, 120300, 'show original', NULL, NULL, 
'variable:show_original', NULL, '1', 1, NULL, now(), now());
 insert into `t_menu` values (120306, 120300, 'view', NULL, NULL, 
'variable:view', NULL, '1', 1, null, now(), now());
 insert into `t_menu` values (120307, 120300, 'depend view', null, null, 
'variable:depend_apps', null, '1', 1, NULL, now(), now());
+insert into `t_menu` values (120401, 120400, 'add', NULL, NULL, 
'resource:add', NULL, '1', 1, NULL, now(), now());
+insert into `t_menu` values (120402, 120400, 'update', NULL, NULL, 
'resource:update', NULL, '1', 1, NULL, now(), now());
+insert into `t_menu` values (120403, 120400, 'delete', NULL, NULL, 
'resource:delete', NULL, '1', 1, NULL, now(), now());
 insert into `t_menu` values (130101, 130100, 'view', null, null, 
'setting:view', null, '1', 1, null, now(), now());
 insert into `t_menu` values (130102, 130100, 'setting update', null, null, 
'setting:update', null, '1', 1, null, now(), now());
 insert into `t_menu` values (130401, 130400, 'add cluster', 
'/setting/add_cluster', 'setting/FlinkCluster/AddCluster', 'cluster:create', 
'', '0', 0, null, now(), now());
@@ -167,6 +171,10 @@ insert into `t_role_menu` (role_id, menu_id) values 
(100001, 120300);
 insert into `t_role_menu` (role_id, menu_id) values (100001, 120304);
 insert into `t_role_menu` (role_id, menu_id) values (100001, 120306);
 insert into `t_role_menu` (role_id, menu_id) values (100001, 120307);
+insert into `t_role_menu` (role_id, menu_id) values (100001, 120400);
+insert into `t_role_menu` (role_id, menu_id) values (100001, 120401);
+insert into `t_role_menu` (role_id, menu_id) values (100001, 120402);
+insert into `t_role_menu` (role_id, menu_id) values (100001, 120403);
 insert into `t_role_menu` (role_id, menu_id) values (100001, 130000);
 insert into `t_role_menu` (role_id, menu_id) values (100001, 130100);
 insert into `t_role_menu` (role_id, menu_id) values (100001, 130101);
@@ -211,6 +219,10 @@ insert into `t_role_menu` (role_id, menu_id) values 
(100002, 120304);
 insert into `t_role_menu` (role_id, menu_id) values (100002, 120305);
 insert into `t_role_menu` (role_id, menu_id) values (100002, 120306);
 insert into `t_role_menu` (role_id, menu_id) values (100002, 120307);
+insert into `t_role_menu` (role_id, menu_id) values (100002, 120400);
+insert into `t_role_menu` (role_id, menu_id) values (100002, 120401);
+insert into `t_role_menu` (role_id, menu_id) values (100002, 120402);
+insert into `t_role_menu` (role_id, menu_id) values (100002, 120403);
 insert into `t_role_menu` (role_id, menu_id) values (100002, 130000);
 insert into `t_role_menu` (role_id, menu_id) values (100002, 130100);
 insert into `t_role_menu` (role_id, menu_id) values (100002, 130101);
diff --git 
a/streampark-console/streampark-console-service/src/main/assembly/script/data/pgsql-data.sql
 
b/streampark-console/streampark-console-service/src/main/assembly/script/data/pgsql-data.sql
index 968c5ce50..98b787187 100644
--- 
a/streampark-console/streampark-console-service/src/main/assembly/script/data/pgsql-data.sql
+++ 
b/streampark-console/streampark-console-service/src/main/assembly/script/data/pgsql-data.sql
@@ -41,7 +41,7 @@ insert into "public"."t_flink_project" values (100000, 
100000, 'streampark-quick
 -- ----------------------------
 -- Records of t_flink_sql
 -- ----------------------------
-insert into "public"."t_flink_sql" values (100000, 100000, 
'eNqlUUtPhDAQvu+vmFs1AYIHT5s94AaVqGxSSPZIKgxrY2mxrdGfb4GS3c0+LnJo6Mz36syapkmZQpk8vKbQMMt2KOFmAe5rK4Nf3yhrhCwvA1/TTDaqO61UxmooSprlT1PDGkgKEKpmwvIOjWVdP3W2zpG+JfQFHjfU46xxrVvYZuWztye1khJrqzSBFRCfjUwSYQiqt1xJJvyPcbWJp9WPCXvUoUEn0ZAVufcs0nIUjYn2L4s++YiY75eBLr+2Dnl3GYKTWRyfQKYRRR2XZxXmNvu9yh9GHAmUO/sxyMRkGNly4c714RZ7zaWtLHsX+N9NjvVrWxm99jmyvEhpOUhujmIYFI5zkCOYzYIj11a7QH7Tyz+nE8bw',
 null, 1, 1, now());
+insert into "public"."t_flink_sql" values (100000, 100000, 
'eNqlUUtPhDAQvu+vmFs1AYIHT5s94AaVqGxSSPZIKgxrY2mxrdGfb4GS3c0+LnJo6Mz36syapkmZQpk8vKbQMMt2KOFmAe5rK4Nf3yhrhCwvA1/TTDaqO61UxmooSprlT1PDGkgKEKpmwvIOjWVdP3W2zpG+JfQFHjfU46xxrVvYZuWztye1khJrqzSBFRCfjUwSYQiqt1xJJvyPcbWJp9WPCXvUoUEn0ZAVufcs0nIUjYn2L4s++YiY75eBLr+2Dnl3GYKTWRyfQKYRRR2XZxXmNvu9yh9GHAmUO/sxyMRkGNly4c714RZ7zaWtLHsX+N9NjvVrWxm99jmyvEhpOUhujmIYFI5zkCOYzYIj11a7QH7Tyz+nE8bw',
 null, null, 1, 1, now());
 
 -- ----------------------------
 -- Records of t_menu
@@ -58,6 +58,7 @@ insert into "public"."t_menu" values (110600, 110000, 
'menu.memberManagement', '
 insert into "public"."t_menu" values (120100, 120000, 'menu.project', 
'/flink/project', 'flink/project/View', null, 'github', '0', '1', 1, now(), 
now());
 insert into "public"."t_menu" values (120200, 120000, 'menu.application', 
'/flink/app', 'flink/app/View', null, 'mobile', '0', '1', 2, now(), now());
 insert into "public"."t_menu" values (120300, 120000, 'menu.variable', 
'/flink/variable', 'flink/variable/View', null, 'code', '0', '1', 3, now(), 
now());
+insert into "public"."t_menu" values (120400, 120000, 'menu.resource', 
'/flink/resource', 'flink/resource/View', null, 'apartment', '0', '1', 3, 
now(), now());
 insert into "public"."t_menu" values (130100, 130000, 'setting.system', 
'/setting/system', 'setting/System/index', null, 'database', '0', '1', 1, 
now(), now());
 insert into "public"."t_menu" values (130200, 130000, 'setting.alarm', 
'/setting/alarm', 'setting/Alarm/index', null, 'alert', '0', '1', 2, now(), 
now());
 insert into "public"."t_menu" values (130300, 130000, 'setting.flinkHome', 
'/setting/flinkHome', 'setting/FlinkHome/index', null, 'desktop', '0', '1', 3, 
now(), now());
@@ -116,6 +117,9 @@ insert into "public"."t_menu" values (120304, 120300, 
'depend apps', '/flink/var
 insert into "public"."t_menu" values (120305, 120300, 'show original', NULL, 
NULL, 'variable:show_original', NULL, '1', '1', NULL, now(), now());
 insert into "public"."t_menu" values (120306, 120300, 'view', NULL, NULL, 
'variable:view', NULL, '1', '1', null, now(), now());
 insert into "public"."t_menu" values (120307, 120300, 'depend view', null, 
null, 'variable:depend_apps', null, '1', '1', NULL, now(), now());
+insert into "public"."t_menu" values (120401, 120400, 'add', NULL, NULL, 
'resource:add', NULL, '1', '1', NULL, now(), now());
+insert into "public"."t_menu" values (120402, 120400, 'update', NULL, NULL, 
'resource:update', NULL, '1', '1', NULL, now(), now());
+insert into "public"."t_menu" values (120403, 120400, 'delete', NULL, NULL, 
'resource:delete', NULL, '1', '1', NULL, now(), now());
 insert into "public"."t_menu" values (130101, 130100, 'view', null, null, 
'setting:view', null, '1', '1', null, now(), now());
 insert into "public"."t_menu" values (130102, 130100, 'setting update', null, 
null, 'setting:update', null, '1', '1', null, now(), now());
 insert into "public"."t_menu" values (130401, 130400, 'add cluster', 
'/setting/add_cluster', 'setting/FlinkCluster/AddCluster', 'cluster:create', 
'', '0', '0', null, now(), now());
@@ -163,6 +167,10 @@ insert into "public"."t_role_menu" (role_id, menu_id) 
values (100001, 120300);
 insert into "public"."t_role_menu" (role_id, menu_id) values (100001, 120304);
 insert into "public"."t_role_menu" (role_id, menu_id) values (100001, 120306);
 insert into "public"."t_role_menu" (role_id, menu_id) values (100001, 120307);
+insert into "public"."t_role_menu" (role_id, menu_id) values (100001, 120400);
+insert into "public"."t_role_menu" (role_id, menu_id) values (100001, 120401);
+insert into "public"."t_role_menu" (role_id, menu_id) values (100001, 120402);
+insert into "public"."t_role_menu" (role_id, menu_id) values (100001, 120403);
 insert into "public"."t_role_menu" (role_id, menu_id) values (100001, 130000);
 insert into "public"."t_role_menu" (role_id, menu_id) values (100001, 130100);
 insert into "public"."t_role_menu" (role_id, menu_id) values (100001, 130101);
@@ -207,6 +215,10 @@ insert into "public"."t_role_menu" (role_id, menu_id) 
values (100002, 120304);
 insert into "public"."t_role_menu" (role_id, menu_id) values (100002, 120305);
 insert into "public"."t_role_menu" (role_id, menu_id) values (100002, 120306);
 insert into "public"."t_role_menu" (role_id, menu_id) values (100002, 120307);
+insert into "public"."t_role_menu" (role_id, menu_id) values (100002, 120400);
+insert into "public"."t_role_menu" (role_id, menu_id) values (100002, 120401);
+insert into "public"."t_role_menu" (role_id, menu_id) values (100002, 120402);
+insert into "public"."t_role_menu" (role_id, menu_id) values (100002, 120403);
 insert into "public"."t_role_menu" (role_id, menu_id) values (100002, 130000);
 insert into "public"."t_role_menu" (role_id, menu_id) values (100002, 130100);
 insert into "public"."t_role_menu" (role_id, menu_id) values (100002, 130101);
diff --git 
a/streampark-console/streampark-console-service/src/main/assembly/script/schema/mysql-schema.sql
 
b/streampark-console/streampark-console-service/src/main/assembly/script/schema/mysql-schema.sql
index 9c5e3264d..bd484b520 100644
--- 
a/streampark-console/streampark-console-service/src/main/assembly/script/schema/mysql-schema.sql
+++ 
b/streampark-console/streampark-console-service/src/main/assembly/script/schema/mysql-schema.sql
@@ -228,6 +228,7 @@ create table `t_flink_sql` (
   `id` bigint not null auto_increment,
   `app_id` bigint default null,
   `sql` text collate utf8mb4_general_ci,
+  `team_resource` varchar(64) collate utf8mb4_general_ci,
   `dependency` text collate utf8mb4_general_ci,
   `version` int default null,
   `candidate` tinyint not null default 1,
@@ -307,6 +308,24 @@ create table `t_variable` (
   unique key `un_team_vcode_inx` (`team_id`,`variable_code`) using btree
 ) engine=innodb auto_increment=100000 default charset=utf8mb4 
collate=utf8mb4_general_ci;
 
+-- ----------------------------
+-- Table of t_resource
+-- ----------------------------
+drop table if exists `t_resource`;
+create table `t_resource` (
+  `id` bigint not null auto_increment,
+  `resource_name` varchar(128) collate utf8mb4_general_ci not null comment 
'The name of the resource file',
+  `resource_type` int  not null comment '0:app 1:common 2:connector 3:format 
4:udf',
+  `main_class` varchar(255) collate utf8mb4_general_ci default null,
+  `description` text collate utf8mb4_general_ci default null comment 'More 
detailed description of resource',
+  `creator_id` bigint not null comment 'user id of creator',
+  `team_id` bigint not null comment 'team id',
+  `create_time` datetime not null default current_timestamp comment 'create 
time',
+  `modify_time` datetime not null default current_timestamp on update 
current_timestamp comment 'modify time',
+  primary key (`id`) using btree,
+  unique key `un_team_vcode_inx` (`team_id`,`resource_name`) using btree
+) engine=innodb auto_increment=100000 default charset=utf8mb4 
collate=utf8mb4_general_ci;
+
 -- ----------------------------
 -- Table structure for t_role
 -- ----------------------------
diff --git 
a/streampark-console/streampark-console-service/src/main/assembly/script/schema/pgsql-schema.sql
 
b/streampark-console/streampark-console-service/src/main/assembly/script/schema/pgsql-schema.sql
index a86f81f38..c6252700c 100644
--- 
a/streampark-console/streampark-console-service/src/main/assembly/script/schema/pgsql-schema.sql
+++ 
b/streampark-console/streampark-console-service/src/main/assembly/script/schema/pgsql-schema.sql
@@ -473,6 +473,7 @@ create table "public"."t_flink_sql" (
   "id" int8 not null default 
nextval('streampark_t_flink_sql_id_seq'::regclass),
   "app_id" int8,
   "sql" text collate "pg_catalog"."default",
+  "team_resource" varchar(64) collate "pg_catalog"."default",
   "dependency" text collate "pg_catalog"."default",
   "version" int4,
   "candidate" int2 default 1 not null,
@@ -598,6 +599,39 @@ create index "un_team_vcode_inx" on "public"."t_variable" 
using btree (
   "variable_code" collate "pg_catalog"."default" "pg_catalog"."text_ops" asc 
nulls last
 );
 
+-- ----------------------------
+-- Table of t_resource
+-- ----------------------------
+create sequence "public"."streampark_t_resource_id_seq"
+    increment 1 start 10000 cache 1 minvalue 10000 maxvalue 
9223372036854775807;
+
+create table "public"."t_resource" (
+                                       "id" int8 not null default 
nextval('streampark_t_resource_id_seq'::regclass),
+                                       "resource_name" varchar(128) collate 
"pg_catalog"."default" not null,
+                                       "resource_type" int4,
+                                       "main_class" varchar(255) collate 
"pg_catalog"."default",
+                                       "description" text collate 
"pg_catalog"."default" default null,
+                                       "creator_id" int8  not null,
+                                       "team_id" int8  not null,
+                                       "create_time" timestamp(6) not null 
default timezone('UTC-8'::text, (now())::timestamp(0) without time zone),
+                                       "modify_time" timestamp(6) not null 
default timezone('UTC-8'::text, (now())::timestamp(0) without time zone)
+)
+;
+comment on column "public"."t_resource"."id" is 'Resource id';
+comment on column "public"."t_resource"."resource_name" is 'Resource name';
+comment on column "public"."t_resource"."resource_type" is '0:app 1:common 
2:connector 3:format 4:udf';
+comment on column "public"."t_resource"."main_class" is 'The program main 
class';
+comment on column "public"."t_resource"."description" is 'More detailed 
description of resource';
+comment on column "public"."t_resource"."creator_id" is 'user id of creator';
+comment on column "public"."t_resource"."team_id" is 'team id';
+comment on column "public"."t_resource"."create_time" is 'creation time';
+comment on column "public"."t_resource"."modify_time" is 'modify time';
+
+alter table "public"."t_resource" add constraint "t_resource_pkey" primary key 
("id");
+create index "un_team_dname_inx" on "public"."t_resource" using btree (
+    "team_id" "pg_catalog"."int8_ops" asc nulls last,
+    "resource_name" collate "pg_catalog"."default" "pg_catalog"."text_ops" asc 
nulls last
+    );
 
 -- ----------------------------
 -- table structure for t_role
diff --git 
a/streampark-console/streampark-console-service/src/main/assembly/script/upgrade/mysql/2.2.0.sql
 
b/streampark-console/streampark-console-service/src/main/assembly/script/upgrade/mysql/2.2.0.sql
new file mode 100644
index 000000000..073759f9c
--- /dev/null
+++ 
b/streampark-console/streampark-console-service/src/main/assembly/script/upgrade/mysql/2.2.0.sql
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use streampark;
+
+set names utf8mb4;
+set foreign_key_checks = 0;
+
+-- ----------------------------
+-- Table of t_resource
+-- ----------------------------
+drop table if exists `t_resource`;
+create table `t_resource` (
+                                `id` bigint not null auto_increment,
+                                `resource_name` varchar(128) collate 
utf8mb4_general_ci not null comment 'The name of the resource file',
+                                `resource_type` int  not null comment '0:app 
1:common 2:connector 3:format 4:udf',
+                                `main_class` varchar(255) collate 
utf8mb4_general_ci default null,
+                                `description` text collate utf8mb4_general_ci 
default null comment 'More detailed description of resource',
+                                `creator_id` bigint not null comment 'user id 
of creator',
+                                `team_id` bigint not null comment 'team id',
+                                `create_time` datetime not null default 
current_timestamp comment 'create time',
+                                `modify_time` datetime not null default 
current_timestamp on update current_timestamp comment 'modify time',
+                                primary key (`id`) using btree,
+                                unique key `un_team_vcode_inx` 
(`team_id`,`resource_name`) using btree
+) engine=innodb auto_increment=100000 default charset=utf8mb4 
collate=utf8mb4_general_ci;
+
+alter table `t_flink_sql`
+    add column `team_resource` varchar(64) default null;
+
+-- menu level 2
+insert into `t_menu` values (120400, 120000, 'menu.resource', 
'/flink/resource', 'flink/resource/View', null, 'apartment', '0', 1, 3, now(), 
now());
+-- menu level 3
+insert into `t_menu` values (120401, 120400, 'add', NULL, NULL, 
'resource:add', NULL, '1', 1, NULL, now(), now());
+insert into `t_menu` values (120402, 120400, 'update', NULL, NULL, 
'resource:update', NULL, '1', 1, NULL, now(), now());
+insert into `t_menu` values (120403, 120400, 'delete', NULL, NULL, 
'resource:delete', NULL, '1', 1, NULL, now(), now());
+
+-- role menu script
+insert into `t_role_menu` (role_id, menu_id) values (100001, 120400);
+insert into `t_role_menu` (role_id, menu_id) values (100001, 120401);
+insert into `t_role_menu` (role_id, menu_id) values (100001, 120402);
+insert into `t_role_menu` (role_id, menu_id) values (100001, 120403);
+
+insert into `t_role_menu` (role_id, menu_id) values (100002, 120400);
+insert into `t_role_menu` (role_id, menu_id) values (100002, 120401);
+insert into `t_role_menu` (role_id, menu_id) values (100002, 120402);
+insert into `t_role_menu` (role_id, menu_id) values (100002, 120403);
+
+set foreign_key_checks = 1;
diff --git 
a/streampark-console/streampark-console-service/src/main/assembly/script/upgrade/pgsql/2.2.0.sql
 
b/streampark-console/streampark-console-service/src/main/assembly/script/upgrade/pgsql/2.2.0.sql
new file mode 100644
index 000000000..a64a57b66
--- /dev/null
+++ 
b/streampark-console/streampark-console-service/src/main/assembly/script/upgrade/pgsql/2.2.0.sql
@@ -0,0 +1,68 @@
+/*
+* Licensed to the Apache Software Foundation (ASF) under one or more
+* contributor license agreements.  See the NOTICE file distributed with
+* this work for additional information regarding copyright ownership.
+* The ASF licenses this file to You under the Apache License, Version 2.0
+* (the "License"); you may not use this file except in compliance with
+* the License.  You may obtain a copy of the License at
+*
+*    http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+-- ----------------------------
+-- Table of t_resource
+-- ----------------------------
+create sequence "public"."streampark_t_resource_id_seq"
+    increment 1 start 10000 cache 1 minvalue 10000 maxvalue 
9223372036854775807;
+
+create table "public"."t_resource" (
+                                       "id" int8 not null default 
nextval('streampark_t_resource_id_seq'::regclass),
+                                       "resource_name" varchar(128) collate 
"pg_catalog"."default" not null,
+                                       "resource_type" int4,
+                                       "main_class" varchar(255) collate 
"pg_catalog"."default",
+                                       "description" text collate 
"pg_catalog"."default" default null,
+                                       "creator_id" int8  not null,
+                                       "team_id" int8  not null,
+                                       "create_time" timestamp(6) not null 
default timezone('UTC-8'::text, (now())::timestamp(0) without time zone),
+                                       "modify_time" timestamp(6) not null 
default timezone('UTC-8'::text, (now())::timestamp(0) without time zone)
+)
+;
+comment on column "public"."t_resource"."id" is 'Resource id';
+comment on column "public"."t_resource"."resource_name" is 'Resource name';
+comment on column "public"."t_resource"."resource_type" is '0:app 1:common 
2:connector 3:format 4:udf';
+comment on column "public"."t_resource"."main_class" is 'The program main 
class';
+comment on column "public"."t_resource"."description" is 'More detailed 
description of resource';
+comment on column "public"."t_resource"."creator_id" is 'user id of creator';
+comment on column "public"."t_resource"."team_id" is 'team id';
+comment on column "public"."t_resource"."create_time" is 'creation time';
+comment on column "public"."t_resource"."modify_time" is 'modify time';
+
+alter table "public"."t_resource" add constraint "t_resource_pkey" primary key 
("id");
+create index "un_team_dname_inx" on "public"."t_resource" using btree (
+    "team_id" "pg_catalog"."int8_ops" asc nulls last,
+    "resource_name" collate "pg_catalog"."default" "pg_catalog"."text_ops" asc 
nulls last
+    );
+
+alter table "public"."t_flink_sql"
+    add column "team_resource" varchar(64) default null;
+
+insert into "public"."t_menu" values (120400, 120000, 'menu.resource', 
'/flink/resource', 'flink/resource/View', null, 'apartment', '0', '1', 3, 
now(), now());
+insert into "public"."t_menu" values (120401, 120400, 'add', NULL, NULL, 
'resource:add', NULL, '1', '1', NULL, now(), now());
+insert into "public"."t_menu" values (120402, 120400, 'update', NULL, NULL, 
'resource:update', NULL, '1', '1', NULL, now(), now());
+insert into "public"."t_menu" values (120403, 120400, 'delete', NULL, NULL, 
'resource:delete', NULL, '1', '1', NULL, now(), now());
+
+insert into "public"."t_role_menu" (role_id, menu_id) values (100001, 120400);
+insert into "public"."t_role_menu" (role_id, menu_id) values (100001, 120401);
+insert into "public"."t_role_menu" (role_id, menu_id) values (100001, 120402);
+insert into "public"."t_role_menu" (role_id, menu_id) values (100001, 120403);
+
+insert into "public"."t_role_menu" (role_id, menu_id) values (100002, 120400);
+insert into "public"."t_role_menu" (role_id, menu_id) values (100002, 120401);
+insert into "public"."t_role_menu" (role_id, menu_id) values (100002, 120402);
+insert into "public"."t_role_menu" (role_id, menu_id) values (100002, 120403);
diff --git 
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/ResourceController.java
 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/ResourceController.java
new file mode 100644
index 000000000..27c95b555
--- /dev/null
+++ 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/ResourceController.java
@@ -0,0 +1,90 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.streampark.console.core.controller;
+
+import org.apache.streampark.console.base.domain.RestRequest;
+import org.apache.streampark.console.base.domain.RestResponse;
+import org.apache.streampark.console.core.entity.Resource;
+import org.apache.streampark.console.core.service.ResourceService;
+
+import org.apache.shiro.authz.annotation.RequiresPermissions;
+
+import com.baomidou.mybatisplus.core.metadata.IPage;
+import io.swagger.v3.oas.annotations.Operation;
+import io.swagger.v3.oas.annotations.tags.Tag;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.validation.annotation.Validated;
+import org.springframework.web.bind.annotation.DeleteMapping;
+import org.springframework.web.bind.annotation.PostMapping;
+import org.springframework.web.bind.annotation.PutMapping;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RequestParam;
+import org.springframework.web.bind.annotation.RestController;
+
+import javax.validation.Valid;
+
+import java.util.List;
+
+@Tag(name = "RESOURCE_TAG")
+@Slf4j
+@Validated
+@RestController
+@RequestMapping("resource")
+public class ResourceController {
+
+  @Autowired private ResourceService resourceService;
+
+  @Operation(summary = "add resource")
+  @PostMapping("add")
+  @RequiresPermissions("resource:add")
+  public RestResponse addResource(@Valid Resource resource) {
+    this.resourceService.addResource(resource);
+    return RestResponse.success();
+  }
+
+  @Operation(summary = "List resources")
+  @PostMapping("page")
+  public RestResponse page(RestRequest restRequest, Resource resource) {
+    IPage<Resource> page = resourceService.page(resource, restRequest);
+    return RestResponse.success(page);
+  }
+
+  @Operation(summary = "Update resource")
+  @PutMapping("update")
+  @RequiresPermissions("resource:update")
+  public RestResponse updateResource(@Valid Resource resource) {
+    resourceService.updateResource(resource);
+    return RestResponse.success();
+  }
+
+  @Operation(summary = "Delete resource")
+  @DeleteMapping("delete")
+  @RequiresPermissions("resource:delete")
+  public RestResponse deleteResource(@Valid Resource resource) {
+    this.resourceService.deleteResource(resource);
+    return RestResponse.success();
+  }
+
+  @Operation(summary = "List resource")
+  @PostMapping("list")
+  public RestResponse listResource(@RequestParam Long teamId) {
+    List<Resource> resourceList = resourceService.findByTeamId(teamId);
+    return RestResponse.success(resourceList);
+  }
+}
diff --git 
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/Application.java
 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/Application.java
index 81f4ccc4c..2bc94c5da 100644
--- 
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/Application.java
+++ 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/Application.java
@@ -220,6 +220,7 @@ public class Application implements Serializable {
   /** running job */
   private transient JobsOverview.Task overview;
 
+  private transient String teamResource;
   private transient String dependency;
   private transient Long sqlId;
   private transient String flinkSql;
diff --git 
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/FlinkSql.java
 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/FlinkSql.java
index 57be322d7..d94d8ed8d 100644
--- 
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/FlinkSql.java
+++ 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/FlinkSql.java
@@ -18,6 +18,7 @@
 package org.apache.streampark.console.core.entity;
 
 import org.apache.streampark.common.util.DeflaterUtils;
+import org.apache.streampark.console.base.util.ObjectUtils;
 import org.apache.streampark.console.core.enums.ChangedType;
 
 import com.baomidou.mybatisplus.annotation.IdType;
@@ -41,6 +42,7 @@ public class FlinkSql {
   @TableField("`sql`")
   private String sql;
 
+  private String teamResource;
   private String dependency;
   private Integer version = 1;
 
@@ -63,6 +65,7 @@ public class FlinkSql {
   public FlinkSql(Application application) {
     this.appId = application.getId();
     this.sql = application.getFlinkSql();
+    this.teamResource = application.getTeamResource();
     this.dependency = application.getDependency();
     this.createTime = new Date();
   }
@@ -75,6 +78,7 @@ public class FlinkSql {
     String encode = Base64.getEncoder().encodeToString(this.sql.getBytes());
     application.setFlinkSql(encode);
     application.setDependency(this.dependency);
+    application.setTeamResource(this.teamResource);
     application.setSqlId(this.id);
   }
 
@@ -86,9 +90,11 @@ public class FlinkSql {
         Application.Dependency.toDependency(this.getDependency());
     Application.Dependency targetDependency =
         Application.Dependency.toDependency(target.getDependency());
-
     boolean depDifference = !thisDependency.eq(targetDependency);
-    if (sqlDifference && depDifference) {
+    // 3) determine if team resource has changed
+    boolean teamResDifference =
+        !ObjectUtils.safeEquals(this.teamResource, target.getTeamResource());
+    // ALL = sql changed together with any kind of dependency change;
+    // requiring all three to differ would mis-report sql+dependency as SQL only
+    if (sqlDifference && (depDifference || teamResDifference)) {
       return ChangedType.ALL;
     }
     if (sqlDifference) {
@@ -97,6 +103,9 @@ public class FlinkSql {
     if (depDifference) {
       return ChangedType.DEPENDENCY;
     }
+    if (teamResDifference) {
+      return ChangedType.TEAM_RESOURCE;
+    }
     return ChangedType.NONE;
   }
 
diff --git 
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/Resource.java
 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/Resource.java
new file mode 100644
index 000000000..d516c0376
--- /dev/null
+++ 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/Resource.java
@@ -0,0 +1,67 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.streampark.console.core.entity;
+
+import org.apache.streampark.console.core.enums.ResourceType;
+
+import com.baomidou.mybatisplus.annotation.IdType;
+import com.baomidou.mybatisplus.annotation.TableId;
+import com.baomidou.mybatisplus.annotation.TableName;
+import lombok.Data;
+
+import javax.validation.constraints.NotNull;
+import javax.validation.constraints.Size;
+
+import java.io.Serializable;
+import java.util.Date;
+
+@Data
+@TableName("t_resource")
+public class Resource implements Serializable {
+
+  private static final long serialVersionUID = -7720746591258904369L;
+
+  @TableId(type = IdType.AUTO)
+  private Long id;
+
+  private String resourceName;
+
+  @Size(max = 100, message = "{noMoreThan}")
+  private String description;
+
+  /** user id of creator */
+  private Long creatorId;
+
+  private ResourceType resourceType;
+
+  private String mainClass;
+
+  /** user name of creator */
+  private transient String creatorName;
+
+  @NotNull(message = "{required}")
+  private Long teamId;
+
+  private transient Date createTime;
+
+  private transient Date modifyTime;
+
+  private transient String sortField;
+
+  private transient String sortOrder;
+}
diff --git 
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/ChangedType.java
 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/ChangedType.java
index 67d3f8426..1fcdcf2cd 100644
--- 
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/ChangedType.java
+++ 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/ChangedType.java
@@ -31,7 +31,10 @@ public enum ChangedType implements Serializable {
   SQL(2),
 
   /** both */
-  ALL(3);
+  ALL(3),
+
+  /** team dependency has changed */
+  TEAM_RESOURCE(4);
 
   private final int value;
 
@@ -56,7 +59,7 @@ public enum ChangedType implements Serializable {
   }
 
   public boolean isDependencyChanged() {
-    return this.equals(ALL) || this.equals(DEPENDENCY);
+    return this.equals(ALL) || this.equals(DEPENDENCY) || 
this.equals(TEAM_RESOURCE);
   }
 
   @Override
diff --git 
a/streampark-flink/streampark-flink-packer/src/main/scala/org/apache/streampark/flink/packer/maven/DependencyInfo.scala
 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/ResourceType.java
similarity index 52%
copy from 
streampark-flink/streampark-flink-packer/src/main/scala/org/apache/streampark/flink/packer/maven/DependencyInfo.scala
copy to 
streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/ResourceType.java
index c80e416e6..de6754ea1 100644
--- 
a/streampark-flink/streampark-flink-packer/src/main/scala/org/apache/streampark/flink/packer/maven/DependencyInfo.scala
+++ 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/ResourceType.java
@@ -15,29 +15,39 @@
  * limitations under the License.
  */
 
-package org.apache.streampark.flink.packer.maven
+package org.apache.streampark.console.core.enums;
 
-import java.util.{List => JavaList}
+import com.baomidou.mybatisplus.annotation.EnumValue;
 
-import scala.collection.JavaConversions._
+import java.util.Arrays;
 
-/**
- * @param mavenArts
- *   collection of maven artifacts
- * @param extJarLibs
- *   collection of jar lib paths, which elements can be a directory or file 
path.
- */
-case class DependencyInfo(mavenArts: Set[Artifact] = Set(), extJarLibs: 
Set[String] = Set()) {
+/** The resource type. */
+public enum ResourceType {
 
-  def this(mavenArts: JavaList[Artifact], extJarLibs: JavaList[String]) {
-    this(mavenArts.toSet, extJarLibs.toSet)
-  }
+  /** User app */
+  APP(0),
+  /** Common resource, like mysql-jdbc */
+  COMMON(1),
 
-  def merge(jarLibs: Set[String]): DependencyInfo =
-    if (jarLibs != null) DependencyInfo(mavenArts, extJarLibs ++ jarLibs) else 
this.copy()
+  /** Flink connector plugin */
+  CONNECTOR(2),
+  /** Flink format plugin */
+  FORMAT(3),
 
-}
+  /** User designed function, including udf/udaf/udtf */
+  UDF(4);
 
-object DependencyInfo {
-  def empty: DependencyInfo = new DependencyInfo()
+  @EnumValue private final int code;
+
+  ResourceType(int code) {
+    this.code = code;
+  }
+
+  public int getCode() {
+    return code;
+  }
+
+  public static ResourceType of(Integer code) {
+    return Arrays.stream(values()).filter((x) -> x.code == 
code).findFirst().orElse(null);
+  }
 }
diff --git 
a/streampark-console/streampark-console-webapp/src/locales/lang/zh-CN/menu.ts 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/mapper/ResourceMapper.java
similarity index 53%
copy from 
streampark-console/streampark-console-webapp/src/locales/lang/zh-CN/menu.ts
copy to 
streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/mapper/ResourceMapper.java
index b3b8f3183..0901f2eab 100644
--- 
a/streampark-console/streampark-console-webapp/src/locales/lang/zh-CN/menu.ts
+++ 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/mapper/ResourceMapper.java
@@ -6,7 +6,7 @@
  * (the "License"); you may not use this file except in compliance with
  * the License.  You may obtain a copy of the License at
  *
- *    https://www.apache.org/licenses/LICENSE-2.0
+ *    http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -14,26 +14,18 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-export default {
-  menu: {
-    system: '系统管理',
-    userManagement: '用户管理',
-    roleManagement: '角色管理',
-    menuManagement: '菜单管理',
-    tokenManagement: 'Token管理',
-    teamManagement: '团队管理',
-    memberManagement: '成员管理',
-    project: '项目管理',
-    application: '作业管理',
-    variable: '变量管理',
-    setting: '设置中心',
-  },
-  setting: {
-    system: '系统设置',
-    alarm: '告警设置',
-    flinkHome: 'Flink 版本',
-    flinkCluster: 'Flink集群',
-    externalLink: '扩展链接',
-    yarnQueue: 'Yarn 队列',
-  },
-};
+
+package org.apache.streampark.console.core.mapper;
+
+import org.apache.streampark.console.core.entity.Resource;
+
+import org.apache.ibatis.annotations.Param;
+
+import com.baomidou.mybatisplus.core.mapper.BaseMapper;
+import com.baomidou.mybatisplus.core.metadata.IPage;
+import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
+
+public interface ResourceMapper extends BaseMapper<Resource> {
+
+  IPage<Resource> page(Page<Resource> page, @Param("resource") Resource 
resource);
+}
diff --git 
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/ResourceService.java
 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/ResourceService.java
new file mode 100644
index 000000000..8874d93a3
--- /dev/null
+++ 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/ResourceService.java
@@ -0,0 +1,74 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.streampark.console.core.service;
+
+import org.apache.streampark.console.base.domain.RestRequest;
+import org.apache.streampark.console.core.entity.Resource;
+
+import com.baomidou.mybatisplus.core.metadata.IPage;
+import com.baomidou.mybatisplus.extension.service.IService;
+
+import java.util.List;
+
+public interface ResourceService extends IService<Resource> {
+
+  /**
+   * list resource
+   *
+   * @param resource resource
+   * @param restRequest queryRequest
+   * @return IPage
+   */
+  IPage<Resource> page(Resource resource, RestRequest restRequest);
+
+  /**
+   * add resource
+   *
+   * @param resource resource
+   */
+  void addResource(Resource resource);
+
+  /**
+   * @param teamId team id
+   * @param name resource name
+   * @return the found resource
+   */
+  Resource findByResourceName(Long teamId, String name);
+
+  /**
+   * update resource
+   *
+   * @param resource the updated resource
+   */
+  void updateResource(Resource resource);
+
+  /**
+   * delete resource
+   *
+   * @param resource
+   */
+  void deleteResource(Resource resource);
+
+  /**
+   * Get resource through team id.
+   *
+   * @param teamId
+   * @return team resources
+   */
+  List<Resource> findByTeamId(Long teamId);
+}
diff --git 
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/AppBuildPipeServiceImpl.java
 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/AppBuildPipeServiceImpl.java
index 6f8890bd2..f62f521e5 100644
--- 
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/AppBuildPipeServiceImpl.java
+++ 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/AppBuildPipeServiceImpl.java
@@ -27,6 +27,7 @@ import org.apache.streampark.common.util.FileUtils;
 import org.apache.streampark.common.util.ThreadUtils;
 import org.apache.streampark.common.util.Utils;
 import org.apache.streampark.console.base.exception.ApiAlertException;
+import org.apache.streampark.console.base.util.JacksonUtils;
 import org.apache.streampark.console.base.util.WebUtils;
 import org.apache.streampark.console.core.entity.AppBuildPipeline;
 import org.apache.streampark.console.core.entity.Application;
@@ -49,9 +50,11 @@ import 
org.apache.streampark.console.core.service.CommonService;
 import org.apache.streampark.console.core.service.FlinkEnvService;
 import org.apache.streampark.console.core.service.FlinkSqlService;
 import org.apache.streampark.console.core.service.MessageService;
+import org.apache.streampark.console.core.service.ResourceService;
 import org.apache.streampark.console.core.service.SettingService;
 import org.apache.streampark.console.core.task.FlinkRESTAPIWatcher;
 import org.apache.streampark.flink.packer.docker.DockerConf;
+import org.apache.streampark.flink.packer.maven.DependencyInfo;
 import org.apache.streampark.flink.packer.pipeline.BuildPipeline;
 import org.apache.streampark.flink.packer.pipeline.BuildResult;
 import org.apache.streampark.flink.packer.pipeline.DockerBuildSnapshot;
@@ -87,6 +90,7 @@ import 
org.springframework.transaction.annotation.Transactional;
 import javax.annotation.Nonnull;
 
 import java.io.File;
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
@@ -124,6 +128,8 @@ public class AppBuildPipeServiceImpl
 
   @Autowired private ApplicationConfigService applicationConfigService;
 
+  @Autowired private ResourceService resourceService;
+
   private final ExecutorService executorService =
       new ThreadPoolExecutor(
           Runtime.getRuntime().availableProcessors() * 5,
@@ -152,6 +158,7 @@ public class AppBuildPipeServiceImpl
       FlinkSql flinkSql = newFlinkSql == null ? effectiveFlinkSql : 
newFlinkSql;
       Utils.notNull(flinkSql);
       app.setDependency(flinkSql.getDependency());
+      app.setTeamResource(flinkSql.getTeamResource());
     }
 
     // create pipeline instance
@@ -188,10 +195,15 @@ public class AppBuildPipeServiceImpl
               FsOperator fsOperator = app.getFsOperator();
               fsOperator.delete(appHome);
               if (app.isUploadJob()) {
-                File localJar = new File(WebUtils.getAppTempDir(), 
app.getJar());
+                File localJar =
+                    new File(
+                        String.format(
+                            "%s/%d/%s",
+                            Workspace.local().APP_UPLOADS(), app.getTeamId(), 
app.getJar()));
                 // upload jar copy to appHome
                 String uploadJar = appUploads.concat("/").concat(app.getJar());
                 checkOrElseUploadJar(app.getFsOperator(), localJar, uploadJar, 
appUploads);
+
                 switch (app.getApplicationType()) {
                   case STREAMPARK_FLINK:
                     fsOperator.mkdirs(app.getAppLib());
@@ -352,7 +364,7 @@ public class AppBuildPipeServiceImpl
                 localWorkspace,
                 yarnProvidedPath,
                 app.getDevelopmentMode(),
-                app.getDependencyInfo());
+                getMergedDependencyInfo(app));
         log.info("Submit params to building pipeline : {}", yarnAppRequest);
         return FlinkYarnApplicationBuildPipeline.of(yarnAppRequest);
       case YARN_PER_JOB:
@@ -368,7 +380,7 @@ public class AppBuildPipeServiceImpl
                 app.getExecutionModeEnum(),
                 app.getDevelopmentMode(),
                 flinkEnv.getFlinkVersion(),
-                app.getDependencyInfo());
+                getMergedDependencyInfo(app));
         log.info("Submit params to building pipeline : {}", buildRequest);
         return FlinkRemoteBuildPipeline.of(buildRequest);
       case KUBERNETES_NATIVE_SESSION:
@@ -381,7 +393,7 @@ public class AppBuildPipeServiceImpl
                 app.getExecutionModeEnum(),
                 app.getDevelopmentMode(),
                 flinkEnv.getFlinkVersion(),
-                app.getDependencyInfo(),
+                getMergedDependencyInfo(app),
                 app.getClusterId(),
                 app.getK8sNamespace());
         log.info("Submit params to building pipeline : {}", 
k8sSessionBuildRequest);
@@ -396,7 +408,7 @@ public class AppBuildPipeServiceImpl
                 app.getExecutionModeEnum(),
                 app.getDevelopmentMode(),
                 flinkEnv.getFlinkVersion(),
-                app.getDependencyInfo(),
+                getMergedDependencyInfo(app),
                 app.getClusterId(),
                 app.getK8sNamespace(),
                 app.getFlinkImage(),
@@ -505,4 +517,25 @@ public class AppBuildPipeServiceImpl
       }
     }
   }
+
+  private DependencyInfo getMergedDependencyInfo(Application application) {
+    DependencyInfo dependencyInfo = application.getDependencyInfo();
+
+    try {
+      String[] teamJarIds = JacksonUtils.read(application.getTeamResource(), 
String[].class);
+      List<String> teamJarsFullPath =
+          Arrays.stream(teamJarIds)
+              .map(jarId -> resourceService.getById(jarId).getResourceName())
+              .map(
+                  jar ->
+                      String.format(
+                          "%s/%d/%s",
+                          Workspace.local().APP_UPLOADS(), 
application.getTeamId(), jar))
+              .collect(Collectors.toList());
+      return dependencyInfo.merge(teamJarsFullPath);
+    } catch (Exception e) {
+      log.warn("Merge team dependency failed.");
+      return dependencyInfo;
+    }
+  }
 }
diff --git 
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ApplicationServiceImpl.java
 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ApplicationServiceImpl.java
index 2242b897e..b6c55342f 100644
--- 
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ApplicationServiceImpl.java
+++ 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ApplicationServiceImpl.java
@@ -718,7 +718,8 @@ public class ApplicationServiceImpl extends 
ServiceImpl<ApplicationMapper, Appli
     appParam.doSetHotParams();
     if (appParam.isUploadJob()) {
       String jarPath =
-          
WebUtils.getAppTempDir().getAbsolutePath().concat("/").concat(appParam.getJar());
+          String.format(
+              "%s/%d/%s", Workspace.local().APP_UPLOADS(), 
appParam.getTeamId(), appParam.getJar());
       appParam.setJarCheckSum(FileUtils.checksumCRC32(new File(jarPath)));
     }
 
@@ -803,6 +804,7 @@ public class ApplicationServiceImpl extends 
ServiceImpl<ApplicationMapper, Appli
       if (newApp.isFlinkSqlJob()) {
         FlinkSql copyFlinkSql = 
flinkSqlService.getLatestFlinkSql(appParam.getId(), true);
         newApp.setFlinkSql(copyFlinkSql.getSql());
+        newApp.setTeamResource(copyFlinkSql.getTeamResource());
         newApp.setDependency(copyFlinkSql.getDependency());
         FlinkSql flinkSql = new FlinkSql(newApp);
         flinkSqlService.create(flinkSql);
diff --git 
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ResourceServiceImpl.java
 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ResourceServiceImpl.java
new file mode 100644
index 000000000..b2894ab8e
--- /dev/null
+++ 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ResourceServiceImpl.java
@@ -0,0 +1,184 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.streampark.console.core.service.impl;
+
+import org.apache.streampark.common.conf.Workspace;
+import org.apache.streampark.common.fs.FsOperator;
+import org.apache.streampark.console.base.domain.RestRequest;
+import org.apache.streampark.console.base.exception.ApiAlertException;
+import org.apache.streampark.console.base.mybatis.pager.MybatisPager;
+import org.apache.streampark.console.base.util.WebUtils;
+import org.apache.streampark.console.core.entity.Application;
+import org.apache.streampark.console.core.entity.FlinkSql;
+import org.apache.streampark.console.core.entity.Resource;
+import org.apache.streampark.console.core.mapper.ResourceMapper;
+import org.apache.streampark.console.core.service.ApplicationService;
+import org.apache.streampark.console.core.service.CommonService;
+import org.apache.streampark.console.core.service.FlinkSqlService;
+import org.apache.streampark.console.core.service.ResourceService;
+
+import org.apache.commons.collections.CollectionUtils;
+
+import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
+import com.baomidou.mybatisplus.core.metadata.IPage;
+import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
+import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Service;
+import org.springframework.transaction.annotation.Propagation;
+import org.springframework.transaction.annotation.Transactional;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+@Slf4j
+@Service
+@Transactional(propagation = Propagation.SUPPORTS, readOnly = true, 
rollbackFor = Exception.class)
+public class ResourceServiceImpl extends ServiceImpl<ResourceMapper, Resource>
+    implements ResourceService {
+
+  @Autowired private ApplicationService applicationService;
+  @Autowired private CommonService commonService;
+  @Autowired private FlinkSqlService flinkSqlService;
+
+  @Override
+  public IPage<Resource> page(Resource resource, RestRequest restRequest) {
+    if (resource.getTeamId() == null) {
+      return null;
+    }
+    Page<Resource> page = new 
MybatisPager<Resource>().getDefaultPage(restRequest);
+    return this.baseMapper.page(page, resource);
+  }
+
+  @Override
+  public void addResource(Resource resource) {
+    String resourceName = resource.getResourceName();
+    ApiAlertException.throwIfNull(resourceName, "No resource uploaded.");
+
+    Long teamId = resource.getTeamId();
+    ApiAlertException.throwIfTrue(
+        this.findByResourceName(teamId, resourceName) != null,
+        String.format("Sorry, the resource %s already exists.", 
resource.getResourceName()));
+
+    // copy jar to team upload directory
+    transferTeamResource(teamId, resourceName);
+
+    resource.setCreatorId(commonService.getUserId());
+    this.save(resource);
+  }
+
+  @Override
+  public Resource findByResourceName(Long teamId, String name) {
+    LambdaQueryWrapper<Resource> queryWrapper =
+        new LambdaQueryWrapper<Resource>()
+            .eq(Resource::getResourceName, name)
+            .eq(Resource::getTeamId, teamId);
+    return baseMapper.selectOne(queryWrapper);
+  }
+
+  @Override
+  public void updateResource(Resource resource) {
+    Resource findResource = getById(resource.getId());
+    checkOrElseAlert(findResource);
+
+    String resourceName = resource.getResourceName();
+    if (resourceName != null) {
+      ApiAlertException.throwIfFalse(
+          resourceName.equals(findResource.getResourceName()),
+          "Please make sure the resource name is not changed.");
+      transferTeamResource(findResource.getTeamId(), resourceName);
+    }
+
+    findResource.setDescription(resource.getDescription());
+    baseMapper.updateById(findResource);
+  }
+
+  @Override
+  public void deleteResource(Resource resource) {
+    Resource findResource = getById(resource.getId());
+    checkOrElseAlert(findResource);
+
+    FsOperator.lfs()
+        .delete(
+            String.format(
+                "%s/%d/%s",
+                Workspace.local().APP_UPLOADS(),
+                findResource.getTeamId(),
+                findResource.getResourceName()));
+
+    this.removeById(resource);
+  }
+
+  @Override
+  public List<Resource> findByTeamId(Long teamId) {
+    LambdaQueryWrapper<Resource> queryWrapper =
+        new LambdaQueryWrapper<Resource>().eq(Resource::getTeamId, teamId);
+    return baseMapper.selectList(queryWrapper);
+  }
+
+  private void transferTeamResource(Long teamId, String resourceName) {
+    String teamUploads = String.format("%s/%d", 
Workspace.local().APP_UPLOADS(), teamId);
+    if (!FsOperator.lfs().exists(teamUploads)) {
+      FsOperator.lfs().mkdirs(teamUploads);
+    }
+    File localJar = new File(WebUtils.getAppTempDir(), resourceName);
+    File teamUploadJar = new File(teamUploads, resourceName);
+    ApiAlertException.throwIfFalse(
+        localJar.exists(), "Missing file: " + resourceName + ", please upload 
again");
+    FsOperator.lfs()
+        .upload(localJar.getAbsolutePath(), teamUploadJar.getAbsolutePath(), 
false, true);
+  }
+
+  private void checkOrElseAlert(Resource resource) {
+    ApiAlertException.throwIfNull(resource, "The resource does not exist.");
+
+    ApiAlertException.throwIfTrue(
+        isDependByApplications(resource),
+        "Sorry, the resource is still in use, cannot be removed.");
+  }
+
+  private boolean isDependByApplications(Resource resource) {
+    return CollectionUtils.isNotEmpty(getResourceApplicationsById(resource));
+  }
+
+  private List<Application> getResourceApplicationsById(Resource resource) {
+    List<Application> dependApplications = new ArrayList<>();
+    List<Application> applications = 
applicationService.getByTeamId(resource.getTeamId());
+    Map<Long, Application> applicationMap =
+        applications.stream()
+            .collect(Collectors.toMap(Application::getId, application -> 
application));
+
+    // Get the application that depends on this resource
+    List<FlinkSql> flinkSqls = 
flinkSqlService.getByTeamId(resource.getTeamId());
+    for (FlinkSql flinkSql : flinkSqls) {
+      String sqlTeamResource = flinkSql.getTeamResource();
+      // teamResource holds a JSON array of resource ids (see getMergedDependencyInfo),
+      // so membership must be tested against the resource id, not the team id
+      if (sqlTeamResource != null
+          && sqlTeamResource.contains(String.valueOf(resource.getId()))) {
+        Application app = applicationMap.get(flinkSql.getAppId());
+        if (!dependApplications.contains(app)) {
+          dependApplications.add(applicationMap.get(flinkSql.getAppId()));
+        }
+      }
+    }
+
+    return dependApplications;
+  }
+}
diff --git 
a/streampark-console/streampark-console-service/src/main/resources/db/data-h2.sql
 
b/streampark-console/streampark-console-service/src/main/resources/db/data-h2.sql
index 77494579a..964ccb171 100644
--- 
a/streampark-console/streampark-console-service/src/main/resources/db/data-h2.sql
+++ 
b/streampark-console/streampark-console-service/src/main/resources/db/data-h2.sql
@@ -40,7 +40,7 @@ insert into `t_flink_project` values (100000, 100000, 
'streampark-quickstart', '
 -- ----------------------------
 -- Records of t_flink_sql
 -- ----------------------------
-insert into `t_flink_sql` values (100000, 100000, 
'eNqlUUtPhDAQvu+vmFs1AYIHT5s94AaVqGxSSPZIKgxrY2mxrdGfb4GS3c0+LnJo6Mz36syapkmZQpk8vKbQMMt2KOFmAe5rK4Nf3yhrhCwvA1/TTDaqO61UxmooSprlT1PDGkgKEKpmwvIOjWVdP3W2zpG+JfQFHjfU46xxrVvYZuWztye1khJrqzSBFRCfjUwSYQiqt1xJJvyPcbWJp9WPCXvUoUEn0ZAVufcs0nIUjYn2L4s++YiY75eBLr+2Dnl3GYKTWRyfQKYRRR2XZxXmNvu9yh9GHAmUO/sxyMRkGNly4c714RZ7zaWtLHsX+N9NjvVrWxm99jmyvEhpOUhujmIYFI5zkCOYzYIj11a7QH7Tyz+nE8bw',
 null, 1, 1, now());
+insert into `t_flink_sql` values (100000, 100000, 
'eNqlUUtPhDAQvu+vmFs1AYIHT5s94AaVqGxSSPZIKgxrY2mxrdGfb4GS3c0+LnJo6Mz36syapkmZQpk8vKbQMMt2KOFmAe5rK4Nf3yhrhCwvA1/TTDaqO61UxmooSprlT1PDGkgKEKpmwvIOjWVdP3W2zpG+JfQFHjfU46xxrVvYZuWztye1khJrqzSBFRCfjUwSYQiqt1xJJvyPcbWJp9WPCXvUoUEn0ZAVufcs0nIUjYn2L4s++YiY75eBLr+2Dnl3GYKTWRyfQKYRRR2XZxXmNvu9yh9GHAmUO/sxyMRkGNly4c714RZ7zaWtLHsX+N9NjvVrWxm99jmyvEhpOUhujmIYFI5zkCOYzYIj11a7QH7Tyz+nE8bw',
 null, null, 1, 1, now());
 
 -- ----------------------------
 -- Records of t_menu
@@ -57,6 +57,7 @@ insert into `t_menu` values (110600, 110000, 
'menu.memberManagement', '/system/m
 insert into `t_menu` values (120100, 120000, 'menu.project', '/flink/project', 
'flink/project/View', null, 'github', '0', 1, 1, now(), now());
 insert into `t_menu` values (120200, 120000, 'menu.application', '/flink/app', 
'flink/app/View', null, 'mobile', '0', 1, 2, now(), now());
 insert into `t_menu` values (120300, 120000, 'menu.variable', 
'/flink/variable', 'flink/variable/View', null, 'code', '0', 1, 3, now(), 
now());
+insert into `t_menu` values (120400, 120000, 'menu.resource', 
'/flink/resource', 'flink/resource/View', null, 'apartment', '0', 1, 3, now(), 
now());
 insert into `t_menu` values (130100, 130000, 'setting.system', 
'/setting/system', 'setting/System/index', null, 'database', '0', 1, 1, now(), 
now());
 insert into `t_menu` values (130200, 130000, 'setting.alarm', 
'/setting/alarm', 'setting/Alarm/index', null, 'alert', '0', 1, 2, now(), 
now());
 insert into `t_menu` values (130300, 130000, 'setting.flinkHome', 
'/setting/flinkHome', 'setting/FlinkHome/index', null, 'desktop', '0', 1, 3, 
now(), now());
@@ -115,6 +116,9 @@ insert into `t_menu` values (120304, 120300, 'depend apps', 
'/flink/variable/dep
 insert into `t_menu` values (120305, 120300, 'show original', NULL, NULL, 
'variable:show_original', NULL, '1', 1, NULL, now(), now());
 insert into `t_menu` values (120306, 120300, 'view', NULL, NULL, 
'variable:view', NULL, '1', 1, null, now(), now());
 insert into `t_menu` values (120307, 120300, 'depend view', null, null, 
'variable:depend_apps', null, '1', 1, NULL, now(), now());
+insert into `t_menu` values (120401, 120400, 'add', NULL, NULL, 
'resource:add', NULL, '1', 1, NULL, now(), now());
+insert into `t_menu` values (120402, 120400, 'update', NULL, NULL, 
'resource:update', NULL, '1', 1, NULL, now(), now());
+insert into `t_menu` values (120403, 120400, 'delete', NULL, NULL, 
'resource:delete', NULL, '1', 1, NULL, now(), now());
 insert into `t_menu` values (130101, 130100, 'view', null, null, 
'setting:view', null, '1', 1, null, now(), now());
 insert into `t_menu` values (130102, 130100, 'setting update', null, null, 
'setting:update', null, '1', 1, null, now(), now());
 insert into `t_menu` values (130401, 130400, 'add cluster', 
'/setting/add_cluster', 'setting/FlinkCluster/AddCluster', 'cluster:create', 
'', '0', 0, null, now(), now());
@@ -162,6 +166,10 @@ insert into `t_role_menu` (role_id, menu_id) values 
(100001, 120300);
 insert into `t_role_menu` (role_id, menu_id) values (100001, 120304);
 insert into `t_role_menu` (role_id, menu_id) values (100001, 120306);
 insert into `t_role_menu` (role_id, menu_id) values (100001, 120307);
+insert into `t_role_menu` (role_id, menu_id) values (100001, 120400);
+insert into `t_role_menu` (role_id, menu_id) values (100001, 120401);
+insert into `t_role_menu` (role_id, menu_id) values (100001, 120402);
+insert into `t_role_menu` (role_id, menu_id) values (100001, 120403);
 insert into `t_role_menu` (role_id, menu_id) values (100001, 130000);
 insert into `t_role_menu` (role_id, menu_id) values (100001, 130100);
 insert into `t_role_menu` (role_id, menu_id) values (100001, 130101);
@@ -206,6 +214,10 @@ insert into `t_role_menu` (role_id, menu_id) values 
(100002, 120304);
 insert into `t_role_menu` (role_id, menu_id) values (100002, 120305);
 insert into `t_role_menu` (role_id, menu_id) values (100002, 120306);
 insert into `t_role_menu` (role_id, menu_id) values (100002, 120307);
+insert into `t_role_menu` (role_id, menu_id) values (100002, 120400);
+insert into `t_role_menu` (role_id, menu_id) values (100002, 120401);
+insert into `t_role_menu` (role_id, menu_id) values (100002, 120402);
+insert into `t_role_menu` (role_id, menu_id) values (100002, 120403);
 insert into `t_role_menu` (role_id, menu_id) values (100002, 130000);
 insert into `t_role_menu` (role_id, menu_id) values (100002, 130100);
 insert into `t_role_menu` (role_id, menu_id) values (100002, 130101);
diff --git 
a/streampark-console/streampark-console-service/src/main/resources/db/schema-h2.sql
 
b/streampark-console/streampark-console-service/src/main/resources/db/schema-h2.sql
index 3f7025a83..0ca748de1 100644
--- 
a/streampark-console/streampark-console-service/src/main/resources/db/schema-h2.sql
+++ 
b/streampark-console/streampark-console-service/src/main/resources/db/schema-h2.sql
@@ -205,6 +205,7 @@ create table if not exists `t_flink_sql` (
   `id` bigint generated by default as identity not null,
   `app_id` bigint default null,
   `sql` text ,
+  `team_resource` varchar(64) default null,
   `dependency` text ,
   `version` int default null,
   `candidate` tinyint not null default 1,
@@ -212,6 +213,22 @@ create table if not exists `t_flink_sql` (
   primary key(`id`)
 );
 
+-- ----------------------------
+-- Table of t_resource
+-- ----------------------------
+create table if not exists `t_resource` (
+  `id` bigint generated by default as identity not null,
+  `resource_name` varchar(128) not null comment 'The name of the resource 
file',
+  `resource_type` int not null comment '0:app 1:common 2:connector 3:format 
4:udf',
+  `main_class` varchar(255) default null,
+  `description` text default null comment 'More detailed description of 
resource',
+  `creator_id` bigint not null comment 'user id of creator',
+  `team_id` bigint not null comment 'team id',
+  `create_time` datetime not null default current_timestamp comment 'create 
time',
+  `modify_time` datetime not null default current_timestamp on update 
current_timestamp comment 'modify time',
+  primary key (`id`)
+);
+
 -- ----------------------------
 -- Table structure for t_menu
 -- ----------------------------
diff --git 
a/streampark-console/streampark-console-service/src/main/resources/mapper/core/FlinkSqlMapper.xml
 
b/streampark-console/streampark-console-service/src/main/resources/mapper/core/FlinkSqlMapper.xml
index cf110077b..ad267c797 100644
--- 
a/streampark-console/streampark-console-service/src/main/resources/mapper/core/FlinkSqlMapper.xml
+++ 
b/streampark-console/streampark-console-service/src/main/resources/mapper/core/FlinkSqlMapper.xml
@@ -22,6 +22,7 @@
         <result column="app_id" jdbcType="BIGINT" property="appId"/>
         <result column="sql" jdbcType="LONGVARCHAR" property="sql"/>
         <result column="candidate" jdbcType="INTEGER" property="candidate"/>
+        <result column="team_resource" jdbcType="VARCHAR" 
property="teamResource"/>
         <result column="dependency" jdbcType="VARCHAR" property="dependency"/>
         <result column="create_time" jdbcType="DATE" property="createTime"/>
     </resultMap>
diff --git 
a/streampark-console/streampark-console-service/src/main/resources/mapper/core/ResourceMapper.xml
 
b/streampark-console/streampark-console-service/src/main/resources/mapper/core/ResourceMapper.xml
new file mode 100644
index 000000000..53fd85caa
--- /dev/null
+++ 
b/streampark-console/streampark-console-service/src/main/resources/mapper/core/ResourceMapper.xml
@@ -0,0 +1,47 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one or more
+  ~ contributor license agreements.  See the NOTICE file distributed with
+  ~ this work for additional information regarding copyright ownership.
+  ~ The ASF licenses this file to You under the Apache License, Version 2.0
+  ~ (the "License"); you may not use this file except in compliance with
+  ~ the License.  You may obtain a copy of the License at
+  ~
+  ~     http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" 
"http://mybatis.org/dtd/mybatis-3-mapper.dtd";>
+<mapper namespace="org.apache.streampark.console.core.mapper.ResourceMapper">
+    <resultMap id="ResourceMap" 
type="org.apache.streampark.console.core.entity.Resource">
+        <result column="id" jdbcType="BIGINT" property="id"/>
+        <result column="resource_name" jdbcType="VARCHAR" 
property="resourceName"/>
+        <result column="description" jdbcType="VARCHAR" 
property="description"/>
+        <result column="resource_type" jdbcType="BIGINT" 
property="resourceType"/>
+        <result column="creator_id" jdbcType="BIGINT" property="creatorId"/>
+        <result column="team_id" jdbcType="BIGINT" property="teamId"/>
+        <result column="create_time" jdbcType="TIMESTAMP" 
property="createTime"/>
+        <result column="modify_time" jdbcType="TIMESTAMP" 
property="modifyTime"/>
+    </resultMap>
+
+    <select id="page" 
resultType="org.apache.streampark.console.core.entity.Resource" 
parameterType="org.apache.streampark.console.core.entity.Resource">
+        select
+        v.*,
+        u.username as creatorName
+        from t_resource v
+        inner join t_user u
+        on v.creator_id = u.user_id
+        and v.team_id = ${resource.teamId}
+        <if test="resource.resourceName != null and resource.resourceName != 
''">
+            and v.resourceName like '%${resource.resourceName}%'
+        </if>
+        <if test="resource.description != null and resource.description != ''">
+            and v.description like '%${resource.description}%'
+        </if>
+    </select>
+
+</mapper>
diff --git 
a/streampark-console/streampark-console-webapp/src/api/flink/app/app.type.ts 
b/streampark-console/streampark-console-webapp/src/api/flink/app/app.type.ts
index a56cb4dad..64b6f9a93 100644
--- a/streampark-console/streampark-console-webapp/src/api/flink/app/app.type.ts
+++ b/streampark-console/streampark-console-webapp/src/api/flink/app/app.type.ts
@@ -101,6 +101,7 @@ export interface AppListRecord {
   resourceFrom?: number;
   k8sHadoopIntegration: boolean;
   overview?: any;
+  teamResource?: any;
   dependency?: any;
   sqlId?: any;
   flinkSql?: any;
diff --git 
a/streampark-console/streampark-console-webapp/src/api/flink/resource/index.ts 
b/streampark-console/streampark-console-webapp/src/api/flink/resource/index.ts
new file mode 100644
index 000000000..3fc106fd3
--- /dev/null
+++ 
b/streampark-console/streampark-console-webapp/src/api/flink/resource/index.ts
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import { AxiosResponse } from 'axios';
+import { defHttp } from '/@/utils/http/axios';
+import { Result } from '/#/axios';
+import {
+  BasicTableParams,
+  ResourceDeleteParam,
+  ResourceListRecord,
+  ResourceParam,
+} from './model/resourceModel';
+
+enum RESOURCE_API {
+  PAGE = '/resource/page',
+  POST = '/resource/add',
+  UPDATE = '/resource/update',
+  DELETE = '/resource/delete',
+  LIST = '/resource/list',
+}
+
+/**
+ * get dependency list
+ * @param data
+ * @returns
+ */
+export function fetchResourceList(data: BasicTableParams): 
Promise<ResourceListRecord[]> {
+  return defHttp.post({ url: RESOURCE_API.PAGE, data });
+}
+
+/**
+ * add dependency
+ * @param {ResourceParam} data
+ * @returns {Promise<boolean>}
+ */
+export function fetchAddResource(data: ResourceParam): Promise<boolean> {
+  return defHttp.post({ url: RESOURCE_API.POST, data });
+}
+
+/**
+ * update dependency
+ * @param {ResourceParam} data
+ * @returns {Promise<boolean|undefined>}
+ */
+export function fetchUpdateResource(data: ResourceParam): Promise<boolean | 
undefined> {
+  return defHttp.put({ url: RESOURCE_API.UPDATE, data });
+}
+
+/**
+ * delete
+ * @param {ResourceDeleteParam} data
+ * @returns {Promise<AxiosResponse<Result>>}
+ */
+export function fetchResourceDelete(data: ResourceDeleteParam): 
Promise<AxiosResponse<Result>> {
+  return defHttp.delete({ url: RESOURCE_API.DELETE, data }, { 
isReturnNativeResponse: true });
+}
+
+/**
+ * get team dependency list
+ * @param data
+ * @returns
+ */
+export function fetchTeamResource(data: Recordable): 
Promise<ResourceListRecord[]> {
+  return defHttp.post({ url: RESOURCE_API.LIST, data });
+}
+
diff --git 
a/streampark-console/streampark-console-webapp/src/locales/lang/en/menu.ts 
b/streampark-console/streampark-console-webapp/src/api/flink/resource/model/resourceModel.ts
similarity index 56%
copy from 
streampark-console/streampark-console-webapp/src/locales/lang/en/menu.ts
copy to 
streampark-console/streampark-console-webapp/src/api/flink/resource/model/resourceModel.ts
index e13a8a5bc..8f9b2754d 100644
--- a/streampark-console/streampark-console-webapp/src/locales/lang/en/menu.ts
+++ 
b/streampark-console/streampark-console-webapp/src/api/flink/resource/model/resourceModel.ts
@@ -14,26 +14,36 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-export default {
-  menu: {
-    system: 'System',
-    userManagement: 'User Management',
-    roleManagement: 'Role Management',
-    menuManagement: 'Menu Management',
-    tokenManagement: 'Token Management',
-    teamManagement: 'Team Management',
-    memberManagement: 'Member Management',
-    project: 'Project',
-    application: 'Application',
-    variable: 'Variable',
-    setting: 'Settings',
-  },
-  setting: {
-    system: 'System Setting',
-    alarm: 'Alarm Setting',
-    flinkHome: 'Flink Home',
-    flinkCluster: 'Flink Cluster',
-    externalLink: 'External Link',
-    yarnQueue: 'Yarn Queue',
-  },
-};
+export interface ResourceListRecord {
+  id: string;
+  resourceName: string;
+  resourceType: string;
+  mainClass: string;
+  description: string;
+  creatorId: string;
+  creatorName: string;
+  teamId: string;
+  createTime: string;
+  modifyTime: string;
+  sortField?: string;
+  sortOrder?: string;
+}
+
+export interface ResourceParam {
+  id?: string;
+  resourceName: string;
+  description: string;
+}
+
+export interface ResourceDeleteParam {
+  id: string;
+  resourceName: string;
+  teamId: string;
+}
+
+export interface BasicTableParams {
+  page: number;
+  pageSize: number;
+  teamId: string;
+  [key: string]: Nullable<string | number>;
+}
diff --git 
a/streampark-console/streampark-console-webapp/src/locales/lang/en/flink/app.ts 
b/streampark-console/streampark-console-webapp/src/locales/lang/en/flink/app.ts
index 7e1a6abd2..70236a86a 100644
--- 
a/streampark-console/streampark-console-webapp/src/locales/lang/en/flink/app.ts
+++ 
b/streampark-console/streampark-console-webapp/src/locales/lang/en/flink/app.ts
@@ -29,6 +29,9 @@ export default {
   developmentMode: 'Development Mode',
   executionMode: 'Execution Mode',
   historyVersion: 'History Version',
+  teamResource: 'Team Resource',
+  teamResourcePlaceHolder: 'choose resource from team library',
+  selectAppPlaceHolder: 'choose app jar',
   dependency: 'Dependency',
   appConf: 'Application Conf',
   resolveOrder: 'resolveOrder',
@@ -50,6 +53,7 @@ export default {
   restServiceExposedType: 'Rest-Service Exposed Type',
   resourceFrom: 'Resource From',
   uploadJobJar: 'Upload Job Jar',
+  selectJobJar: 'Select Job Jar',
   mainClass: 'Program Main',
   project: 'Project',
   module: 'Module',
diff --git 
a/streampark-console/streampark-console-webapp/src/locales/lang/en/flink/resource.ts
 
b/streampark-console/streampark-console-webapp/src/locales/lang/en/flink/resource.ts
new file mode 100644
index 000000000..3e523ee7a
--- /dev/null
+++ 
b/streampark-console/streampark-console-webapp/src/locales/lang/en/flink/resource.ts
@@ -0,0 +1,43 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+export default {
+  addResource: 'Add Resource',
+  resourceInfoTitle: 'Resource Info',
+  modifyResource: 'Modify Resource',
+  deleteResource: 'Delete Resource',
+  deletePopConfirm: 'Are you sure delete this resource ?',
+  uploadResource: 'Upload Resource',
+  resourceType: 'Resource Type',
+  add: 'Add',
+  success: ' successful',
+  fail: ' failed',
+  table: {
+    title: 'Resource List',
+    resourceName: 'Resource Name',
+    resourceNamePlaceholder: 'Please enter the resource name to search',
+    descriptionPlaceholder: 'Please enter description to search',
+    createUser: 'Create User',
+    createTime: 'Create Time',
+    modifyTime: 'Modify Time',
+    description: 'Description',
+  },
+  form: {
+    descriptionMessage: 'exceeds maximum length limit of 100 characters',
+    exists: 'Sorry, the Resource already exists',
+    empty: 'Resource cannot be empty',
+  },
+};
diff --git 
a/streampark-console/streampark-console-webapp/src/locales/lang/en/menu.ts 
b/streampark-console/streampark-console-webapp/src/locales/lang/en/menu.ts
index e13a8a5bc..8d93eed0c 100644
--- a/streampark-console/streampark-console-webapp/src/locales/lang/en/menu.ts
+++ b/streampark-console/streampark-console-webapp/src/locales/lang/en/menu.ts
@@ -26,6 +26,7 @@ export default {
     project: 'Project',
     application: 'Application',
     variable: 'Variable',
+    resource: 'Resource',
     setting: 'Settings',
   },
   setting: {
diff --git 
a/streampark-console/streampark-console-webapp/src/locales/lang/zh-CN/flink/app.ts
 
b/streampark-console/streampark-console-webapp/src/locales/lang/zh-CN/flink/app.ts
index 7ac9ef7e0..92e788091 100644
--- 
a/streampark-console/streampark-console-webapp/src/locales/lang/zh-CN/flink/app.ts
+++ 
b/streampark-console/streampark-console-webapp/src/locales/lang/zh-CN/flink/app.ts
@@ -29,6 +29,9 @@ export default {
   developmentMode: '作业模式',
   executionMode: '执行模式',
   historyVersion: '历史版本',
+  teamResource: '团队资源库',
+  teamResourcePlaceHolder: '从团队资源库中选择资源',
+  selectAppPlaceHolder: '选择作业',
   dependency: '作业依赖',
   appConf: '作业配置',
   resolveOrder: '类加载顺序',
@@ -50,6 +53,7 @@ export default {
   restServiceExposedType: 'K8S服务对外类型',
   resourceFrom: '资源来源',
   uploadJobJar: '上传jar文件',
+  selectJobJar: '选择jar文件',
   mainClass: '程序入口类',
   project: '项目',
   module: '模块',
diff --git 
a/streampark-console/streampark-console-webapp/src/locales/lang/en/menu.ts 
b/streampark-console/streampark-console-webapp/src/locales/lang/zh-CN/flink/resource.ts
similarity index 52%
copy from 
streampark-console/streampark-console-webapp/src/locales/lang/en/menu.ts
copy to 
streampark-console/streampark-console-webapp/src/locales/lang/zh-CN/flink/resource.ts
index e13a8a5bc..d46507bbd 100644
--- a/streampark-console/streampark-console-webapp/src/locales/lang/en/menu.ts
+++ 
b/streampark-console/streampark-console-webapp/src/locales/lang/zh-CN/flink/resource.ts
@@ -15,25 +15,29 @@
  * limitations under the License.
  */
 export default {
-  menu: {
-    system: 'System',
-    userManagement: 'User Management',
-    roleManagement: 'Role Management',
-    menuManagement: 'Menu Management',
-    tokenManagement: 'Token Management',
-    teamManagement: 'Team Management',
-    memberManagement: 'Member Management',
-    project: 'Project',
-    application: 'Application',
-    variable: 'Variable',
-    setting: 'Settings',
+  addResource: '添加资源',
+  resourceInfoTitle: '资源详情',
+  modifyResource: '修改资源',
+  deleteResource: '删除资源',
+  deletePopConfirm: '你确定要删除这个资源?',
+  uploadResource: '上传资源',
+  resourceType: '资源类型',
+  add: '添加',
+  success: '成功',
+  fail: '失败',
+  table: {
+    title: '资源列表',
+    resourceName: '资源名称',
+    resourceNamePlaceholder: '输入资源名查询',
+    descriptionPlaceholder: '输入描述',
+    createUser: '创建者',
+    createTime: '创建时间',
+    modifyTime: '修改时间',
+    description: '描述',
   },
-  setting: {
-    system: 'System Setting',
-    alarm: 'Alarm Setting',
-    flinkHome: 'Flink Home',
-    flinkCluster: 'Flink Cluster',
-    externalLink: 'External Link',
-    yarnQueue: 'Yarn Queue',
+  form: {
+    descriptionMessage: '超过 100 个字符的最大长度限制',
+    exists: '资源已存在',
+    empty: '资源不能为空',
   },
 };
diff --git 
a/streampark-console/streampark-console-webapp/src/locales/lang/zh-CN/menu.ts 
b/streampark-console/streampark-console-webapp/src/locales/lang/zh-CN/menu.ts
index b3b8f3183..1a359ee49 100644
--- 
a/streampark-console/streampark-console-webapp/src/locales/lang/zh-CN/menu.ts
+++ 
b/streampark-console/streampark-console-webapp/src/locales/lang/zh-CN/menu.ts
@@ -26,6 +26,7 @@ export default {
     project: '项目管理',
     application: '作业管理',
     variable: '变量管理',
+    resource: '资源管理',
     setting: '设置中心',
   },
   setting: {
diff --git 
a/streampark-console/streampark-console-webapp/src/views/flink/app/Add.vue 
b/streampark-console/streampark-console-webapp/src/views/flink/app/Add.vue
index 541503f69..44dc27967 100644
--- a/streampark-console/streampark-console-webapp/src/views/flink/app/Add.vue
+++ b/streampark-console/streampark-console-webapp/src/views/flink/app/Add.vue
@@ -208,7 +208,7 @@
         Object.assign(params, {
           resourceFrom: ResourceFromEnum.UPLOAD,
           appType: AppTypeEnum.APACHE_FLINK,
-          jar: unref(uploadJar),
+          jar: values.uploadJobJar,
           mainClass: values.mainClass,
         });
         handleCreateApp(params);
@@ -248,6 +248,7 @@
       appType: AppTypeEnum.STREAMPARK_FLINK,
       config,
       format: values.isSetConfig ? 1 : null,
+      teamResource: JSON.stringify(values.teamResource),
       dependency:
         dependency.pom === undefined && dependency.jar === undefined
           ? null
@@ -336,19 +337,6 @@
           @click="handleSQLConf(true, model)"
         />
       </template>
-      <template #uploadJobJar>
-        <UploadJobJar :custom-request="handleCustomJobRequest" 
v-model:loading="uploadLoading">
-          <template #uploadInfo>
-            <Alert v-if="uploadJar" class="uploadjar-box" type="info">
-              <template #message>
-                <span class="tag-dependency-pom">
-                  {{ uploadJar }}
-                </span>
-              </template>
-            </Alert>
-          </template>
-        </UploadJobJar>
-      </template>
       <template #podTemplate>
         <PomTemplateTab
           v-model:podTemplate="k8sTemplate.podTemplate"
diff --git 
a/streampark-console/streampark-console-webapp/src/views/flink/app/EditStreamPark.vue
 
b/streampark-console/streampark-console-webapp/src/views/flink/app/EditStreamPark.vue
index 833681c39..523bd87b4 100644
--- 
a/streampark-console/streampark-console-webapp/src/views/flink/app/EditStreamPark.vue
+++ 
b/streampark-console/streampark-console-webapp/src/views/flink/app/EditStreamPark.vue
@@ -27,7 +27,11 @@
   import configOptions from './data/option';
   import { fetchMain, fetchUpload, fetchUpdate, fetchGet } from 
'/@/api/flink/app/app';
   import { useRoute } from 'vue-router';
-  import { getAppConfType, handleSubmitParams } from './utils';
+  import {
+    getAppConfType,
+    handleSubmitParams,
+    handleTeamResource
+  } from './utils';
   import { fetchFlinkHistory } from '/@/api/flink/app/flinkSql';
   import { decodeByBase64, encryptByBase64 } from '/@/utils/cipher';
   import PomTemplateTab from './components/PodTemplate/PomTemplateTab.vue';
@@ -110,6 +114,7 @@
         dynamicProperties: app.dynamicProperties,
         resolveOrder: app.resolveOrder,
         versionId: app.versionId || null,
+        teamResource: handleTeamResource(app.teamResource),
         k8sRestExposedType: app.k8sRestExposedType,
         yarnQueue: app.yarnQueue,
         restartSize: app.restartSize,
@@ -206,6 +211,7 @@
         flinkSql: values.flinkSql,
         config,
         format: values.isSetConfig ? 1 : null,
+        teamResource: JSON.stringify(values.teamResource),
         dependency:
           dependency.pom === undefined && dependency.jar === undefined
             ? null
diff --git 
a/streampark-console/streampark-console-webapp/src/views/flink/app/hooks/useCreateAndEditSchema.ts
 
b/streampark-console/streampark-console-webapp/src/views/flink/app/hooks/useCreateAndEditSchema.ts
index 8d4b7dc1c..132fc7235 100644
--- 
a/streampark-console/streampark-console-webapp/src/views/flink/app/hooks/useCreateAndEditSchema.ts
+++ 
b/streampark-console/streampark-console-webapp/src/views/flink/app/hooks/useCreateAndEditSchema.ts
@@ -25,6 +25,7 @@ import {
   renderInputGroup,
   renderIsSetConfig,
   renderOptionsItems,
+  renderStreamParkResource,
   renderTotalMemory,
   renderYarnQueue,
 } from './useFlinkRender';
@@ -53,6 +54,7 @@ import { ClusterStateEnum, ExecModeEnum, JobTypeEnum } from 
'/@/enums/flinkEnum'
 import { isK8sExecMode } from '../utils';
 import { useI18n } from '/@/hooks/web/useI18n';
 import { fetchCheckHadoop } from '/@/api/flink/setting';
+import { fetchTeamResource } from "/@/api/flink/resource";
 const { t } = useI18n();
 export interface HistoryRecord {
   k8sNamespace: Array<string>;
@@ -67,6 +69,7 @@ export const useCreateAndEditSchema = (
   const alerts = ref<AlertSetting[]>([]);
   const flinkClusters = ref<FlinkCluster[]>([]);
   const projectList = ref<Array<any>>([]);
+  const teamResource = ref<Array<any>>([]);
   const historyRecord = reactive<HistoryRecord>({
     k8sNamespace: [],
     k8sSessionClusterId: [],
@@ -119,6 +122,20 @@ export const useCreateAndEditSchema = (
         },
         rules: [{ required: true, message: 
t('flink.app.addAppTips.flinkSqlIsRequiredMessage') }],
       },
+      {
+        field: 'teamResource',
+        label: t('flink.app.teamResource'),
+        component: 'Select',
+        render: ({ model }) =>
+          renderStreamParkResource( { model, resources: unref(teamResource) }, 
),
+        ifShow: ({ values }) => {
+          if (edit?.appId) {
+            return values.jobType == JobTypeEnum.SQL;
+          } else {
+            return values?.jobType == 'sql';
+          }
+        },
+      },
       {
         field: 'dependency',
         label: t('flink.app.dependency'),
@@ -617,6 +634,11 @@ export const useCreateAndEditSchema = (
         };
       });
     });
+
+    /* Get team dependencies */
+    fetchTeamResource({}).then((res) => {
+      teamResource.value = res;
+    });
   });
   return {
     projectList,
@@ -625,6 +647,7 @@ export const useCreateAndEditSchema = (
     flinkClusters,
     historyRecord,
     suggestions,
+    teamResource,
     getFlinkSqlSchema,
     getFlinkClusterSchemas,
     getFlinkFormOtherSchemas,
diff --git 
a/streampark-console/streampark-console-webapp/src/views/flink/app/hooks/useCreateSchema.ts
 
b/streampark-console/streampark-console-webapp/src/views/flink/app/hooks/useCreateSchema.ts
index 72b11762f..5e6ffca98 100644
--- 
a/streampark-console/streampark-console-webapp/src/views/flink/app/hooks/useCreateSchema.ts
+++ 
b/streampark-console/streampark-console-webapp/src/views/flink/app/hooks/useCreateSchema.ts
@@ -24,7 +24,10 @@ import { fetchMain, fetchName } from '/@/api/flink/app/app';
 import { modules, fetchListConf, fetchListJars } from '/@/api/flink/project';
 import { RuleObject } from 'ant-design-vue/lib/form';
 import { StoreValue } from 'ant-design-vue/lib/form/interface';
-import { renderResourceFrom } from './useFlinkRender';
+import {
+  renderResourceFrom,
+  renderStreamParkJarApp
+} from './useFlinkRender';
 import { filterOption, getAppConfType } from '../utils';
 import { useI18n } from '/@/hooks/web/useI18n';
 const { t } = useI18n();
@@ -56,6 +59,7 @@ export const useCreateSchema = (dependencyRef: Ref) => {
     flinkEnvs,
     flinkClusters,
     projectList,
+    teamResource,
     getFlinkSqlSchema,
     getFlinkClusterSchemas,
     getExecutionModeSchema,
@@ -120,9 +124,10 @@ export const useCreateSchema = (dependencyRef: Ref) => {
       },
       {
         field: 'uploadJobJar',
-        label: t('flink.app.uploadJobJar'),
+        label: t('flink.app.selectJobJar'),
         component: 'Select',
-        slot: 'uploadJobJar',
+        render: ({ model }) =>
+          renderStreamParkJarApp( { model, resources: unref(teamResource) }, ),
         ifShow: ({ values }) => values?.jobType !== 'sql' && 
values?.resourceFrom == 'upload',
       },
       {
diff --git 
a/streampark-console/streampark-console-webapp/src/views/flink/app/hooks/useFlinkRender.tsx
 
b/streampark-console/streampark-console-webapp/src/views/flink/app/hooks/useFlinkRender.tsx
index e23609925..0dd50afc8 100644
--- 
a/streampark-console/streampark-console-webapp/src/views/flink/app/hooks/useFlinkRender.tsx
+++ 
b/streampark-console/streampark-console-webapp/src/views/flink/app/hooks/useFlinkRender.tsx
@@ -43,6 +43,7 @@ import { CandidateTypeEnum, FailoverStrategyEnum } from 
'/@/enums/flinkEnum';
 import { useI18n } from '/@/hooks/web/useI18n';
 import { fetchYarnQueueList } from '/@/api/flink/setting/yarnQueue';
 import { ApiSelect } from '/@/components/Form';
+import {ResourceTypeEnum} from "/@/views/flink/resource/resource.data";
 
 const { t } = useI18n();
 /* render input dropdown component */
@@ -527,3 +528,92 @@ export const renderResourceFrom = (model: Recordable) => {
     </Select>
   );
 };
+
+export const renderStreamParkResource = ({ model, resources },) => {
+
+  const renderOptions = () => {
+    console.log('resources', resources);
+    return (resources || [])
+      .filter((item) => item.resourceType !== ResourceTypeEnum.APP )
+      .map((resource) => {
+      return (
+        <Select.Option
+          key={resource.id}
+          label={ resource.resourceType + '-' + resource.resourceName}>
+          <div>
+            <Tag color="green" style=";margin-left: 5px;" size="small">
+              {resource.resourceType}
+            </Tag>
+            <span style="color: darkgrey">
+              {resource.resourceName}
+            </span>
+          </div>
+        </Select.Option>
+      );
+    });
+  };
+
+  return (
+    <div>
+      <Select
+        show-search
+        allow-clear
+        optionFilterProp="label"
+        mode="multiple"
+        max-tag-count={3}
+        onChange={(value) => (model.teamResource = value)}
+        value={model.teamResource}
+        placeholder={t('flink.app.teamResourcePlaceHolder')}
+        style="width: calc(100% - 60px)"
+      >
+        {renderOptions()}
+      </Select>
+    </div>
+  );
+};
+
+export const renderStreamParkJarApp = ({ model, resources },) => {
+
+  function handleAppChange(value: SelectValue) {
+    const res = resources.filter((item) => item.id == value)[0];
+    model.mainClass = res.mainClass
+    model.uploadJobJar = res.resourceName;
+  }
+
+  const renderOptions = () => {
+    console.log('resources', resources);
+    return (resources || [])
+      .filter((item) => item.resourceType == ResourceTypeEnum.APP )
+      .map((resource) => {
+      return (
+        <Select.Option
+          key={resource.id}
+          label={resource.resourceName}>
+          <div>
+            <Tag color="green" style=";margin-left: 5px;" size="small">
+              {resource.resourceType}
+            </Tag>
+            <span style="color: darkgrey">
+              {resource.resourceName}
+            </span>
+          </div>
+        </Select.Option>
+      );
+    });
+  };
+
+  return (
+    <div>
+      <Select
+        show-search
+        allow-clear
+        optionFilterProp="label"
+        onChange={handleAppChange}
+        value={model.uploadJobJar}
+        placeholder={t('flink.app.selectAppPlaceHolder')}
+      >
+        {renderOptions()}
+      </Select>
+    </div>
+  );
+};
diff --git 
a/streampark-console/streampark-console-webapp/src/views/flink/app/utils/index.ts
 
b/streampark-console/streampark-console-webapp/src/views/flink/app/utils/index.ts
index 98857e361..8e7eba47b 100644
--- 
a/streampark-console/streampark-console-webapp/src/views/flink/app/utils/index.ts
+++ 
b/streampark-console/streampark-console-webapp/src/views/flink/app/utils/index.ts
@@ -311,3 +311,10 @@ export const filterOption = (input: string, options: 
Recordable) => {
 export function isK8sExecMode(mode: number): boolean {
   return [ExecModeEnum.KUBERNETES_SESSION, 
ExecModeEnum.KUBERNETES_APPLICATION].includes(mode);
 }
+
+export function handleTeamResource(resource: string) {
+    if (resource != null && resource !== '') {
+      return JSON.parse(resource);
+    }
+    return [];
+}
diff --git 
a/streampark-console/streampark-console-webapp/src/views/flink/resource/View.vue
 
b/streampark-console/streampark-console-webapp/src/views/flink/resource/View.vue
new file mode 100644
index 000000000..a3a970f21
--- /dev/null
+++ 
b/streampark-console/streampark-console-webapp/src/views/flink/resource/View.vue
@@ -0,0 +1,186 @@
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+      https://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<template>
+  <div>
+    <BasicTable @register="registerTable">
+      <template #toolbar>
+        <a-button type="primary" @click="handleCreate" v-auth="'resource:add'">
+          <Icon icon="ant-design:plus-outlined" />
+          {{ t('common.add') }}
+        </a-button>
+      </template>
+      <template #resetBefore> 1111 </template>
+      <template #bodyCell="{ column, record }">
+        <template v-if="column.dataIndex === 'resourceType'">
+          <Tag
+            class="bold-tag"
+            color="#52c41a"
+            v-if="record.resourceType == ResourceTypeEnum.APP"
+          >
+            APP
+          </Tag>
+          <Tag
+            class="bold-tag"
+            color="#2db7f5"
+            v-if="record.resourceType == ResourceTypeEnum.COMMON"
+          >
+            COMMON
+          </Tag>
+          <Tag
+            class="bold-tag"
+            color="#108ee9"
+            v-if="record.resourceType == ResourceTypeEnum.CONNECTOR"
+          >
+            CONNECTOR
+          </Tag>
+          <Tag
+            class="bold-tag"
+            color="#102541"
+            v-if="record.resourceType == ResourceTypeEnum.FORMAT"
+          >
+            FORMAT
+          </Tag>
+          <Tag
+            class="bold-tag"
+            color="#79f379"
+            v-if="record.resourceType == ResourceTypeEnum.UDF"
+          >
+            UDF
+          </Tag>
+        </template>
+        <template v-if="column.dataIndex === 'action'">
+          <TableAction
+            :actions="[
+              {
+                icon: 'clarity:note-edit-line',
+                auth: 'resource:update',
+                tooltip: t('flink.resource.modifyResource'),
+                onClick: handleEdit.bind(null, record),
+              },
+              {
+                icon: 'ant-design:delete-outlined',
+                color: 'error',
+                tooltip: t('flink.resource.deleteResource'),
+                auth: 'resource:delete',
+                popConfirm: {
+                  title: t('flink.resource.deletePopConfirm'),
+                  confirm: handleDelete.bind(null, record),
+                },
+              },
+            ]"
+          />
+        </template>
+      </template>
+    </BasicTable>
+    <ResourceDrawer @register="registerDrawer" @success="handleSuccess" />
+  </div>
+</template>
+<script lang="ts">
+  export default defineComponent({
+    name: 'Resource',
+  });
+</script>
+
+<script lang="ts" setup>
+  import {defineComponent, ref} from 'vue';
+  import { BasicTable, useTable, TableAction, SorterResult } from 
'/@/components/Table';
+  import ResourceDrawer from './components/ResourceDrawer.vue';
+  import { useDrawer } from '/@/components/Drawer';
+  import { columns, searchFormSchema } from './resource.data';
+  import { useMessage } from '/@/hooks/web/useMessage';
+  import { useI18n } from '/@/hooks/web/useI18n';
+  import Icon from '/@/components/Icon';
+  import { useRouter } from 'vue-router';
+  import { fetchResourceDelete, fetchResourceList } from 
"/@/api/flink/resource";
+  import { ResourceTypeEnum } from "/@/views/flink/resource/resource.data";
+  import { Tag } from 'ant-design-vue';
+
+  const router = useRouter();
+  const [registerDrawer, { openDrawer }] = useDrawer();
+  const [registerInfo, { openDrawer: openInfoDraw }] = useDrawer();
+  const { createMessage } = useMessage();
+  const { t } = useI18n();
+  const [registerTable, { reload }] = useTable({
+    title: t('flink.resource.table.title'),
+    api: fetchResourceList,
+    columns,
+    formConfig: {
+      baseColProps: { style: { paddingRight: '30px' } },
+      schemas: searchFormSchema,
+    },
+    sortFn: (sortInfo: SorterResult) => {
+      const { field, order } = sortInfo;
+      if (field && order) {
+        return {
+          // The sort field passed to the backend you
+          sortField: field,
+          // Sorting method passed to the background asc/desc
+          sortOrder: order === 'ascend' ? 'asc' : 'desc',
+        };
+      } else {
+        return {};
+      }
+    },
+    rowKey: 'id',
+    pagination: true,
+    useSearchForm: true,
+    showTableSetting: true,
+    showIndexColumn: false,
+    canResize: false,
+    actionColumn: {
+      width: 200,
+      title: t('component.table.operation'),
+      dataIndex: 'action',
+    },
+  });
+
+  function handleCreate() {
+    openDrawer(true, {
+      isUpdate: false,
+    });
+  }
+
+  function handleEdit(record: Recordable) {
+    openDrawer(true, {
+      record,
+      isUpdate: true,
+    });
+  }
+
+  /* Delete the resource */
+  async function handleDelete(record: Recordable) {
+    const { data } = await fetchResourceDelete({
+      id: record.id,
+      teamId: record.teamId,
+      resourceName: record.resourceName,
+    });
+    if (data.status === 'success') {
+      createMessage.success(t('flink.resource.deleteResource') + 
t('flink.resource.success'));
+      reload();
+    } else {
+      createMessage.error(t('flink.resource.deleteResource') + 
t('flink.resource.fail'));
+    }
+  }
+
+  function handleSuccess(isUpdate: boolean) {
+    createMessage.success(
+      `${isUpdate ? t('common.edit') : 
t('flink.resource.add')}${t('flink.resource.success')}`,
+    );
+    reload();
+  }
+
+</script>
diff --git 
a/streampark-console/streampark-console-webapp/src/views/flink/resource/components/ResourceDrawer.vue
 
b/streampark-console/streampark-console-webapp/src/views/flink/resource/components/ResourceDrawer.vue
new file mode 100644
index 000000000..12478bfb2
--- /dev/null
+++ 
b/streampark-console/streampark-console-webapp/src/views/flink/resource/components/ResourceDrawer.vue
@@ -0,0 +1,176 @@
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+      https://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<template>
+  <BasicDrawer
+    :okText="t('common.submitText')"
+    @register="registerDrawer"
+    showFooter
+    width="650"
+    @ok="handleSubmit"
+  >
+    <template #title>
+      <Icon icon="ant-design:code-outlined" />
+      {{ getTitle }}
+    </template>
+    <BasicForm @register="registerForm" :schemas="getResourceFormSchema">
+      <template #uploadJobJar>
+        <UploadJobJar :custom-request="handleCustomJobRequest" 
v-model:loading="uploadLoading">
+          <template #uploadInfo>
+            <Alert v-if="uploadJar" class="uploadjar-box" type="info">
+              <template #message>
+                <span class="tag-dependency-pom">
+                  {{ uploadJar }}
+                </span>
+              </template>
+            </Alert>
+          </template>
+        </UploadJobJar>
+      </template>
+    </BasicForm>
+  </BasicDrawer>
+</template>
+<script lang="ts">
+  export default {
+    name: 'ResourceDrawer',
+  };
+</script>
+
+<script lang="ts" setup>
+  import { ref, h, computed, unref } from 'vue';
+  import { Alert } from 'ant-design-vue';
+  import { BasicForm, FormSchema, useForm } from '/@/components/Form';
+  import { BasicDrawer, useDrawerInner } from '/@/components/Drawer';
+  import { Icon } from '/@/components/Icon';
+  import { useI18n } from '/@/hooks/web/useI18n';
+  import UploadJobJar from '/@/views/flink/app/components/UploadJobJar.vue';
+  import { fetchUpload } from "/@/api/flink/app/app";
+  import { fetchAddResource, fetchUpdateResource } from 
"/@/api/flink/resource";
+  import { ResourceTypeEnum } from "/@/views/flink/resource/resource.data";
+
+  const emit = defineEmits(['success', 'register']);
+
+  const { t } = useI18n();
+
+  const isUpdate = ref(false);
+  const uploadLoading = ref(false);
+  const uploadJar = ref('');
+  const resourceId = ref<Nullable<number>>(null);
+
+  const getResourceFormSchema = computed((): FormSchema[] => {
+    return [
+      {
+        field: 'resourceType',
+        label: t('flink.resource.resourceType'),
+        component: 'Select',
+        componentProps: {
+          options: [
+            { label: 'APP', value: ResourceTypeEnum.APP },
+            { label: 'COMMON', value: ResourceTypeEnum.COMMON },
+            { label: 'CONNECTOR', value: ResourceTypeEnum.CONNECTOR },
+            { label: 'FORMAT', value: ResourceTypeEnum.FORMAT },
+            { label: 'UDF', value: ResourceTypeEnum.UDF },
+          ],
+        },
+      },
+      {
+        field: 'resourceName',
+        label: t('flink.resource.uploadResource'),
+        component: 'Select',
+        slot: 'uploadJobJar',
+      },
+      {
+        field: 'mainClass',
+        label: t('flink.app.mainClass'),
+        component: 'Input',
+        componentProps: { placeholder: 
t('flink.app.addAppTips.mainClassPlaceholder') },
+        ifShow: ({ values }) => values?.resourceType == 'APP',
+        rules: [{ required: true, message: 
t('flink.app.addAppTips.mainClassIsRequiredMessage') }],
+      },
+      {
+        field: 'description',
+        label: t('common.description'),
+        component: 'InputTextArea',
+        componentProps: { rows: 4 },
+        rules: [{ max: 100, message: 
t('flink.resource.form.descriptionMessage') }],
+      },
+    ];
+  });
+
+  const [registerForm, { resetFields, setFieldsValue, validate }] = useForm({
+    name: 'ResourceForm',
+    colon: true,
+    showActionButtonGroup: false,
+    baseColProps: { span: 24 },
+    labelCol: { lg: { span: 5, offset: 0 }, sm: { span: 7, offset: 0 } },
+    wrapperCol: { lg: { span: 16, offset: 0 }, sm: { span: 17, offset: 0 } },
+  });
+
+  const [registerDrawer, { setDrawerProps, changeLoading, closeDrawer }] = 
useDrawerInner(
+    async (data: Recordable) => {
+      resetFields();
+      setDrawerProps({ confirmLoading: false });
+      isUpdate.value = !!data?.isUpdate;
+      if (unref(isUpdate)) {
+        resourceId.value = data.record.id;
+        setFieldsValue(data.record);
+      }
+    },
+  );
+
+  const getTitle = computed(() =>
+    !unref(isUpdate) ? t('flink.resource.addResource') : 
t('flink.resource.modifyResource'),
+  );
+
+  // form submit
+  async function handleSubmit() {
+    try {
+      const values = await validate();
+      setDrawerProps({ confirmLoading: true });
+      await (isUpdate.value
+        ? fetchUpdateResource({ id: resourceId.value, ...values })
+        : fetchAddResource(values));
+      uploadJar.value = ''
+      closeDrawer();
+      emit('success', isUpdate.value);
+    } finally {
+      setDrawerProps({ confirmLoading: false });
+    }
+  }
+
+  /* Custom job upload */
+  async function handleCustomJobRequest(data) {
+    const formData = new FormData();
+    formData.append('file', data.file);
+    try {
+      const path = await fetchUpload(formData);
+      uploadJar.value = data.file.name;
+      uploadLoading.value = false;
+      setFieldsValue({ resourceName: uploadJar.value });
+    } catch (error) {
+      console.error(error);
+      uploadLoading.value = false;
+    }
+  }
+</script>
+
+<style lang="less">
+  .conf-switch {
+    display: inline-block;
+    margin-top: 10px;
+    color: darkgrey;
+  }
+</style>
diff --git 
a/streampark-console/streampark-console-webapp/src/views/flink/resource/resource.data.ts
 
b/streampark-console/streampark-console-webapp/src/views/flink/resource/resource.data.ts
new file mode 100644
index 000000000..0137633d1
--- /dev/null
+++ 
b/streampark-console/streampark-console-webapp/src/views/flink/resource/resource.data.ts
@@ -0,0 +1,70 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import { BasicColumn, FormSchema } from '/@/components/Table';
+import { useI18n } from '/@/hooks/web/useI18n';
+const { t } = useI18n();
+
+export enum ResourceTypeEnum {
+  APP = 'APP',
+  COMMON = 'COMMON',
+  CONNECTOR = 'CONNECTOR',
+  FORMAT = 'FORMAT',
+  UDF = 'UDF',
+}
+
+export const columns: BasicColumn[] = [
+  {
+    title: t('flink.resource.table.resourceName'),
+    dataIndex: 'resourceName',
+    sorter: true,
+  },
+  {
+    title: t('common.description'),
+    dataIndex: 'description',
+  },
+  {
+    title: t('flink.resource.resourceType'),
+    dataIndex: 'resourceType',
+  },
+  {
+    title: t('common.createTime'),
+    dataIndex: 'createTime',
+    sorter: true,
+  },
+  {
+    title: t('common.modifyTime'),
+    dataIndex: 'modifyTime',
+    sorter: true,
+  },
+];
+
+export const searchFormSchema: FormSchema[] = [
+  {
+    field: 'resourceName',
+    label: t('flink.resource.table.resourceName'),
+    component: 'Input',
+    componentProps: { placeholder: 
t('flink.resource.table.resourceNamePlaceholder') },
+    colProps: { span: 8 },
+  },
+  {
+    field: 'description',
+    label: t('common.description'),
+    component: 'Input',
+    componentProps: { placeholder: 
t('flink.resource.table.descriptionPlaceholder') },
+    colProps: { span: 8 },
+  },
+];
diff --git 
a/streampark-flink/streampark-flink-packer/src/main/scala/org/apache/streampark/flink/packer/maven/DependencyInfo.scala
 
b/streampark-flink/streampark-flink-packer/src/main/scala/org/apache/streampark/flink/packer/maven/DependencyInfo.scala
index c80e416e6..698e9f8a0 100644
--- 
a/streampark-flink/streampark-flink-packer/src/main/scala/org/apache/streampark/flink/packer/maven/DependencyInfo.scala
+++ 
b/streampark-flink/streampark-flink-packer/src/main/scala/org/apache/streampark/flink/packer/maven/DependencyInfo.scala
@@ -36,6 +36,9 @@ case class DependencyInfo(mavenArts: Set[Artifact] = Set(), 
extJarLibs: Set[Stri
   def merge(jarLibs: Set[String]): DependencyInfo =
     if (jarLibs != null) DependencyInfo(mavenArts, extJarLibs ++ jarLibs) else 
this.copy()
 
+  def merge(jarLibs: JavaList[String]): DependencyInfo =
+    if (jarLibs != null) DependencyInfo(mavenArts, extJarLibs ++ 
jarLibs.toSet) else this.copy()
+
 }
 
 object DependencyInfo {

Reply via email to