This is an automated email from the ASF dual-hosted git repository.

benjobs pushed a commit to branch sync
in repository https://gitbox.apache.org/repos/asf/incubator-streampark.git

commit f8e591713e70fec94aeda7205eaf35024f62d2b8
Author: benjobs <[email protected]>
AuthorDate: Fri May 31 23:27:35 2024 +0800

    [Improve] application entity drop appId column
---
 .../apache/streampark/console/base/util/Tuple.java |  62 +++++++
 .../streampark/console/base/util/Tuple1.java       | 113 +++++++++++++
 .../streampark/console/base/util/Tuple2.java       | 163 ++++++++++++++++++
 .../streampark/console/base/util/Tuple3.java       | 171 +++++++++++++++++++
 .../streampark/console/base/util/Tuple4.java       | 187 +++++++++++++++++++++
 .../console/core/bean/AlertTemplate.java           |   4 +-
 .../console/core/entity/Application.java           |   5 +-
 .../impl/ApplicationActionServiceImpl.java         |  65 ++++---
 .../impl/ApplicationManageServiceImpl.java         |  79 ++++-----
 .../core/service/impl/ExternalLinkServiceImpl.java |   2 +-
 .../core/service/impl/SavePointServiceImpl.java    |  12 +-
 .../console/core/watcher/FlinkAppHttpWatcher.java  |   8 +-
 .../core/service/alert/AlertServiceTest.java       |   4 +-
 .../src/api/flink/flinkSql.ts                      |   2 +-
 .../src/api/flink/savepoint.ts                     |   2 +-
 .../Application/src/AppDarkModeToggle.vue          |   4 +-
 .../src/components/ContextMenu/src/ContextMenu.vue |   4 +-
 .../src/components/Form/src/BasicForm.vue          |   2 +-
 .../components/Form/src/components/FormItem.vue    |   8 +-
 .../Modal/src/components/ModalWrapper.vue          |   4 +-
 .../src/components/Page/src/PageFooter.vue         |   4 +-
 .../components/Table/src/components/HeaderCell.vue |   2 +-
 .../src/hooks/setting/useMenuSetting.ts            |  10 +-
 .../src/hooks/web/useLockPage.ts                   |   9 +-
 .../streampark-console-webapp/src/utils/props.ts   |  53 +++---
 .../src/views/base/error-log/data.tsx              |  12 +-
 .../src/views/base/login/Login.vue                 |  27 ++-
 .../src/views/base/login/LoginForm.vue             |   4 +-
 .../src/views/flink/app/EditFlink.vue              |  17 +-
 .../src/views/flink/app/EditStreamPark.vue         |  17 +-
 .../src/views/flink/app/View.vue                   |   5 -
 .../flink/app/components/AppDetail/DetailTab.vue   |   7 +-
 .../components/AppDetail/FlinkSqlCompareModal.vue  |   6 +-
 .../src/views/flink/app/components/FlinkSql.vue    |  41 ++---
 .../src/views/flink/app/data/detail.data.ts        |   2 +-
 .../src/views/flink/app/hooks/useDetail.ts         |   2 +-
 .../src/views/flink/app/hooks/useEditStreamPark.ts |   7 +-
 37 files changed, 947 insertions(+), 179 deletions(-)

diff --git 
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/util/Tuple.java
 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/util/Tuple.java
new file mode 100644
index 000000000..f80d6bd38
--- /dev/null
+++ 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/util/Tuple.java
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.streampark.console.base.util;
+
+/**
+ * The base class of all tuples. Tuples have a fixed length and contain a set 
of fields, which may all
+ * be of different types. Because Tuples are strongly typed, each distinct 
tuple length is
+ * represented by its own class. Tuples exist with up to 4 fields and are 
described in the classes
+ * {@link Tuple1} to {@link Tuple4}.
+ *
+ * <p>The fields in the tuples may be accessed directly as public fields, or 
via position (zero
+ * indexed) {@link #get(int)}.
+ *
+ * <p>Tuples are in principle serializable. However, they may contain 
non-serializable fields, in
+ * which case serialization will fail.
+ */
+public abstract class Tuple implements java.io.Serializable {
+
+  private static final long serialVersionUID = 1L;
+
+  /**
+   * Gets the field at the specified position.
+   *
+   * @param pos The position of the field, zero indexed.
+   * @return The field at the specified position.
+   * @throws IndexOutOfBoundsException Thrown, if the position is negative, or 
equal to, or larger
+   *     than the number of fields.
+   */
+  public abstract <T> T get(int pos);
+
+  /**
+   * Sets the field at the specified position.
+   *
+   * @param value The value to be assigned to the field at the specified 
position.
+   * @param pos The position of the field, zero indexed.
+   * @throws IndexOutOfBoundsException Thrown, if the position is negative, or 
equal to, or larger
+   *     than the number of fields.
+   */
+  public abstract <T> void set(T value, int pos);
+
+  /**
+   * Shallow tuple copy.
+   *
+   * @return A new Tuple with the same fields as this.
+   */
+  public abstract <T extends Tuple> T copy();
+}
diff --git 
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/util/Tuple1.java
 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/util/Tuple1.java
new file mode 100644
index 000000000..1a6f79d0b
--- /dev/null
+++ 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/util/Tuple1.java
@@ -0,0 +1,113 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.streampark.console.base.util;
+
+import java.util.Objects;
+
+public class Tuple1<T0> extends Tuple {
+
+  private static final long serialVersionUID = 1L;
+
+  /** Field 0 of the tuple. */
+  public T0 t1;
+
+  /** Creates a new tuple where all fields are null. */
+  public Tuple1() {}
+
+  /**
+   * Creates a new tuple and assigns the given values to the tuple's fields.
+   *
+   * @param t0 The value for field 0
+   */
+  public Tuple1(T0 t0) {
+    this.t1 = t0;
+  }
+
+  @Override
+  @SuppressWarnings("unchecked")
+  public <T> T get(int pos) {
+    if (pos == 0) {
+      return (T) this.t1;
+    }
+    throw new IndexOutOfBoundsException(String.valueOf(pos));
+  }
+
+  @Override
+  @SuppressWarnings("unchecked")
+  public <T> void set(T value, int pos) {
+    if (pos == 0) {
+      this.t1 = (T0) value;
+    } else {
+      throw new IndexOutOfBoundsException(String.valueOf(pos));
+    }
+  }
+
+  /**
+   * Sets new values to all fields of the tuple.
+   *
+   * @param f0 The value for field 0
+   */
+  public void set(T0 f0) {
+    this.t1 = f0;
+  }
+
+  /**
+   * Deep equality for tuples by calling equals() on the tuple members.
+   *
+   * @param o the object checked for equality
+   * @return true if this is equal to o.
+   */
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) {
+      return true;
+    }
+    if (!(o instanceof Tuple1)) {
+      return false;
+    }
+    @SuppressWarnings("rawtypes")
+    Tuple1 tuple = (Tuple1) o;
+    return Objects.equals(t1, tuple.t1);
+  }
+
+  @Override
+  public int hashCode() {
+    return t1 != null ? t1.hashCode() : 0;
+  }
+
+  /**
+   * Shallow tuple copy.
+   *
+   * @return A new Tuple with the same fields as this.
+   */
+  @Override
+  @SuppressWarnings("unchecked")
+  public Tuple1<T0> copy() {
+    return new Tuple1<>(this.t1);
+  }
+
+  /**
+   * Creates a new tuple and assigns the given values to the tuple's fields. 
This is more convenient
+   * than using the constructor, because the compiler can infer the generic 
type arguments
+   * implicitly. For example: {@code Tuple1.of(n)} instead of {@code new 
Tuple1<Integer>(n)}
+   */
+  public static <T0> Tuple1<T0> of(T0 f0) {
+    return new Tuple1<>(f0);
+  }
+}
diff --git 
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/util/Tuple2.java
 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/util/Tuple2.java
new file mode 100644
index 000000000..886beb113
--- /dev/null
+++ 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/util/Tuple2.java
@@ -0,0 +1,163 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.streampark.console.base.util;
+
+import java.util.Objects;
+
+/**
+ * A tuple with 2 fields. Tuples are strongly typed; each field may be of a 
separate type. The
+ * fields of the tuple can be accessed directly as public fields (f0, f1, ...) 
or via their position
+ * through the {@link #get(int)} method. The tuple field positions start at 
zero.
+ *
+ * <p>Tuples are mutable types, meaning that their fields can be re-assigned. 
This allows functions
+ * that work with Tuples to reuse objects in order to reduce pressure on the 
garbage collector.
+ *
+ * <p>Warning: If you subclass Tuple2, then be sure to either
+ *
+ * <ul>
+ *   <li>not add any new fields, or
+ *   <li>make it a POJO, and always declare the element type of your 
DataStreams/DataSets to your
+ *       descendant type. (That is, if you have a "class Foo extends Tuple2", 
then don't use
+ *       instances of Foo in a DataStream&lt;Tuple2&gt; / 
DataSet&lt;Tuple2&gt;, but declare it as
+ *       DataStream&lt;Foo&gt; / DataSet&lt;Foo&gt;.)
+ * </ul>
+ *
+ * @see Tuple
+ * @param <T0> The type of field 0
+ * @param <T1> The type of field 1
+ */
+public class Tuple2<T0, T1> extends Tuple {
+
+  private static final long serialVersionUID = 1L;
+
+  /** Field 0 of the tuple. */
+  public T0 t1;
+  /** Field 1 of the tuple. */
+  public T1 t2;
+
+  /** Creates a new tuple where all fields are null. */
+  public Tuple2() {}
+
+  /**
+   * Creates a new tuple and assigns the given values to the tuple's fields.
+   *
+   * @param t0 The value for field 0
+   * @param t1 The value for field 1
+   */
+  public Tuple2(T0 t0, T1 t1) {
+    this.t1 = t0;
+    this.t2 = t1;
+  }
+
+  @Override
+  @SuppressWarnings("unchecked")
+  public <T> T get(int pos) {
+    switch (pos) {
+      case 0:
+        return (T) this.t1;
+      case 1:
+        return (T) this.t2;
+      default:
+        throw new IndexOutOfBoundsException(String.valueOf(pos));
+    }
+  }
+
+  @SuppressWarnings("unchecked")
+  public <T> void set(T value, int pos) {
+    switch (pos) {
+      case 0:
+        this.t1 = (T0) value;
+        break;
+      case 1:
+        this.t2 = (T1) value;
+        break;
+      default:
+        throw new IndexOutOfBoundsException(String.valueOf(pos));
+    }
+  }
+
+  /**
+   * Sets new values to all fields of the tuple.
+   *
+   * @param f0 The value for field 0
+   * @param f1 The value for field 1
+   */
+  public void set(T0 f0, T1 f1) {
+    this.t1 = f0;
+    this.t2 = f1;
+  }
+
+  /**
+   * Returns a shallow copy of the tuple with swapped values.
+   *
+   * @return shallow copy of the tuple with swapped values
+   */
+  public Tuple2<T1, T0> swap() {
+    return new Tuple2<T1, T0>(t2, t1);
+  }
+
+  /**
+   * Deep equality for tuples by calling equals() on the tuple members.
+   *
+   * @param o the object checked for equality
+   * @return true if this is equal to o.
+   */
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) {
+      return true;
+    }
+    if (!(o instanceof Tuple2)) {
+      return false;
+    }
+    @SuppressWarnings("rawtypes")
+    Tuple2 tuple = (Tuple2) o;
+    if (!Objects.equals(t1, tuple.t1)) {
+      return false;
+    }
+    return Objects.equals(t2, tuple.t2);
+  }
+
+  @Override
+  public int hashCode() {
+    int result = t1 != null ? t1.hashCode() : 0;
+    result = 31 * result + (t2 != null ? t2.hashCode() : 0);
+    return result;
+  }
+
+  /**
+   * Shallow tuple copy.
+   *
+   * @return A new Tuple with the same fields as this.
+   */
+  @Override
+  @SuppressWarnings("unchecked")
+  public Tuple2<T0, T1> copy() {
+    return new Tuple2<>(this.t1, this.t2);
+  }
+
+  /**
+   * Creates a new tuple and assigns the given values to the tuple's fields. 
This is more convenient
+   * than using the constructor, because the compiler can infer the generic 
type arguments
+   * implicitly. For example: {@code Tuple2.of(n, x)} instead of {@code new 
Tuple2<Integer, Double>(n, x)}
+   */
+  public static <T0, T1> Tuple2<T0, T1> of(T0 f0, T1 f1) {
+    return new Tuple2<>(f0, f1);
+  }
+}
diff --git 
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/util/Tuple3.java
 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/util/Tuple3.java
new file mode 100644
index 000000000..802262418
--- /dev/null
+++ 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/util/Tuple3.java
@@ -0,0 +1,171 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.streampark.console.base.util;
+
+import java.util.Objects;
+
+/**
+ * A tuple with 3 fields. Tuples are strongly typed; each field may be of a 
separate type. The
+ * fields of the tuple can be accessed directly as public fields (f0, f1, ...) 
or via their position
+ * through the {@link #get(int)} method. The tuple field positions start at 
zero.
+ *
+ * <p>Tuples are mutable types, meaning that their fields can be re-assigned. 
This allows functions
+ * that work with Tuples to reuse objects in order to reduce pressure on the 
garbage collector.
+ *
+ * <p>Warning: If you subclass Tuple3, then be sure to either
+ *
+ * <ul>
+ *   <li>not add any new fields, or
+ *   <li>make it a POJO, and always declare the element type of your 
DataStreams/DataSets to your
+ *       descendant type. (That is, if you have a "class Foo extends Tuple3", 
then don't use
+ *       instances of Foo in a DataStream&lt;Tuple3&gt; / 
DataSet&lt;Tuple3&gt;, but declare it as
+ *       DataStream&lt;Foo&gt; / DataSet&lt;Foo&gt;.)
+ * </ul>
+ *
+ * @see Tuple
+ * @param <T0> The type of field 0
+ * @param <T1> The type of field 1
+ * @param <T2> The type of field 2
+ */
+public class Tuple3<T0, T1, T2> extends Tuple {
+
+  private static final long serialVersionUID = 1L;
+
+  /** Field 0 of the tuple. */
+  public T0 t1;
+  /** Field 1 of the tuple. */
+  public T1 t2;
+  /** Field 2 of the tuple. */
+  public T2 t3;
+
+  /** Creates a new tuple where all fields are null. */
+  public Tuple3() {}
+
+  /**
+   * Creates a new tuple and assigns the given values to the tuple's fields.
+   *
+   * @param t0 The value for field 0
+   * @param t1 The value for field 1
+   * @param t2 The value for field 2
+   */
+  public Tuple3(T0 t0, T1 t1, T2 t2) {
+    this.t1 = t0;
+    this.t2 = t1;
+    this.t3 = t2;
+  }
+
+  @Override
+  @SuppressWarnings("unchecked")
+  public <T> T get(int pos) {
+    switch (pos) {
+      case 0:
+        return (T) this.t1;
+      case 1:
+        return (T) this.t2;
+      case 2:
+        return (T) this.t3;
+      default:
+        throw new IndexOutOfBoundsException(String.valueOf(pos));
+    }
+  }
+
+  @Override
+  @SuppressWarnings("unchecked")
+  public <T> void set(T value, int pos) {
+    switch (pos) {
+      case 0:
+        this.t1 = (T0) value;
+        break;
+      case 1:
+        this.t2 = (T1) value;
+        break;
+      case 2:
+        this.t3 = (T2) value;
+        break;
+      default:
+        throw new IndexOutOfBoundsException(String.valueOf(pos));
+    }
+  }
+
+  /**
+   * Sets new values to all fields of the tuple.
+   *
+   * @param f0 The value for field 0
+   * @param f1 The value for field 1
+   * @param f2 The value for field 2
+   */
+  public void set(T0 f0, T1 f1, T2 f2) {
+    this.t1 = f0;
+    this.t2 = f1;
+    this.t3 = f2;
+  }
+
+  /**
+   * Deep equality for tuples by calling equals() on the tuple members.
+   *
+   * @param o the object checked for equality
+   * @return true if this is equal to o.
+   */
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) {
+      return true;
+    }
+    if (!(o instanceof Tuple3)) {
+      return false;
+    }
+    @SuppressWarnings("rawtypes")
+    Tuple3 tuple = (Tuple3) o;
+    if (!Objects.equals(t1, tuple.t1)) {
+      return false;
+    }
+    if (!Objects.equals(t2, tuple.t2)) {
+      return false;
+    }
+    return Objects.equals(t3, tuple.t3);
+  }
+
+  @Override
+  public int hashCode() {
+    int result = t1 != null ? t1.hashCode() : 0;
+    result = 31 * result + (t2 != null ? t2.hashCode() : 0);
+    result = 31 * result + (t3 != null ? t3.hashCode() : 0);
+    return result;
+  }
+
+  /**
+   * Shallow tuple copy.
+   *
+   * @return A new Tuple with the same fields as this.
+   */
+  @Override
+  @SuppressWarnings("unchecked")
+  public Tuple3<T0, T1, T2> copy() {
+    return new Tuple3<>(this.t1, this.t2, this.t3);
+  }
+
+  /**
+   * Creates a new tuple and assigns the given values to the tuple's fields. 
This is more convenient
+   * than using the constructor, because the compiler can infer the generic 
type arguments
+   * implicitly. For example: {@code Tuple3.of(n, x, s)} instead of {@code new 
Tuple3<Integer,
+   * Double, String>(n, x, s)}
+   */
+  public static <T0, T1, T2> Tuple3<T0, T1, T2> of(T0 f0, T1 f1, T2 f2) {
+    return new Tuple3<>(f0, f1, f2);
+  }
+}
diff --git 
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/util/Tuple4.java
 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/util/Tuple4.java
new file mode 100644
index 000000000..0296d33e5
--- /dev/null
+++ 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/util/Tuple4.java
@@ -0,0 +1,187 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.streampark.console.base.util;
+
+import java.util.Objects;
+
+/**
+ * A tuple with 4 fields. Tuples are strongly typed; each field may be of a 
separate type. The
+ * fields of the tuple can be accessed directly as public fields (f0, f1, ...) 
or via their position
+ * through the {@link #get(int)} method. The tuple field positions start at 
zero.
+ *
+ * <p>Tuples are mutable types, meaning that their fields can be re-assigned. 
This allows functions
+ * that work with Tuples to reuse objects in order to reduce pressure on the 
garbage collector.
+ *
+ * <p>Warning: If you subclass Tuple4, then be sure to either
+ *
+ * <ul>
+ *   <li>not add any new fields, or
+ *   <li>make it a POJO, and always declare the element type of your 
DataStreams/DataSets to your
+ *       descendant type. (That is, if you have a "class Foo extends Tuple4", 
then don't use
+ *       instances of Foo in a DataStream&lt;Tuple4&gt; / 
DataSet&lt;Tuple4&gt;, but declare it as
+ *       DataStream&lt;Foo&gt; / DataSet&lt;Foo&gt;.)
+ * </ul>
+ *
+ * @see Tuple
+ * @param <T0> The type of field 0
+ * @param <T1> The type of field 1
+ * @param <T2> The type of field 2
+ * @param <T3> The type of field 3
+ */
+public class Tuple4<T0, T1, T2, T3> extends Tuple {
+
+  private static final long serialVersionUID = 1L;
+
+  /** Field 0 of the tuple. */
+  public T0 t1;
+  /** Field 1 of the tuple. */
+  public T1 t2;
+  /** Field 2 of the tuple. */
+  public T2 t3;
+  /** Field 3 of the tuple. */
+  public T3 t4;
+
+  /** Creates a new tuple where all fields are null. */
+  public Tuple4() {}
+
+  /**
+   * Creates a new tuple and assigns the given values to the tuple's fields.
+   *
+   * @param t0 The value for field 0
+   * @param t1 The value for field 1
+   * @param t2 The value for field 2
+   * @param t4 The value for field 3
+   */
+  public Tuple4(T0 t0, T1 t1, T2 t2, T3 t4) {
+    this.t1 = t0;
+    this.t2 = t1;
+    this.t3 = t2;
+    this.t4 = t4;
+  }
+
+  @Override
+  @SuppressWarnings("unchecked")
+  public <T> T get(int pos) {
+    switch (pos) {
+      case 0:
+        return (T) this.t1;
+      case 1:
+        return (T) this.t2;
+      case 2:
+        return (T) this.t3;
+      case 3:
+        return (T) this.t4;
+      default:
+        throw new IndexOutOfBoundsException(String.valueOf(pos));
+    }
+  }
+
+  @Override
+  @SuppressWarnings("unchecked")
+  public <T> void set(T value, int pos) {
+    switch (pos) {
+      case 0:
+        this.t1 = (T0) value;
+        break;
+      case 1:
+        this.t2 = (T1) value;
+        break;
+      case 2:
+        this.t3 = (T2) value;
+        break;
+      case 3:
+        this.t4 = (T3) value;
+        break;
+      default:
+        throw new IndexOutOfBoundsException(String.valueOf(pos));
+    }
+  }
+
+  /**
+   * Sets new values to all fields of the tuple.
+   *
+   * @param f0 The value for field 0
+   * @param f1 The value for field 1
+   * @param f2 The value for field 2
+   * @param f3 The value for field 3
+   */
+  public void set(T0 f0, T1 f1, T2 f2, T3 f3) {
+    this.t1 = f0;
+    this.t2 = f1;
+    this.t3 = f2;
+    this.t4 = f3;
+  }
+
+  /**
+   * Deep equality for tuples by calling equals() on the tuple members.
+   *
+   * @param o the object checked for equality
+   * @return true if this is equal to o.
+   */
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) {
+      return true;
+    }
+    if (!(o instanceof Tuple4)) {
+      return false;
+    }
+    @SuppressWarnings("rawtypes")
+    Tuple4 tuple = (Tuple4) o;
+    if (!Objects.equals(t1, tuple.t1)) {
+      return false;
+    }
+    if (!Objects.equals(t2, tuple.t2)) {
+      return false;
+    }
+    if (!Objects.equals(t3, tuple.t3)) {
+      return false;
+    }
+    return Objects.equals(t4, tuple.t4);
+  }
+
+  @Override
+  public int hashCode() {
+    int result = t1 != null ? t1.hashCode() : 0;
+    result = 31 * result + (t2 != null ? t2.hashCode() : 0);
+    result = 31 * result + (t3 != null ? t3.hashCode() : 0);
+    result = 31 * result + (t4 != null ? t4.hashCode() : 0);
+    return result;
+  }
+
+  /**
+   * Shallow tuple copy.
+   *
+   * @return A new Tuple with the same fields as this.
+   */
+  @Override
+  @SuppressWarnings("unchecked")
+  public Tuple4<T0, T1, T2, T3> copy() {
+    return new Tuple4<>(this.t1, this.t2, this.t3, this.t4);
+  }
+
+  /**
+   * Creates a new tuple and assigns the given values to the tuple's fields. 
This is more convenient
+   * than using the constructor, because the compiler can infer the generic 
type arguments
+   * implicitly. For example: {@code Tuple4.of(n, x, s, b)} instead of {@code 
new Tuple4<Integer, Double, String, Boolean>(n, x, s, b)}
+   */
+  public static <T0, T1, T2, T3> Tuple4<T0, T1, T2, T3> of(T0 f0, T1 f1, T2 
f2, T3 f3) {
+    return new Tuple4<>(f0, f1, f2, f3);
+  }
+}
diff --git 
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/bean/AlertTemplate.java
 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/bean/AlertTemplate.java
index e96b31583..3491578de 100644
--- 
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/bean/AlertTemplate.java
+++ 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/bean/AlertTemplate.java
@@ -67,7 +67,7 @@ public class AlertTemplate implements Serializable {
     return new AlertTemplateBuilder()
         .setDuration(application.getStartTime(), application.getEndTime())
         .setJobName(application.getJobName())
-        .setLink(application.getFlinkExecutionMode(), application.getAppId())
+        .setLink(application.getFlinkExecutionMode(), 
application.getClusterId())
         .setStartTime(application.getStartTime())
         .setEndTime(application.getEndTime())
         .setRestart(application.isNeedRestartOnFailed(), 
application.getRestartCount())
@@ -87,7 +87,7 @@ public class AlertTemplate implements Serializable {
     return new AlertTemplateBuilder()
         .setDuration(application.getStartTime(), application.getEndTime())
         .setJobName(application.getJobName())
-        .setLink(application.getFlinkExecutionMode(), application.getAppId())
+        .setLink(application.getFlinkExecutionMode(), 
application.getClusterId())
         .setStartTime(application.getStartTime())
         .setType(2)
         .setCpFailureRateInterval(
diff --git 
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/Application.java
 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/Application.java
index 255fa1afa..dd6abd8d1 100644
--- 
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/Application.java
+++ 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/Application.java
@@ -80,9 +80,6 @@ public class Application implements Serializable {
   /** The name of the frontend and program displayed in yarn */
   private String jobName;
 
-  @TableField(updateStrategy = FieldStrategy.IGNORED)
-  private String appId;
-
   @TableField(updateStrategy = FieldStrategy.IGNORED)
   private String jobId;
 
@@ -93,7 +90,7 @@ public class Application implements Serializable {
   /** flink version */
   private Long versionId;
 
-  /** k8s cluster id */
+  /** 1. yarn application id(on yarn) 2. k8s application id (on k8s 
application) */
   private String clusterId;
 
   /** flink docker base image */
diff --git 
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationActionServiceImpl.java
 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationActionServiceImpl.java
index be0d4a271..3fddda4c9 100644
--- 
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationActionServiceImpl.java
+++ 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationActionServiceImpl.java
@@ -34,6 +34,7 @@ import org.apache.streampark.common.util.HadoopUtils;
 import org.apache.streampark.common.util.PropertiesUtils;
 import org.apache.streampark.console.base.exception.ApiAlertException;
 import org.apache.streampark.console.base.exception.ApplicationException;
+import org.apache.streampark.console.base.util.Tuple2;
 import org.apache.streampark.console.core.entity.AppBuildPipeline;
 import org.apache.streampark.console.core.entity.Application;
 import org.apache.streampark.console.core.entity.ApplicationConfig;
@@ -83,7 +84,6 @@ import 
org.apache.streampark.flink.packer.pipeline.ShadedBuildResponse;
 
 import org.apache.commons.lang3.StringUtils;
 import org.apache.flink.api.common.JobID;
-import org.apache.flink.api.java.tuple.Tuple2;
 import org.apache.flink.configuration.CoreOptions;
 import org.apache.flink.configuration.JobManagerOptions;
 import org.apache.flink.configuration.MemorySize;
@@ -269,23 +269,6 @@ public class ApplicationActionServiceImpl extends 
ServiceImpl<ApplicationMapper,
       }
     }
 
-    String clusterId = null;
-    if (FlinkExecutionMode.isKubernetesMode(application.getExecutionMode())) {
-      clusterId = application.getClusterId();
-    } else if (FlinkExecutionMode.isYarnMode(application.getExecutionMode())) {
-      if (FlinkExecutionMode.YARN_SESSION == 
application.getFlinkExecutionMode()) {
-        FlinkCluster cluster = 
flinkClusterService.getById(application.getFlinkClusterId());
-        ApiAlertException.throwIfNull(
-            cluster,
-            String.format(
-                "The yarn session clusterId=%s can't found, maybe the 
clusterId is wrong or the cluster has been deleted. Please contact the Admin.",
-                application.getFlinkClusterId()));
-        clusterId = cluster.getClusterId();
-      } else {
-        clusterId = application.getAppId();
-      }
-    }
-
     Map<String, Object> properties = new HashMap<>();
 
     if (FlinkExecutionMode.isRemoteMode(application.getFlinkExecutionMode())) {
@@ -301,6 +284,10 @@ public class ApplicationActionServiceImpl extends 
ServiceImpl<ApplicationMapper,
       properties.put(RestOptions.PORT.key(), activeAddress.getPort());
     }
 
+    Tuple2<String, String> clusterIdNamespace = 
getNamespaceClusterId(application);
+    String namespace = clusterIdNamespace.t1;
+    String clusterId = clusterIdNamespace.t2;
+
     CancelRequest cancelRequest =
         new CancelRequest(
             application.getId(),
@@ -313,7 +300,7 @@ public class ApplicationActionServiceImpl extends 
ServiceImpl<ApplicationMapper,
             appParam.getDrain(),
             customSavepoint,
             appParam.getNativeFormat(),
-            application.getK8sNamespace());
+            namespace);
 
     final Date triggerTime = new Date();
     CompletableFuture<CancelResponse> cancelFuture =
@@ -439,8 +426,8 @@ public class ApplicationActionServiceImpl extends 
ServiceImpl<ApplicationMapper,
             flinkK8sDataTypeConverter.genDefaultFlinkDeploymentIngressDef());
 
     Tuple2<String, String> userJarAndAppConf = getUserJarAndAppConf(flinkEnv, 
application);
-    String flinkUserJar = userJarAndAppConf.f0;
-    String appConf = userJarAndAppConf.f1;
+    String flinkUserJar = userJarAndAppConf.t1;
+    String appConf = userJarAndAppConf.t2;
 
     BuildResult buildResult = buildPipeline.getBuildResult();
     if (FlinkExecutionMode.YARN_APPLICATION == 
application.getFlinkExecutionMode()) {
@@ -518,11 +505,15 @@ public class ApplicationActionServiceImpl extends 
ServiceImpl<ApplicationMapper,
         application.setTmMemory(MemorySize.parse(tmMemory).getMebiBytes());
       }
     }
-    application.setAppId(response.clusterId());
     if (StringUtils.isNoneEmpty(response.jobId())) {
       application.setJobId(response.jobId());
     }
 
+    if (FlinkExecutionMode.isYarnMode(application.getExecutionMode())) {
+      application.setClusterId(response.clusterId());
+      applicationLog.setYarnAppId(response.clusterId());
+    }
+
     if (StringUtils.isNoneEmpty(response.jobManagerUrl())) {
       application.setJobManagerUrl(response.jobManagerUrl());
       applicationLog.setJobManagerUrl(response.jobManagerUrl());
@@ -848,4 +839,34 @@ public class ApplicationActionServiceImpl extends 
ServiceImpl<ApplicationMapper,
         flinkClusterWatcher.getClusterState(flinkCluster) == 
ClusterState.RUNNING,
         "[StreamPark] The flink cluster not running, please start it");
   }
+
+  private Tuple2<String, String> getNamespaceClusterId(Application 
application) {
+    String clusterId = null;
+    String k8sNamespace = null;
+    FlinkCluster cluster;
+    switch (application.getFlinkExecutionMode()) {
+      case YARN_APPLICATION:
+      case YARN_PER_JOB:
+      case YARN_SESSION:
+        clusterId = application.getClusterId();
+        break;
+      case KUBERNETES_NATIVE_APPLICATION:
+        clusterId = application.getJobName();
+        k8sNamespace = application.getK8sNamespace();
+        break;
+      case KUBERNETES_NATIVE_SESSION:
+        cluster = flinkClusterService.getById(application.getFlinkClusterId());
+        ApiAlertException.throwIfNull(
+            cluster,
+            String.format(
+                "The Kubernetes session clusterId=%s can't be found, maybe the 
clusterId is wrong or the cluster has been deleted. Please contact the Admin.",
+                application.getFlinkClusterId()));
+        clusterId = cluster.getClusterId();
+        k8sNamespace = cluster.getK8sNamespace();
+        break;
+      default:
+        break;
+    }
+    return Tuple2.of(k8sNamespace, clusterId);
+  }
 }
diff --git 
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationManageServiceImpl.java
 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationManageServiceImpl.java
index 4f64849df..2d7e4cf04 100644
--- 
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationManageServiceImpl.java
+++ 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationManageServiceImpl.java
@@ -395,63 +395,64 @@ public class ApplicationManageServiceImpl extends 
ServiceImpl<ApplicationMapper,
         !existsByJobName,
         "[StreamPark] Application names can't be repeated, copy application 
failed.");
 
-    Application oldApp = getById(appParam.getId());
+    Application persist = getById(appParam.getId());
     Application newApp = new Application();
     String jobName = appParam.getJobName();
 
     newApp.setJobName(jobName);
     newApp.setClusterId(
-        FlinkExecutionMode.isSessionMode(oldApp.getFlinkExecutionMode())
-            ? oldApp.getClusterId()
-            : jobName);
-    newApp.setArgs(appParam.getArgs() != null ? appParam.getArgs() : 
oldApp.getArgs());
-    newApp.setVersionId(oldApp.getVersionId());
-
-    newApp.setFlinkClusterId(oldApp.getFlinkClusterId());
-    newApp.setRestartSize(oldApp.getRestartSize());
-    newApp.setJobType(oldApp.getJobType());
-    newApp.setOptions(oldApp.getOptions());
-    newApp.setDynamicProperties(oldApp.getDynamicProperties());
-    newApp.setResolveOrder(oldApp.getResolveOrder());
-    newApp.setExecutionMode(oldApp.getExecutionMode());
-    newApp.setFlinkImage(oldApp.getFlinkImage());
-    newApp.setK8sNamespace(oldApp.getK8sNamespace());
-    newApp.setK8sRestExposedType(oldApp.getK8sRestExposedType());
-    newApp.setK8sPodTemplate(oldApp.getK8sPodTemplate());
-    newApp.setK8sJmPodTemplate(oldApp.getK8sJmPodTemplate());
-    newApp.setK8sTmPodTemplate(oldApp.getK8sTmPodTemplate());
-    newApp.setK8sHadoopIntegration(oldApp.getK8sHadoopIntegration());
-    newApp.setDescription(oldApp.getDescription());
-    newApp.setAlertId(oldApp.getAlertId());
-    newApp.setCpFailureAction(oldApp.getCpFailureAction());
-    newApp.setCpFailureRateInterval(oldApp.getCpFailureRateInterval());
-    newApp.setCpMaxFailureInterval(oldApp.getCpMaxFailureInterval());
-    newApp.setMainClass(oldApp.getMainClass());
-    newApp.setAppType(oldApp.getAppType());
-    newApp.setResourceFrom(oldApp.getResourceFrom());
-    newApp.setProjectId(oldApp.getProjectId());
-    newApp.setModule(oldApp.getModule());
+        FlinkExecutionMode.isSessionMode(persist.getFlinkExecutionMode())
+            ? persist.getClusterId()
+            : null);
+    newApp.setArgs(appParam.getArgs() != null ? appParam.getArgs() : 
persist.getArgs());
+    newApp.setVersionId(persist.getVersionId());
+
+    newApp.setFlinkClusterId(persist.getFlinkClusterId());
+    newApp.setRestartSize(persist.getRestartSize());
+    newApp.setJobType(persist.getJobType());
+    newApp.setOptions(persist.getOptions());
+    newApp.setDynamicProperties(persist.getDynamicProperties());
+    newApp.setResolveOrder(persist.getResolveOrder());
+    newApp.setExecutionMode(persist.getExecutionMode());
+    newApp.setFlinkImage(persist.getFlinkImage());
+    newApp.setK8sNamespace(persist.getK8sNamespace());
+    newApp.setK8sRestExposedType(persist.getK8sRestExposedType());
+    newApp.setK8sPodTemplate(persist.getK8sPodTemplate());
+    newApp.setK8sJmPodTemplate(persist.getK8sJmPodTemplate());
+    newApp.setK8sTmPodTemplate(persist.getK8sTmPodTemplate());
+    newApp.setK8sHadoopIntegration(persist.getK8sHadoopIntegration());
+    newApp.setDescription(persist.getDescription());
+    newApp.setAlertId(persist.getAlertId());
+    newApp.setCpFailureAction(persist.getCpFailureAction());
+    newApp.setCpFailureRateInterval(persist.getCpFailureRateInterval());
+    newApp.setCpMaxFailureInterval(persist.getCpMaxFailureInterval());
+    newApp.setMainClass(persist.getMainClass());
+    newApp.setAppType(persist.getAppType());
+    newApp.setResourceFrom(persist.getResourceFrom());
+    newApp.setProjectId(persist.getProjectId());
+    newApp.setModule(persist.getModule());
     newApp.setUserId(serviceHelper.getUserId());
     newApp.setState(FlinkAppStateEnum.ADDED.getValue());
     newApp.setRelease(ReleaseStateEnum.NEED_RELEASE.get());
     newApp.setOptionState(OptionStateEnum.NONE.getValue());
+    newApp.setHotParams(persist.getHotParams());
+
+    // createTime & modifyTime
     Date date = new Date();
     newApp.setCreateTime(date);
     newApp.setModifyTime(date);
-    newApp.setHotParams(oldApp.getHotParams());
 
-    newApp.setJar(oldApp.getJar());
-    newApp.setJarCheckSum(oldApp.getJarCheckSum());
-    newApp.setTags(oldApp.getTags());
-    newApp.setTeamId(oldApp.getTeamId());
-    newApp.setHadoopUser(oldApp.getHadoopUser());
+    newApp.setJar(persist.getJar());
+    newApp.setJarCheckSum(persist.getJarCheckSum());
+    newApp.setTags(persist.getTags());
+    newApp.setTeamId(persist.getTeamId());
+    newApp.setDependency(persist.getDependency());
 
     boolean saved = save(newApp);
     if (saved) {
       if (newApp.isFlinkSqlJob()) {
         FlinkSql copyFlinkSql = 
flinkSqlService.getLatestFlinkSql(appParam.getId(), true);
         newApp.setFlinkSql(copyFlinkSql.getSql());
-        newApp.setTeamResource(copyFlinkSql.getTeamResource());
         newApp.setDependency(copyFlinkSql.getDependency());
         FlinkSql flinkSql = new FlinkSql(newApp);
         flinkSqlService.create(flinkSql);
@@ -470,7 +471,7 @@ public class ApplicationManageServiceImpl extends 
ServiceImpl<ApplicationMapper,
       return newApp.getId();
     } else {
       throw new ApiAlertException(
-          "create application from copy failed, copy source app: " + 
oldApp.getJobName());
+          "create application from copy failed, copy source app: " + 
persist.getJobName());
     }
   }
 
diff --git 
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ExternalLinkServiceImpl.java
 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ExternalLinkServiceImpl.java
index ebf979c3f..800ba4ef8 100644
--- 
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ExternalLinkServiceImpl.java
+++ 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ExternalLinkServiceImpl.java
@@ -90,7 +90,7 @@ public class ExternalLinkServiceImpl extends 
ServiceImpl<ExternalLinkMapper, Ext
     Map<String, String> placeholderValueMap = new HashMap<>();
     placeholderValueMap.put(PlaceholderTypeEnum.JOB_ID.get(), app.getJobId());
     placeholderValueMap.put(PlaceholderTypeEnum.JOB_NAME.get(), 
app.getJobName());
-    placeholderValueMap.put(PlaceholderTypeEnum.YARN_ID.get(), app.getAppId());
+    placeholderValueMap.put(PlaceholderTypeEnum.YARN_ID.get(), 
app.getClusterId());
     PropertyPlaceholderHelper propertyPlaceholderHelper = new 
PropertyPlaceholderHelper("{", "}");
     link.setRenderedLinkUrl(
         propertyPlaceholderHelper.replacePlaceholders(
diff --git 
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/SavePointServiceImpl.java
 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/SavePointServiceImpl.java
index 13283c550..d2e1703e5 100644
--- 
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/SavePointServiceImpl.java
+++ 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/SavePointServiceImpl.java
@@ -303,18 +303,20 @@ public class SavePointServiceImpl extends 
ServiceImpl<SavePointMapper, SavePoint
 
   private String getClusterId(Application application, FlinkCluster cluster) {
     if (FlinkExecutionMode.isKubernetesMode(application.getExecutionMode())) {
-      return application.getClusterId();
-    }
-    if (FlinkExecutionMode.isYarnMode(application.getExecutionMode())) {
-      if (FlinkExecutionMode.YARN_SESSION == 
application.getFlinkExecutionMode()) {
+      return 
FlinkExecutionMode.isKubernetesSessionMode(application.getExecutionMode())
+          ? cluster.getClusterId()
+          : application.getClusterId();
+    } else if (FlinkExecutionMode.isYarnMode(application.getExecutionMode())) {
+      if 
(FlinkExecutionMode.YARN_SESSION.equals(application.getFlinkExecutionMode())) {
         AssertUtils.notNull(
             cluster,
             String.format(
                 "The yarn session clusterId=%s cannot be find, maybe the 
clusterId is wrong or the cluster has been deleted. Please contact the Admin.",
                 application.getFlinkClusterId()));
         return cluster.getClusterId();
+      } else {
+        return application.getClusterId();
       }
-      return application.getAppId();
     }
     return null;
   }
diff --git 
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/watcher/FlinkAppHttpWatcher.java
 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/watcher/FlinkAppHttpWatcher.java
index 2073d3b68..4cc490811 100644
--- 
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/watcher/FlinkAppHttpWatcher.java
+++ 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/watcher/FlinkAppHttpWatcher.java
@@ -674,12 +674,12 @@ public class FlinkAppHttpWatcher {
   }
 
   private YarnAppInfo httpYarnAppInfo(Application application) throws 
Exception {
-    String reqURL = "ws/v1/cluster/apps/".concat(application.getAppId());
+    String reqURL = "ws/v1/cluster/apps/".concat(application.getClusterId());
     return yarnRestRequest(reqURL, YarnAppInfo.class);
   }
 
   private Overview httpOverview(Application application) throws IOException {
-    String appId = application.getAppId();
+    String appId = application.getClusterId();
     if (appId != null
         && (FlinkExecutionMode.YARN_APPLICATION == 
application.getFlinkExecutionMode()
             || FlinkExecutionMode.YARN_PER_JOB == 
application.getFlinkExecutionMode())) {
@@ -708,7 +708,7 @@ public class FlinkAppHttpWatcher {
         reqURL = String.format(format, jmURL);
       } else {
         String format = "proxy/%s/" + flinkUrl;
-        reqURL = String.format(format, application.getAppId());
+        reqURL = String.format(format, application.getClusterId());
       }
       return yarnRestRequest(reqURL, JobsOverview.class);
     }
@@ -743,7 +743,7 @@ public class FlinkAppHttpWatcher {
         reqURL = String.format(format, jmURL, application.getJobId());
       } else {
         String format = "proxy/%s/" + flinkUrl;
-        reqURL = String.format(format, application.getAppId(), 
application.getJobId());
+        reqURL = String.format(format, application.getClusterId(), 
application.getJobId());
       }
       return yarnRestRequest(reqURL, CheckPoints.class);
     }
diff --git 
a/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/core/service/alert/AlertServiceTest.java
 
b/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/core/service/alert/AlertServiceTest.java
index c529e02bb..b44b4bf67 100644
--- 
a/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/core/service/alert/AlertServiceTest.java
+++ 
b/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/core/service/alert/AlertServiceTest.java
@@ -159,7 +159,7 @@ class AlertServiceTest {
     Application application = new Application();
     application.setStartTime(new Date());
     application.setJobName("Test My Job");
-    application.setAppId("1234567890");
+    application.setClusterId("1234567890");
     application.setAlertId(1L);
 
     application.setRestartCount(5);
@@ -202,7 +202,7 @@ class AlertServiceTest {
       duration = application.getEndTime().getTime() - 
application.getStartTime().getTime();
     }
     String format = "%s/proxy/%s/";
-    String url = String.format(format, YarnUtils.getRMWebAppURL(false), 
application.getAppId());
+    String url = String.format(format, YarnUtils.getRMWebAppURL(false), 
application.getClusterId());
 
     AlertTemplate template = new AlertTemplate();
     template.setJobName(application.getJobName());
diff --git 
a/streampark-console/streampark-console-webapp/src/api/flink/flinkSql.ts 
b/streampark-console/streampark-console-webapp/src/api/flink/flinkSql.ts
index 1ec71f79e..e4af1df51 100644
--- a/streampark-console/streampark-console-webapp/src/api/flink/flinkSql.ts
+++ b/streampark-console/streampark-console-webapp/src/api/flink/flinkSql.ts
@@ -43,7 +43,7 @@ export function fetchFlinkSqlList(data) {
   });
 }
 
-export function fetchRemoveFlinkSql(data: { id: string }): Promise<boolean> {
+export function fetchRemoveFlinkSql(data: { appId: any; id: any }): 
Promise<boolean> {
   return defHttp.post({
     url: FLINK_SQL_API.DELETE,
     data,
diff --git 
a/streampark-console/streampark-console-webapp/src/api/flink/savepoint.ts 
b/streampark-console/streampark-console-webapp/src/api/flink/savepoint.ts
index 359b53136..b4964f6e3 100644
--- a/streampark-console/streampark-console-webapp/src/api/flink/savepoint.ts
+++ b/streampark-console/streampark-console-webapp/src/api/flink/savepoint.ts
@@ -34,7 +34,7 @@ export function fetchSavePonitHistory(data: Recordable) {
  * @param data id
  * @returns {Promise<boolean>}
  */
-export function fetchRemoveSavePoint(data: { id: string }): Promise<boolean> {
+export function fetchRemoveSavePoint(data: { appId: any; id: any }): 
Promise<boolean> {
   return defHttp.post({
     url: SAVE_POINT_API.DELETE,
     data,
diff --git 
a/streampark-console/streampark-console-webapp/src/components/Application/src/AppDarkModeToggle.vue
 
b/streampark-console/streampark-console-webapp/src/components/Application/src/AppDarkModeToggle.vue
index 19ba3b151..d4e0ce164 100644
--- 
a/streampark-console/streampark-console-webapp/src/components/Application/src/AppDarkModeToggle.vue
+++ 
b/streampark-console/streampark-console-webapp/src/components/Application/src/AppDarkModeToggle.vue
@@ -63,7 +63,9 @@
       height: 18px;
       background-color: #fff;
       border-radius: 50%;
-      transition: transform 0.5s, background-color 0.5s;
+      transition:
+        transform 0.5s,
+        background-color 0.5s;
       will-change: transform;
     }
 
diff --git 
a/streampark-console/streampark-console-webapp/src/components/ContextMenu/src/ContextMenu.vue
 
b/streampark-console/streampark-console-webapp/src/components/ContextMenu/src/ContextMenu.vue
index e08c25f36..78cac5c5b 100644
--- 
a/streampark-console/streampark-console-webapp/src/components/ContextMenu/src/ContextMenu.vue
+++ 
b/streampark-console/streampark-console-webapp/src/components/ContextMenu/src/ContextMenu.vue
@@ -179,7 +179,9 @@
     background-color: @component-background;
     border: 1px solid rgb(0 0 0 / 8%);
     border-radius: 0.25rem;
-    box-shadow: 0 2px 2px 0 rgb(0 0 0 / 14%), 0 3px 1px -2px rgb(0 0 0 / 10%),
+    box-shadow:
+      0 2px 2px 0 rgb(0 0 0 / 14%),
+      0 3px 1px -2px rgb(0 0 0 / 10%),
       0 1px 5px 0 rgb(0 0 0 / 6%);
     background-clip: padding-box;
     user-select: none;
diff --git 
a/streampark-console/streampark-console-webapp/src/components/Form/src/BasicForm.vue
 
b/streampark-console/streampark-console-webapp/src/components/Form/src/BasicForm.vue
index 1cd7e3809..e5a9dacf6 100644
--- 
a/streampark-console/streampark-console-webapp/src/components/Form/src/BasicForm.vue
+++ 
b/streampark-console/streampark-console-webapp/src/components/Form/src/BasicForm.vue
@@ -113,7 +113,7 @@
       });
 
       const getBindValue = computed(
-        () => ({ ...attrs, ...props, ...unref(getProps) } as Recordable),
+        () => ({ ...attrs, ...props, ...unref(getProps) }) as Recordable,
       );
 
       const getSchema = computed((): FormSchema[] => {
diff --git 
a/streampark-console/streampark-console-webapp/src/components/Form/src/components/FormItem.vue
 
b/streampark-console/streampark-console-webapp/src/components/Form/src/components/FormItem.vue
index 137ae94d6..9bbb5023f 100644
--- 
a/streampark-console/streampark-console-webapp/src/components/Form/src/components/FormItem.vue
+++ 
b/streampark-console/streampark-console-webapp/src/components/Form/src/components/FormItem.vue
@@ -333,8 +333,8 @@
             return slot
               ? getSlot(slots, slot, unref(getValues))
               : render
-              ? render(unref(getValues))
-              : renderComponent();
+                ? render(unref(getValues))
+                : renderComponent();
           };
 
           const showSuffix = !!suffix;
@@ -382,8 +382,8 @@
           return colSlot
             ? getSlot(slots, colSlot, values)
             : renderColContent
-            ? renderColContent(values)
-            : renderItem();
+              ? renderColContent(values)
+              : renderItem();
         };
 
         return (
diff --git 
a/streampark-console/streampark-console-webapp/src/components/Modal/src/components/ModalWrapper.vue
 
b/streampark-console/streampark-console-webapp/src/components/Modal/src/components/ModalWrapper.vue
index 1b1b9a613..52c8b5eb8 100644
--- 
a/streampark-console/streampark-console-webapp/src/components/Modal/src/components/ModalWrapper.vue
+++ 
b/streampark-console/streampark-console-webapp/src/components/Modal/src/components/ModalWrapper.vue
@@ -153,8 +153,8 @@
             realHeightRef.value = props.height
               ? props.height
               : realHeight > maxHeight
-              ? maxHeight
-              : realHeight;
+                ? maxHeight
+                : realHeight;
           }
           emit('height-change', unref(realHeightRef));
         } catch (error) {
diff --git 
a/streampark-console/streampark-console-webapp/src/components/Page/src/PageFooter.vue
 
b/streampark-console/streampark-console-webapp/src/components/Page/src/PageFooter.vue
index e89a6ce97..8fdbc8f41 100644
--- 
a/streampark-console/streampark-console-webapp/src/components/Page/src/PageFooter.vue
+++ 
b/streampark-console/streampark-console-webapp/src/components/Page/src/PageFooter.vue
@@ -39,7 +39,9 @@
     line-height: 44px;
     background-color: @component-background;
     border-top: 1px solid @border-color-base;
-    box-shadow: 0 -6px 16px -8px rgb(0 0 0 / 8%), 0 -9px 28px 0 rgb(0 0 0 / 
5%),
+    box-shadow:
+      0 -6px 16px -8px rgb(0 0 0 / 8%),
+      0 -9px 28px 0 rgb(0 0 0 / 5%),
       0 -12px 48px 16px rgb(0 0 0 / 3%);
     transition: width 0.2s;
 
diff --git 
a/streampark-console/streampark-console-webapp/src/components/Table/src/components/HeaderCell.vue
 
b/streampark-console/streampark-console-webapp/src/components/Table/src/components/HeaderCell.vue
index 35c080269..36ab854c5 100644
--- 
a/streampark-console/streampark-console-webapp/src/components/Table/src/components/HeaderCell.vue
+++ 
b/streampark-console/streampark-console-webapp/src/components/Table/src/components/HeaderCell.vue
@@ -22,7 +22,7 @@
     props: {
       column: {
         type: Object as PropType<BasicColumn>,
-        default: () => ({} as BasicColumn),
+        default: () => ({}) as BasicColumn,
       },
     },
     setup(props) {
diff --git 
a/streampark-console/streampark-console-webapp/src/hooks/setting/useMenuSetting.ts
 
b/streampark-console/streampark-console-webapp/src/hooks/setting/useMenuSetting.ts
index 3566bfd27..f501c6e80 100644
--- 
a/streampark-console/streampark-console-webapp/src/hooks/setting/useMenuSetting.ts
+++ 
b/streampark-console/streampark-console-webapp/src/hooks/setting/useMenuSetting.ts
@@ -105,8 +105,8 @@ export function useMenuSetting() {
     return siderHidden
       ? 0
       : collapsedShowTitle
-      ? SIDE_BAR_SHOW_TIT_MINI_WIDTH
-      : SIDE_BAR_MINI_WIDTH;
+        ? SIDE_BAR_SHOW_TIT_MINI_WIDTH
+        : SIDE_BAR_MINI_WIDTH;
   });
 
   const getCalcContentWidth = computed(() => {
@@ -114,9 +114,9 @@ export function useMenuSetting() {
       unref(getIsTopMenu) || !unref(getShowMenu) || (unref(getSplit) && 
unref(getMenuHidden))
         ? 0
         : unref(getIsMixSidebar)
-        ? (unref(getCollapsed) ? SIDE_BAR_MINI_WIDTH : 
SIDE_BAR_SHOW_TIT_MINI_WIDTH) +
-          (unref(getMixSideFixed) && unref(mixSideHasChildren) ? 
unref(getRealWidth) : 0)
-        : unref(getRealWidth);
+          ? (unref(getCollapsed) ? SIDE_BAR_MINI_WIDTH : 
SIDE_BAR_SHOW_TIT_MINI_WIDTH) +
+            (unref(getMixSideFixed) && unref(mixSideHasChildren) ? 
unref(getRealWidth) : 0)
+          : unref(getRealWidth);
 
     return `calc(100% - ${unref(width)}px)`;
   });
diff --git 
a/streampark-console/streampark-console-webapp/src/hooks/web/useLockPage.ts 
b/streampark-console/streampark-console-webapp/src/hooks/web/useLockPage.ts
index c543be954..9a6607421 100644
--- a/streampark-console/streampark-console-webapp/src/hooks/web/useLockPage.ts
+++ b/streampark-console/streampark-console-webapp/src/hooks/web/useLockPage.ts
@@ -32,9 +32,12 @@ export function useLockPage() {
     }
     clear();
 
-    timeId = setTimeout(() => {
-      lockPage();
-    }, lockTime * 60 * 1000);
+    timeId = setTimeout(
+      () => {
+        lockPage();
+      },
+      lockTime * 60 * 1000,
+    );
   }
 
   function lockPage(): void {
diff --git a/streampark-console/streampark-console-webapp/src/utils/props.ts 
b/streampark-console/streampark-console-webapp/src/utils/props.ts
index 4a15ec465..150dd01f4 100644
--- a/streampark-console/streampark-console-webapp/src/utils/props.ts
+++ b/streampark-console/streampark-console-webapp/src/utils/props.ts
@@ -15,9 +15,8 @@ type ResolveProp<T> = ExtractPropTypes<{
   key: { type: T; required: true };
 }>['key'];
 type ResolvePropType<T> = ResolveProp<T> extends { type: infer V } ? V : 
ResolveProp<T>;
-type ResolvePropTypeWithReadonly<T> = Readonly<T> extends Readonly<Array<infer 
A>>
-  ? ResolvePropType<A[]>
-  : ResolvePropType<T>;
+type ResolvePropTypeWithReadonly<T> =
+  Readonly<T> extends Readonly<Array<infer A>> ? ResolvePropType<A[]> : 
ResolvePropType<T>;
 
 type IfUnknown<T, V> = [unknown] extends [T] ? V : T;
 
@@ -28,8 +27,8 @@ export type BuildPropOption<T, D extends BuildPropType<T, V, 
C>, R, V, C> = {
   default?: R extends true
     ? never
     : D extends Record<string, unknown> | Array<any>
-    ? () => D
-    : (() => D) | D;
+      ? () => D
+      : (() => D) | D;
   validator?: ((val: any) => val is C) | ((val: any) => boolean);
 };
 
@@ -37,8 +36,8 @@ type _BuildPropType<T, V, C> =
   | (T extends PropWrapper<unknown>
       ? T[typeof wrapperKey]
       : [V] extends [never]
-      ? ResolvePropTypeWithReadonly<T>
-      : never)
+        ? ResolvePropTypeWithReadonly<T>
+        : never)
   | V
   | C;
 export type BuildPropType<T, V, C> = _BuildPropType<
@@ -53,8 +52,8 @@ type _BuildPropDefault<T, D> = [T] extends [
 ]
   ? D
   : D extends () => T
-  ? ReturnType<D>
-  : D;
+    ? ReturnType<D>
+    : D;
 
 export type BuildPropDefault<T, D, R> = R extends true
   ? { readonly default?: undefined }
@@ -146,12 +145,12 @@ export const buildProps = <
     [K in keyof O]: O[K] extends BuildPropReturn<any, any, any, any, any>
       ? O[K]
       : [O[K]] extends NativePropType
-      ? O[K]
-      : O[K] extends BuildPropOption<infer T, infer D, infer R, infer V, infer 
C>
-      ? D extends BuildPropType<T, V, C>
-        ? BuildPropOption<T, D, R, V, C>
-        : never
-      : never;
+        ? O[K]
+        : O[K] extends BuildPropOption<infer T, infer D, infer R, infer V, 
infer C>
+          ? D extends BuildPropType<T, V, C>
+            ? BuildPropOption<T, D, R, V, C>
+            : never
+          : never;
   },
 >(
   props: O,
@@ -162,20 +161,20 @@ export const buildProps = <
     [K in keyof O]: O[K] extends { [propKey]: boolean }
       ? O[K]
       : [O[K]] extends NativePropType
-      ? O[K]
-      : O[K] extends BuildPropOption<
-          infer T,
-          // eslint-disable-next-line @typescript-eslint/no-unused-vars
-          infer _D,
-          infer R,
-          infer V,
-          infer C
-        >
-      ? BuildPropReturn<T, O[K]['default'], R, V, C>
-      : never;
+        ? O[K]
+        : O[K] extends BuildPropOption<
+              infer T,
+              // eslint-disable-next-line @typescript-eslint/no-unused-vars
+              infer _D,
+              infer R,
+              infer V,
+              infer C
+            >
+          ? BuildPropReturn<T, O[K]['default'], R, V, C>
+          : never;
   };
 
-export const definePropType = <T>(val: any) => ({ [wrapperKey]: val } as 
PropWrapper<T>);
+export const definePropType = <T>(val: any) => ({ [wrapperKey]: val }) as 
PropWrapper<T>;
 
 export const keyOf = <T extends Object>(arr: T) => Object.keys(arr) as 
Array<keyof T>;
 export const mutable = <T extends readonly any[] | Record<string, 
unknown>>(val: T) =>
diff --git 
a/streampark-console/streampark-console-webapp/src/views/base/error-log/data.tsx
 
b/streampark-console/streampark-console-webapp/src/views/base/error-log/data.tsx
index 3ffc2f453..73120a425 100644
--- 
a/streampark-console/streampark-console-webapp/src/views/base/error-log/data.tsx
+++ 
b/streampark-console/streampark-console-webapp/src/views/base/error-log/data.tsx
@@ -16,12 +16,12 @@ export function getColumns(): BasicColumn[] {
           text === ErrorTypeEnum.VUE
             ? 'green'
             : text === ErrorTypeEnum.RESOURCE
-            ? 'cyan'
-            : text === ErrorTypeEnum.PROMISE
-            ? 'blue'
-            : ErrorTypeEnum.AJAX
-            ? 'red'
-            : 'purple';
+              ? 'cyan'
+              : text === ErrorTypeEnum.PROMISE
+                ? 'blue'
+                : ErrorTypeEnum.AJAX
+                  ? 'red'
+                  : 'purple';
         return <Tag color={color}>{() => text}</Tag>;
       },
     },
diff --git 
a/streampark-console/streampark-console-webapp/src/views/base/login/Login.vue 
b/streampark-console/streampark-console-webapp/src/views/base/login/Login.vue
index d34791923..0a3858392 100644
--- 
a/streampark-console/streampark-console-webapp/src/views/base/login/Login.vue
+++ 
b/streampark-console/streampark-console-webapp/src/views/base/login/Login.vue
@@ -15,7 +15,15 @@
   limitations under the License.
 -->
 <template>
-  <div class="h-full bg-gradient-primary overflow-auto">
+  <div class="relative h-full bg-gradient-primary overflow-auto">
+    <div class="flex items-center locale-picker-border absolute right-15 
top-10">
+      <AppLocalePicker
+        class="text-white enter-x locale-picker"
+        v-if="getShowLocalePicker"
+        :reload="true"
+        :showText="false"
+      />
+    </div>
     <div class="w-full relative h-[calc(100%-120px)] min-h-700px flex 
items-center section">
       <div class="scribble-box w-[80%] h-full absolute overflow-hidden">
         <figure class="scribble scale-2 !opacity-10 top-50 left-0">
@@ -59,8 +67,9 @@
         />
       </a>
       <p class="text-light-100 pt-10px" style="border-top: 1px solid #dad7d7">
-        Copyright © 2022 The Apache Software Foundation. Apache StreamPark, 
StreamPark, and its
-        feather logo are trademarks of The Apache Software Foundation.
+        Copyright © 2022-{{ `${new Date().getFullYear()}` }} The Apache 
Software Foundation. Apache
+        StreamPark, StreamPark, and its feather logo are trademarks of The 
Apache Software
+        Foundation.
       </p>
     </footer>
   </div>
@@ -71,6 +80,8 @@
   import { useDesign } from '/@/hooks/web/useDesign';
   import { Row, Col } from 'ant-design-vue';
   import { SvgIcon } from '/@/components/Icon';
+  import { useLocale } from '/@/locales/useLocale';
+  import { AppLocalePicker } from '/@/components/Application';
   defineProps({
     sessionTimeout: {
       type: Boolean,
@@ -78,6 +89,7 @@
   });
 
   // const globSetting = useGlobSetting();
+  const { getShowLocalePicker } = useLocale();
   const { prefixCls } = useDesign('login');
   sessionStorage.removeItem('appPageNo');
   // const title = computed(() => globSetting?.title ?? '');
@@ -138,4 +150,13 @@
       }
     }
   }
+
+  .locale-picker-border {
+    border: 1px solid rgba(255, 255, 255, 0.6);
+    border-radius: 6px;
+  }
+
+  .locale-picker {
+    padding: 6px;
+  }
 </style>
diff --git 
a/streampark-console/streampark-console-webapp/src/views/base/login/LoginForm.vue
 
b/streampark-console/streampark-console-webapp/src/views/base/login/LoginForm.vue
index ca3bc116a..ce7b1307e 100644
--- 
a/streampark-console/streampark-console-webapp/src/views/base/login/LoginForm.vue
+++ 
b/streampark-console/streampark-console-webapp/src/views/base/login/LoginForm.vue
@@ -68,7 +68,7 @@
     </FormItem>
 
     <FormItem class="enter-x text-left">
-      <Button :href="BASE_ADDRESS+SSO_LOGIN_PATH" type="link" v-if="enableSSO">
+      <Button :href="BASE_ADDRESS + SSO_LOGIN_PATH" type="link" 
v-if="enableSSO">
         {{ t('sys.login.ssoSignIn') }}
       </Button>
       <Button type="link" class="float-right" @click="changeLoginType" 
v-if="enableLDAP">
@@ -117,7 +117,7 @@
     account: string;
     password: string;
   }
-  const BASE_ADDRESS= import.meta.env.VITE_BASE_ADDRESS;
+  const BASE_ADDRESS = import.meta.env.VITE_BASE_ADDRESS;
   const formRef = ref();
   const loading = ref(false);
   const userId = ref('');
diff --git 
a/streampark-console/streampark-console-webapp/src/views/flink/app/EditFlink.vue
 
b/streampark-console/streampark-console-webapp/src/views/flink/app/EditFlink.vue
index 23a6b9106..1aec48dab 100644
--- 
a/streampark-console/streampark-console-webapp/src/views/flink/app/EditFlink.vue
+++ 
b/streampark-console/streampark-console-webapp/src/views/flink/app/EditFlink.vue
@@ -100,10 +100,6 @@
         },
         versionId: app.versionId || null,
         k8sRestExposedType: app.k8sRestExposedType,
-        clusterId: app.clusterId,
-        [app.executionMode == ExecModeEnum.YARN_SESSION
-          ? 'yarnSessionClusterId'
-          : 'flinkClusterId']: app.flinkClusterId,
         flinkImage: app.flinkImage,
         k8sNamespace: app.k8sNamespace,
         alertId: selectAlertId,
@@ -114,6 +110,19 @@
       if (!executionMode) {
         Object.assign(defaultParams, { executionMode: app.executionMode });
       }
+      switch (app.executionMode) {
+        case ExecModeEnum.REMOTE:
+          defaultParams['remoteClusterId'] = app.flinkClusterId;
+          break;
+        case ExecModeEnum.YARN_SESSION:
+          defaultParams['yarnSessionClusterId'] = app.flinkClusterId;
+          break;
+        case ExecModeEnum.KUBERNETES_SESSION:
+          defaultParams['k8sSessionClusterId'] = app.flinkClusterId;
+          break;
+        default:
+          break;
+      }
       setFieldsValue(defaultParams);
       app.args && programArgRef.value?.setContent(app.args);
       setTimeout(() => {
diff --git 
a/streampark-console/streampark-console-webapp/src/views/flink/app/EditStreamPark.vue
 
b/streampark-console/streampark-console-webapp/src/views/flink/app/EditStreamPark.vue
index 6231939a6..220d474dd 100644
--- 
a/streampark-console/streampark-console-webapp/src/views/flink/app/EditStreamPark.vue
+++ 
b/streampark-console/streampark-console-webapp/src/views/flink/app/EditStreamPark.vue
@@ -121,14 +121,23 @@
           cpFailureRateInterval: app.cpFailureRateInterval,
           cpFailureAction: app.cpFailureAction,
         },
-        clusterId: app.clusterId,
-        [app.executionMode == ExecModeEnum.YARN_SESSION
-          ? 'yarnSessionClusterId'
-          : 'flinkClusterId']: app.flinkClusterId,
         flinkImage: app.flinkImage,
         k8sNamespace: app.k8sNamespace,
         ...resetParams,
       };
+      switch (app.executionMode) {
+        case ExecModeEnum.REMOTE:
+          defaultParams['remoteClusterId'] = app.flinkClusterId;
+          break;
+        case ExecModeEnum.YARN_SESSION:
+          defaultParams['yarnSessionClusterId'] = app.flinkClusterId;
+          break;
+        case ExecModeEnum.KUBERNETES_SESSION:
+          defaultParams['k8sSessionClusterId'] = app.flinkClusterId;
+          break;
+        default:
+          break;
+      }
       if (!executionMode) {
         Object.assign(defaultParams, { executionMode: app.executionMode });
       }
diff --git 
a/streampark-console/streampark-console-webapp/src/views/flink/app/View.vue 
b/streampark-console/streampark-console-webapp/src/views/flink/app/View.vue
index 66ebdfd16..bd9b9fc16 100644
--- a/streampark-console/streampark-console-webapp/src/views/flink/app/View.vue
+++ b/streampark-console/streampark-console-webapp/src/views/flink/app/View.vue
@@ -194,11 +194,6 @@
     optionApps,
   );
 
-  // build Detail
-  function openBuildProgressDetailDrawer(app: AppListRecord) {
-    openBuildDrawer(true, { appId: app.id });
-  }
-
   /* view */
   async function handleJobView(app: AppListRecord) {
     // Task is running, restarting, in savePoint
diff --git 
a/streampark-console/streampark-console-webapp/src/views/flink/app/components/AppDetail/DetailTab.vue
 
b/streampark-console/streampark-console-webapp/src/views/flink/app/components/AppDetail/DetailTab.vue
index d5bedd15a..c450b1286 100644
--- 
a/streampark-console/streampark-console-webapp/src/views/flink/app/components/AppDetail/DetailTab.vue
+++ 
b/streampark-console/streampark-console-webapp/src/views/flink/app/components/AppDetail/DetailTab.vue
@@ -232,6 +232,7 @@
     try {
       const res = await fetchFlinkSql({
         id: record.id,
+        appId: record.appId,
       });
       openFlinkDrawer(true, {
         sql: decodeByBase64(res.sql),
@@ -251,8 +252,8 @@
 
   /* delete flink sql */
   async function handleDeleteFlinkSql(record: Recordable) {
-    await fetchRemoveFlinkSql({ id: record.id });
-    reloadFlinkSql();
+    await fetchRemoveFlinkSql({ id: record.id, appId: record.appId });
+    await reloadFlinkSql();
   }
 
   function handleCompare(record: Recordable) {
@@ -344,7 +345,7 @@
 
   /* delete savePoint */
   async function handleDeleteSavePoint(record: Recordable) {
-    await fetchRemoveSavePoint({ id: record.id });
+    await fetchRemoveSavePoint({ id: record.id, appId: record.appId });
     reloadSavePoint();
   }
 
diff --git 
a/streampark-console/streampark-console-webapp/src/views/flink/app/components/AppDetail/FlinkSqlCompareModal.vue
 
b/streampark-console/streampark-console-webapp/src/views/flink/app/components/AppDetail/FlinkSqlCompareModal.vue
index 0c5c28b3c..a99a9f287 100644
--- 
a/streampark-console/streampark-console-webapp/src/views/flink/app/components/AppDetail/FlinkSqlCompareModal.vue
+++ 
b/streampark-console/streampark-console-webapp/src/views/flink/app/components/AppDetail/FlinkSqlCompareModal.vue
@@ -40,6 +40,7 @@
   const submitLoading = ref<boolean>(false);
   const compareRecord = reactive<Recordable>({});
   const values = useDetailProviderContext();
+  const appId = ref<any>();
 
   const [registerFlinkSqlDifferentDrawer, { openDrawer: openFlinkSqlDiffDrawer 
}] = useDrawer();
   const [registerModal, { closeModal }] = useModalInner((data) => {
@@ -47,8 +48,9 @@
   });
 
   async function onReceiveModalData(data) {
+    appId.value = toRaw(values).app.id;
     const res = await fetchFlinkSqlList({
-      appId: toRaw(values).app.id,
+      appId: appId.value,
       pageNo: 1,
       pageSize: 999999,
     });
@@ -83,11 +85,13 @@
     try {
       const source = await fetchFlinkSql({
         id: compareRecord.id,
+        appId: appId.value,
       });
       const sourceSql = decodeByBase64(source.sql);
       const sourceVersion = source.version;
       const target = await fetchFlinkSql({
         id: values.target,
+        appId: appId.value,
       });
       const targetSql = decodeByBase64(target.sql);
       const targetVersion = target.version;
diff --git 
a/streampark-console/streampark-console-webapp/src/views/flink/app/components/FlinkSql.vue
 
b/streampark-console/streampark-console-webapp/src/views/flink/app/components/FlinkSql.vue
index 3b9217c9f..865709c7f 100644
--- 
a/streampark-console/streampark-console-webapp/src/views/flink/app/components/FlinkSql.vue
+++ 
b/streampark-console/streampark-console-webapp/src/views/flink/app/components/FlinkSql.vue
@@ -39,7 +39,7 @@
   const { t } = useI18n();
 
   const flinkSql = ref();
-  const vertifyRes = reactive({
+  const verifyRes = reactive({
     errorMsg: '',
     verified: false,
     errorStart: 0,
@@ -55,6 +55,9 @@
       type: String,
       default: '',
     },
+    appId: {
+      type: String as PropType<Nullable<string>>,
+    },
     versionId: {
       type: String as PropType<Nullable<string>>,
     },
@@ -68,7 +71,7 @@
   /* verify */
   async function handleVerifySql() {
     if (isEmpty(props.value)) {
-      vertifyRes.errorMsg = 'empty sql';
+      verifyRes.errorMsg = 'empty sql';
       return false;
     }
 
@@ -83,22 +86,22 @@
         });
         const success = data.data === true || data.data === 'true';
         if (success) {
-          vertifyRes.verified = true;
-          vertifyRes.errorMsg = '';
+          verifyRes.verified = true;
+          verifyRes.errorMsg = '';
           syntaxError();
           return true;
         } else {
-          vertifyRes.errorStart = parseInt(data.start);
-          vertifyRes.errorEnd = parseInt(data.end);
+          verifyRes.errorStart = parseInt(data.start);
+          verifyRes.errorEnd = parseInt(data.end);
           switch (data.type) {
             case 4:
-              vertifyRes.errorMsg = 'Unsupported sql';
+              verifyRes.errorMsg = 'Unsupported sql';
               break;
             case 5:
-              vertifyRes.errorMsg = "SQL is not endWith ';'";
+              verifyRes.errorMsg = "SQL is not endWith ';'";
               break;
             default:
-              vertifyRes.errorMsg = data.message;
+              verifyRes.errorMsg = data.message;
               break;
           }
           syntaxError();
@@ -116,14 +119,14 @@
     if (editor) {
       const model = editor.getModel();
       const monaco = await getMonacoInstance();
-      if (vertifyRes.errorMsg) {
+      if (verifyRes.errorMsg) {
         try {
           monaco.editor.setModelMarkers(model, 'sql', [
             {
-              startLineNumber: vertifyRes.errorStart,
-              endLineNumber: vertifyRes.errorEnd,
+              startLineNumber: verifyRes.errorStart,
+              endLineNumber: verifyRes.errorEnd,
               severity: monaco.MarkerSeverity.Error,
-              message: vertifyRes.errorMsg,
+              message: verifyRes.errorMsg,
             },
           ]);
         } catch (e) {
@@ -173,7 +176,7 @@
   const flinkEditorClass = computed(() => {
     return {
       ...fullEditorClass.value,
-      ['syntax-' + (vertifyRes.errorMsg ? 'false' : 'true')]: true,
+      ['syntax-' + (verifyRes.errorMsg ? 'false' : 'true')]: true,
     };
   });
 
@@ -226,11 +229,11 @@
     </ButtonGroup>
     <div class="flex items-center justify-between" v-else>
       <div class="mt-10px flex-1 mr-10px overflow-hidden whitespace-nowrap">
-        <div class="text-red-600 overflow-ellipsis overflow-hidden" 
v-if="vertifyRes.errorMsg">
-          {{ vertifyRes.errorMsg }}
+        <div class="text-red-600 overflow-ellipsis overflow-hidden" 
v-if="verifyRes.errorMsg">
+          {{ verifyRes.errorMsg }}
         </div>
         <div v-else class="text-green-700">
-          <span v-if="vertifyRes.verified"> {{ 
t('flink.app.flinkSql.successful') }} </span>
+          <span v-if="verifyRes.verified"> {{ 
t('flink.app.flinkSql.successful') }} </span>
         </div>
       </div>
       <div class="flinksql-tool">
@@ -262,9 +265,9 @@
     </div>
   </div>
   <p class="conf-desc mt-10px" v-if="!fullScreenStatus">
-    <span class="text-red-600" v-if="vertifyRes.errorMsg"> {{ 
vertifyRes.errorMsg }} </span>
+    <span class="text-red-600" v-if="verifyRes.errorMsg"> {{ 
verifyRes.errorMsg }} </span>
     <span v-else class="text-green-700">
-      <span v-if="vertifyRes.verified"> {{ t('flink.app.flinkSql.successful') 
}} </span>
+      <span v-if="verifyRes.verified"> {{ t('flink.app.flinkSql.successful') 
}} </span>
     </span>
   </p>
 </template>
diff --git 
a/streampark-console/streampark-console-webapp/src/views/flink/app/data/detail.data.ts
 
b/streampark-console/streampark-console-webapp/src/views/flink/app/data/detail.data.ts
index fc5d884c7..7e71e2a69 100644
--- 
a/streampark-console/streampark-console-webapp/src/views/flink/app/data/detail.data.ts
+++ 
b/streampark-console/streampark-console-webapp/src/views/flink/app/data/detail.data.ts
@@ -132,7 +132,7 @@ export const getBackupColumns = (): BasicColumn[] => [
 
 export const getOptionLogColumns = (): BasicColumn[] => [
   { title: 'Operation Name', dataIndex: 'optionName', width: 150 },
-  { title: 'Application Id', dataIndex: 'yarnAppId' },
+  { title: 'Cluster Id', dataIndex: 'yarnAppId' },
   { title: 'JobManager URL', dataIndex: 'jobManagerUrl' },
   { title: 'Start Status', dataIndex: 'success', width: 120 },
   { title: 'Option Time', dataIndex: 'optionTime', width: 200 },
diff --git 
a/streampark-console/streampark-console-webapp/src/views/flink/app/hooks/useDetail.ts
 
b/streampark-console/streampark-console-webapp/src/views/flink/app/hooks/useDetail.ts
index 92a3b924e..15c98953c 100644
--- 
a/streampark-console/streampark-console-webapp/src/views/flink/app/hooks/useDetail.ts
+++ 
b/streampark-console/streampark-console-webapp/src/views/flink/app/hooks/useDetail.ts
@@ -108,7 +108,7 @@ export const userDetail = (
   }
   /* delete savePoint */
   async function handleDeleteSavePoint(record: Recordable) {
-    await fetchRemoveSavePoint({ id: record.id });
+    await fetchRemoveSavePoint({ id: record.id, appId: record.appId });
     reloadSavePoint();
   }
 
diff --git 
a/streampark-console/streampark-console-webapp/src/views/flink/app/hooks/useEditStreamPark.ts
 
b/streampark-console/streampark-console-webapp/src/views/flink/app/hooks/useEditStreamPark.ts
index 6a7b004a7..0f4659e15 100644
--- 
a/streampark-console/streampark-console-webapp/src/views/flink/app/hooks/useEditStreamPark.ts
+++ 
b/streampark-console/streampark-console-webapp/src/views/flink/app/hooks/useEditStreamPark.ts
@@ -37,6 +37,7 @@ export const useEditStreamParkSchema = (
 ) => {
   const flinkSql = ref();
   const route = useRoute();
+  const appId = route.query.appId as string;
   const {
     alerts,
     flinkEnvs,
@@ -48,14 +49,14 @@ export const useEditStreamParkSchema = (
     getExecutionModeSchema,
     suggestions,
   } = useCreateAndEditSchema(dependencyRef, {
-    appId: route.query.appId as string,
+    appId: appId,
     mode: 'streampark',
   });
   const { createMessage } = useMessage();
   const [registerDifferentDrawer, { openDrawer: openDiffDrawer }] = 
useDrawer();
 
   async function handleChangeSQL(v: string) {
-    const res = await fetchFlinkSql({ id: v });
+    const res = await fetchFlinkSql({ id: v, appId: appId });
     flinkSql.value?.setContent(decodeByBase64(res.sql));
     console.log('res', flinkSql.value);
     unref(dependencyRef)?.setDefaultValue(JSON.parse(res.dependency || '{}'));
@@ -66,7 +67,7 @@ export const useEditStreamParkSchema = (
       createMessage.warning('Two versions must be selected for comparison');
       return Promise.reject('error, compareSQL array length less thatn 2');
     }
-    const res = await fetchFlinkSql({ id: compareSQL.join(',') });
+    const res = await fetchFlinkSql({ appId: appId, id: compareSQL.join(',') 
});
     const obj1 = res[0];
     const obj2 = res[1];
     const sql1 = decodeByBase64(obj1.sql);

Reply via email to