This is an automated email from the ASF dual-hosted git repository.

benjobs pushed a commit to branch 1.20
in repository https://gitbox.apache.org/repos/asf/streampark.git

commit 399db0982f5a39c2a8417c9aed89e072d0002d70
Author: benjobs <[email protected]>
AuthorDate: Sun Jun 22 10:06:16 2025 +0800

    [Improve] The flink version for streampark-console has been upgraded to 1.20.
---
 pom.xml                                                             | 2 +-
 .../src/views/setting/extlink/components/useExternalLink.tsx        | 2 +-
 .../org/apache/streampark/flink/client/impl/YarnPerJobClient.scala  | 3 +--
 .../apache/streampark/flink/kubernetes/KubernetesRetriever.scala    | 6 +++---
 4 files changed, 6 insertions(+), 7 deletions(-)

diff --git a/pom.xml b/pom.xml
index 804eb49a3..bdb785873 100644
--- a/pom.xml
+++ b/pom.xml
@@ -93,7 +93,7 @@
        <scala.binary.flink.version>_${scala.binary.version}</scala.binary.flink.version>
        <flink.connector.version>3.2.0-1.18</flink.connector.version>
        <flink.elasticserch.connector.version>3.0.1-1.17</flink.elasticserch.connector.version>
-        <flink.version>1.18.1</flink.version>
+        <flink.version>1.20.0</flink.version>
         <flink.shaded.version>1.8.1</flink.shaded.version>
         <streampark.shaded.version>1.0.0</streampark.shaded.version>
         <streampark.flink.shims.version>1.14</streampark.flink.shims.version>
diff --git a/streampark-console/streampark-console-webapp/src/views/setting/extlink/components/useExternalLink.tsx b/streampark-console/streampark-console-webapp/src/views/setting/extlink/components/useExternalLink.tsx
index 08c84eb95..7db722400 100644
--- a/streampark-console/streampark-console-webapp/src/views/setting/extlink/components/useExternalLink.tsx
+++ b/streampark-console/streampark-console-webapp/src/views/setting/extlink/components/useExternalLink.tsx
@@ -46,7 +46,7 @@ export const renderColorField = (formModel: Recordable, field: string) => {
         onBadgeClick={handlePromptColorUpdate(DEFAULT_GREEN_HEX)}
       />
       <LinkBadge
-        label="Fink"
+        label="Link"
         message="Metrics"
         color={DEFAULT_BLUE_HEX}
         onBadgeClick={handlePromptColorUpdate(DEFAULT_BLUE_HEX)}
diff --git a/streampark-flink/streampark-flink-client/streampark-flink-client-core/src/main/scala/org/apache/streampark/flink/client/impl/YarnPerJobClient.scala b/streampark-flink/streampark-flink-client/streampark-flink-client-core/src/main/scala/org/apache/streampark/flink/client/impl/YarnPerJobClient.scala
index 5e8d6dfd0..d19ba8787 100644
--- a/streampark-flink/streampark-flink-client/streampark-flink-client-core/src/main/scala/org/apache/streampark/flink/client/impl/YarnPerJobClient.scala
+++ b/streampark-flink/streampark-flink-client/streampark-flink-client-core/src/main/scala/org/apache/streampark/flink/client/impl/YarnPerJobClient.scala
@@ -30,7 +30,6 @@ import org.apache.flink.yarn.entrypoint.YarnJobClusterEntrypoint
 import org.apache.hadoop.fs.{Path => HadoopPath}
 import org.apache.hadoop.yarn.api.records.ApplicationId
 
-import java.io.File
 import java.lang.{Boolean => JavaBool}
 
 /** yarn PerJob mode submit */
@@ -60,7 +59,7 @@ object YarnPerJobClient extends YarnClientTrait {
       getYarnClusterDeployDescriptor(flinkConfig, submitRequest.hadoopUser)
     val flinkDistJar = FlinkUtils.getFlinkDistJar(flinkHome)
     clusterDescriptor.setLocalJarPath(new HadoopPath(flinkDistJar))
-    clusterDescriptor.addShipFiles(List(new File(s"$flinkHome/lib")))
+    clusterDescriptor.addShipFiles(List(new HadoopPath(s"$flinkHome/lib")))
 
     var packagedProgram: PackagedProgram = null
     val clusterClient = {
diff --git a/streampark-flink/streampark-flink-kubernetes/src/main/scala/org/apache/streampark/flink/kubernetes/KubernetesRetriever.scala b/streampark-flink/streampark-flink-kubernetes/src/main/scala/org/apache/streampark/flink/kubernetes/KubernetesRetriever.scala
index 21b96b41a..397634fde 100644
--- a/streampark-flink/streampark-flink-kubernetes/src/main/scala/org/apache/streampark/flink/kubernetes/KubernetesRetriever.scala
+++ b/streampark-flink/streampark-flink-kubernetes/src/main/scala/org/apache/streampark/flink/kubernetes/KubernetesRetriever.scala
@@ -44,7 +44,7 @@ object KubernetesRetriever extends Logger {
 
   // see org.apache.flink.configuration.RestOptions.AWAIT_LEADER_TIMEOUT
   val FLINK_REST_AWAIT_TIMEOUT_SEC: Timeout =
-    Timeout.ofMilliseconds(RestOptions.AWAIT_LEADER_TIMEOUT.defaultValue())
+    Timeout.ofMilliseconds(RestOptions.AWAIT_LEADER_TIMEOUT.defaultValue().toMillis)
 
   private val DEPLOYMENT_LOST_TIME = collection.mutable.Map[String, Long]()
 
@@ -63,7 +63,7 @@ object KubernetesRetriever extends Logger {
     new DefaultClusterClientServiceLoader()
 
   /** get new flink cluster client of kubernetes mode */
-  def newFinkClusterClient(
+  def newFlinkClusterClient(
       clusterId: String,
       @Nullable namespace: String,
       executeMode: FlinkK8sDeployMode.Value): Option[ClusterClient[String]] = {
@@ -157,7 +157,7 @@ object KubernetesRetriever extends Logger {
   /** retrieve flink jobManager rest url */
   def retrieveFlinkRestUrl(clusterKey: ClusterKey): Option[String] = {
     val client = KubernetesRetriever
-      .newFinkClusterClient(clusterKey.clusterId, clusterKey.namespace, clusterKey.executeMode)
+      .newFlinkClusterClient(clusterKey.clusterId, clusterKey.namespace, clusterKey.executeMode)
       .getOrElse(return None)
     val url =
      IngressController.getIngressUrlAddress(clusterKey.namespace, clusterKey.clusterId, client)

Reply via email to