This is an automated email from the ASF dual-hosted git repository.
benjobs pushed a commit to branch dev
in repository https://gitbox.apache.org/repos/asf/incubator-streampark.git
The following commit(s) were added to refs/heads/dev by this push:
new 832908ac6 [Hotfix] Fix non-existent method references and thrown exceptions. (#3039)
832908ac6 is described below
commit 832908ac61014379ef806b3db95f55585deb5420
Author: Yuepeng Pan <[email protected]>
AuthorDate: Sun Sep 10 14:45:46 2023 +0800
[Hotfix] Fix non-existent method references and thrown exceptions. (#3039)
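
    java.lang.String#isBlank was only added in JDK 11, so the `!_.isBlank`
    predicates below reference a method that is missing on a JDK 8 classpath;
    the patch replaces them with org.apache.commons.lang3.StringUtils.isNotBlank,
    which is available on every supported JDK. The @SneakyThrows added to
    EnvInitializer#run is Lombok's mechanism for rethrowing checked exceptions
    without extra wrapping. A minimal Scala sketch of the string check, using a
    hypothetical `clusterId` value standing in for submitReq.k8sSubmitParam.clusterId:

        import org.apache.commons.lang3.StringUtils

        val clusterId: String = "   " // hypothetical input, not part of this patch

        // On JDK 8 this does not link: String#isBlank is JDK 11+.
        //   Option(clusterId).filter(!_.isBlank)

        // Portable replacement used by this commit: commons-lang3 handles
        // null, empty, and whitespace-only strings on any JDK.
        val name = Option(clusterId).filter(str => StringUtils.isNotBlank(str)) // => None here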
---
.../apache/streampark/console/core/runner/EnvInitializer.java | 2 ++
.../flink/client/impl/KubernetesApplicationClientV2.scala | 10 +++++++---
2 files changed, 9 insertions(+), 3 deletions(-)
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/runner/EnvInitializer.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/runner/EnvInitializer.java
index ab93e264b..73214ea7f 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/runner/EnvInitializer.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/runner/EnvInitializer.java
@@ -32,6 +32,7 @@ import org.apache.streampark.console.core.entity.FlinkEnv;
import org.apache.streampark.console.core.service.SettingService;
import org.apache.streampark.flink.kubernetes.v2.fs.EmbeddedFileServer;
+import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.ApplicationArguments;
@@ -72,6 +73,7 @@ public class EnvInitializer implements ApplicationRunner {
"^streampark-flink-shims_flink-(1.1[2-7])_(2.12)-(.*).jar$",
Pattern.CASE_INSENSITIVE | Pattern.DOTALL);
+ @SneakyThrows
@Override
public void run(ApplicationArguments args) throws Exception {
Optional<String> profile =
diff --git a/streampark-flink/streampark-flink-client/streampark-flink-client-core/src/main/scala/org/apache/streampark/flink/client/impl/KubernetesApplicationClientV2.scala b/streampark-flink/streampark-flink-client/streampark-flink-client-core/src/main/scala/org/apache/streampark/flink/client/impl/KubernetesApplicationClientV2.scala
index 665159c03..130dcd17e 100644
--- a/streampark-flink/streampark-flink-client/streampark-flink-client-core/src/main/scala/org/apache/streampark/flink/client/impl/KubernetesApplicationClientV2.scala
+++ b/streampark-flink/streampark-flink-client/streampark-flink-client-core/src/main/scala/org/apache/streampark/flink/client/impl/KubernetesApplicationClientV2.scala
@@ -25,6 +25,7 @@ import org.apache.streampark.flink.kubernetes.v2.model.{FlinkDeploymentDef, JobM
import org.apache.streampark.flink.kubernetes.v2.operator.FlinkK8sOperator
import org.apache.streampark.flink.packer.pipeline.K8sAppModeBuildResponse
+import org.apache.commons.lang3.StringUtils
import org.apache.flink.client.deployment.application.ApplicationConfiguration
import org.apache.flink.configuration._
import org.apache.flink.kubernetes.configuration.KubernetesConfigOptions
@@ -88,12 +89,12 @@ object KubernetesApplicationClientV2 extends KubernetesClientV2Trait with Logger
.getOrElse("default")
val name = Option(submitReq.k8sSubmitParam.clusterId)
- .filter(!_.isBlank)
+ .filter(str => StringUtils.isNotBlank(str))
.getOrElse(return Left("cluster-id should not be empty"))
val image = submitReq.k8sSubmitParam.baseImage
.orElse(Option(buildResult.flinkBaseImage))
- .filter(!_.isBlank)
+ .filter(str => StringUtils.isNotBlank(str))
.getOrElse(return Left("Flink base image should not be empty"))
val imagePullPolicy = flinkConfObj
@@ -180,7 +181,10 @@ object KubernetesApplicationClientV2 extends KubernetesClientV2Trait with Logger
logConfigs
.map { case (name, path) => name -> os.Path(path) }
.filter { case (_, path) => Try(os.exists(path) && os.isFile(path)).getOrElse(false) }
- .map { case (name, path) => name -> Try(os.read(path)).toOption.filter(!_.isBlank) }
+ .map {
+ case (name, path) =>
+ name -> Try(os.read(path)).toOption.filter(str => StringUtils.isNotBlank(str))
+ }
.filter { case (_, content) => content.isDefined }
.map { case (name, content) => name -> content.get }
.foreach { case (name, content) => items += name -> content }
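
    As a design note, not part of this patch: the map/filter/map/filter/map/foreach
    chain above could also be collapsed into a single flatMap once the Option is
    produced, which avoids the intermediate isDefined/get pair. A rough Scala sketch,
    assuming logConfigs is a Map[String, String] of config name to path and items is
    a mutable buffer of (String, String) pairs, as in the surrounding code:

        logConfigs
          .map { case (name, path) => name -> os.Path(path) }
          .filter { case (_, path) => Try(os.exists(path) && os.isFile(path)).getOrElse(false) }
          .flatMap { case (name, path) =>
            Try(os.read(path)).toOption
              .filter(str => StringUtils.isNotBlank(str))
              .map(content => name -> content)
          }
          .foreach { case (name, content) => items += name -> content }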