This is an automated email from the ASF dual-hosted git repository.
journey pushed a commit to branch 1.3.3-release
in repository https://gitbox.apache.org/repos/asf/incubator-dolphinscheduler.git
The following commit(s) were added to refs/heads/1.3.3-release by this push:
new 2e4f9f1 [Fix-#3713][common]Fix that catfile method Stream not closed
(#3810)
2e4f9f1 is described below
commit 2e4f9f19ceccdce67d87fbb9faee74fe091f2055
Author: lgcareer <[email protected]>
AuthorDate: Thu Sep 24 15:28:02 2020 +0800
[Fix-#3713][common]Fix that catfile method Stream not closed (#3810)
* [Bug-3713][HadoopUtils] catfile method Stream not closed (#3715)
* fix bug
Delete invalid field: executorcores
Modify verification prompt
* fix bug
Delete invalid field: executorcores
Modify verification prompt
* fix bug
Delete invalid field: executorcores
Modify verification prompt
* dag add close button
* reset last version
* reset last version
* dag add close button
dag add close button
* update CLICK_SAVE_WORKFLOW_BUTTON xpath
* update CLICK_SAVE_WORKFLOW_BUTTON xpath
* update CLICK_SAVE_WORKFLOW_BUTTON xpath
* update CLICK_SAVE_WORKFLOW_BUTTON xpath
* Update CreateWorkflowLocator.java
modify submit workflow button
* Update CreateWorkflowLocator.java
* Update CreateWorkflowLocator.java
modify CLICK_ADD_BUTTON
* Update CreateWorkflowLocator.java
delete print
* Update CreateWorkflowLocator.java
1
* Update CreateWorkflowLocator.java
1
* Setting '-XX:+DisableExplicitGC ' causes netty memory leaks
in addition
update '- XX: largepagesizeinbytes = 128M' to '- XX: largepagesizeinbytes =
10M'
* Update dag.vue
* Update dag.vue
* Update dag.vue
* Update CreateWorkflowLocator.java
* Revert "Setting '-XX:+DisableExplicitGC ' causes netty memory leaks"
This reverts commit 3a2cba7a
* Setting '-XX:+DisableExplicitGC ' causes netty memory leaks
in addition
update '- XX: largepagesizeinbytes = 128M' to '- XX: largepagesizeinbytes =
10M'
* Update dolphinscheduler-daemon.sh
* catfile method Stream not closed
* catfile method Stream not closed
Co-authored-by: dailidong <[email protected]>
Co-authored-by: xingchun-chen
<[email protected]>
* [Fix-#3713][common]Fix that catfile method Stream not closed
Co-authored-by: BoYiZhang <[email protected]>
Co-authored-by: dailidong <[email protected]>
Co-authored-by: xingchun-chen
<[email protected]>
---
.../dolphinscheduler/common/utils/HadoopUtils.java | 26 ++++++++++------------
1 file changed, 12 insertions(+), 14 deletions(-)
diff --git
a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java
b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java
index b4eebd6..9f35f45 100644
---
a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java
+++
b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java
@@ -195,7 +195,7 @@ public class HadoopUtils implements Closeable {
*/
String appUrl = "";
- if (StringUtils.isEmpty(rmHaIds)){
+ if (StringUtils.isEmpty(rmHaIds)) {
//single resourcemanager enabled
appUrl = appAddress;
yarnEnabled = true;
@@ -206,7 +206,7 @@ public class HadoopUtils implements Closeable {
logger.info("application url : {}", appUrl);
}
- if(StringUtils.isBlank(appUrl)){
+ if (StringUtils.isBlank(appUrl)) {
throw new Exception("application url is blank");
}
return String.format(appUrl, applicationId);
@@ -226,11 +226,11 @@ public class HadoopUtils implements Closeable {
return new byte[0];
}
- FSDataInputStream fsDataInputStream = fs.open(new Path(hdfsFilePath));
- return IOUtils.toByteArray(fsDataInputStream);
+ try (FSDataInputStream fsDataInputStream = fs.open(new
Path(hdfsFilePath))) {
+ return IOUtils.toByteArray(fsDataInputStream);
+ }
}
-
/**
* cat file on hdfs
*
@@ -493,20 +493,19 @@ public class HadoopUtils implements Closeable {
return String.format("%s/udfs", getHdfsTenantDir(tenantCode));
}
-
/**
* get hdfs file name
*
- * @param resourceType resource type
- * @param tenantCode tenant code
- * @param fileName file name
+ * @param resourceType resource type
+ * @param tenantCode tenant code
+ * @param fileName file name
* @return hdfs file name
*/
public static String getHdfsFileName(ResourceType resourceType, String
tenantCode, String fileName) {
if (fileName.startsWith("/")) {
- fileName = fileName.replaceFirst("/","");
+ fileName = fileName.replaceFirst("/", "");
}
- return String.format("%s/%s", getHdfsDir(resourceType,tenantCode),
fileName);
+ return String.format("%s/%s", getHdfsDir(resourceType, tenantCode),
fileName);
}
/**
@@ -518,7 +517,7 @@ public class HadoopUtils implements Closeable {
*/
public static String getHdfsResourceFileName(String tenantCode, String
fileName) {
if (fileName.startsWith("/")) {
- fileName = fileName.replaceFirst("/","");
+ fileName = fileName.replaceFirst("/", "");
}
return String.format("%s/%s", getHdfsResDir(tenantCode), fileName);
}
@@ -532,7 +531,7 @@ public class HadoopUtils implements Closeable {
*/
public static String getHdfsUdfFileName(String tenantCode, String
fileName) {
if (fileName.startsWith("/")) {
- fileName = fileName.replaceFirst("/","");
+ fileName = fileName.replaceFirst("/", "");
}
return String.format("%s/%s", getHdfsUdfDir(tenantCode), fileName);
}
@@ -545,7 +544,6 @@ public class HadoopUtils implements Closeable {
return String.format("%s/%s", getHdfsDataBasePath(), tenantCode);
}
-
/**
* getAppAddress
*