This is an automated email from the ASF dual-hosted git repository.

wuzhiguo pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/bigtop-manager.git


The following commit(s) were added to refs/heads/main by this push:
     new d580030f BIGTOP-4508: Fix datanode and nodemanager unable to create multi disk directories (#280)
d580030f is described below

commit d580030fd794c89ea60fef775c5c87a49904979e
Author: lvkaihua <[email protected]>
AuthorDate: Thu Oct 16 23:51:12 2025 +0800

    BIGTOP-4508: Fix datanode and nodemanager unable to create multi disk directories (#280)
---
 .../stack/bigtop/v3_3_0/hadoop/HadoopSetup.java    | 36 +++++++++++++---------
 1 file changed, 22 insertions(+), 14 deletions(-)

diff --git a/bigtop-manager-stack/bigtop-manager-stack-bigtop/src/main/java/org/apache/bigtop/manager/stack/bigtop/v3_3_0/hadoop/HadoopSetup.java b/bigtop-manager-stack/bigtop-manager-stack-bigtop/src/main/java/org/apache/bigtop/manager/stack/bigtop/v3_3_0/hadoop/HadoopSetup.java
index a15dcf72..88375ace 100644
--- a/bigtop-manager-stack/bigtop-manager-stack-bigtop/src/main/java/org/apache/bigtop/manager/stack/bigtop/v3_3_0/hadoop/HadoopSetup.java
+++ b/bigtop-manager-stack/bigtop-manager-stack-bigtop/src/main/java/org/apache/bigtop/manager/stack/bigtop/v3_3_0/hadoop/HadoopSetup.java
@@ -82,22 +82,30 @@ public class HadoopSetup {
                             hadoopGroup,
                             Constants.PERMISSION_755,
                             true);
-                    LinuxFileUtils.createDirectories(
-                            hadoopParams.getDfsDataDir(), hadoopUser, hadoopGroup, Constants.PERMISSION_755, true);
+                    if (StringUtils.isNotBlank(hadoopParams.getDfsDataDir())) {
+                        String[] dfsDataDirs = hadoopParams.getDfsDataDir().split("\\s*,\\s*");
+                        for (String dir : dfsDataDirs) {
+                            LinuxFileUtils.createDirectories(
+                                    dir, hadoopUser, hadoopGroup, Constants.PERMISSION_755, true);
+                        }
+                    }
                 }
                 case "nodemanager": {
-                    LinuxFileUtils.createDirectories(
-                            hadoopParams.getNodeManagerLogDir(),
-                            hadoopUser,
-                            hadoopGroup,
-                            Constants.PERMISSION_755,
-                            true);
-                    LinuxFileUtils.createDirectories(
-                            hadoopParams.getNodeManagerLocalDir(),
-                            hadoopUser,
-                            hadoopGroup,
-                            Constants.PERMISSION_755,
-                            true);
+                    if (StringUtils.isNotBlank(hadoopParams.getNodeManagerLogDir())) {
+                        String[] nmLogDirs = hadoopParams.getNodeManagerLogDir().split("\\s*,\\s*");
+                        for (String dir : nmLogDirs) {
+                            LinuxFileUtils.createDirectories(
+                                    dir, hadoopUser, hadoopGroup, Constants.PERMISSION_755, true);
+                        }
+                    }
+                    if (StringUtils.isNotBlank(hadoopParams.getNodeManagerLocalDir())) {
+                        String[] nmLocalDirs =
+                                hadoopParams.getNodeManagerLocalDir().split("\\s*,\\s*");
+                        for (String dir : nmLocalDirs) {
+                            LinuxFileUtils.createDirectories(
+                                    dir, hadoopUser, hadoopGroup, Constants.PERMISSION_755, true);
+                        }
+                    }
                 }
             }
         }
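
For reference, both new branches follow the same pattern: treat the configured directory value as a comma-separated list of mount points, split it with the whitespace-tolerant regex "\\s*,\\s*", and create every resulting directory. Below is a minimal standalone sketch of that splitting-and-creating step using only JDK calls; LinuxFileUtils and the hadoopParams getters from the patch are not reproduced here, and the class and method names in the sketch are illustrative only, not part of the patch.

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;

    public class MultiDiskDirsSketch {

        // Split a comma-separated directory list, tolerating spaces around the
        // commas, and create each directory including any missing parents.
        static void createAll(String dirList) throws IOException {
            if (dirList == null || dirList.isBlank()) {
                return;
            }
            for (String dir : dirList.split("\\s*,\\s*")) {
                Files.createDirectories(Path.of(dir));
            }
        }

        public static void main(String[] args) throws IOException {
            // Example value in the same comma-separated, multi-disk form as the
            // DataNode and NodeManager directory settings handled by the patch.
            createAll("/tmp/bm-sketch/data1, /tmp/bm-sketch/data2,/tmp/bm-sketch/data3");
        }
    }

Because the split regex consumes surrounding whitespace, values written as "/data1,/data2" and "/data1, /data2" yield the same directory set; ownership and permission handling remains with LinuxFileUtils.createDirectories in the patched code.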
