This is an automated email from the ASF dual-hosted git repository.

gaurava pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git
The following commit(s) were added to refs/heads/trunk by this push:
     new 65f656738aa  HADOOP-19488 fix the temporary directory creation on Windows (#7511)

65f656738aa is described below

commit 65f656738aa805b26e71aaeda19213b2896a314b
Author: Sangjin Lee <sj...@apache.org>
AuthorDate: Fri Mar 21 23:47:43 2025 -0700

    HADOOP-19488 fix the temporary directory creation on Windows (#7511)

    * The Windows filesystem does not recognize POSIX permissions, which was
      causing the directory creation to fail.
    * If the filesystem does not support POSIX permissions, this change
      switches to user-based ACL permissions, which work on Windows.
---
 .../main/java/org/apache/hadoop/util/RunJar.java  | 72 +++++++++++++++++++---
 .../java/org/apache/hadoop/util/TestRunJar.java   | 19 ++++++
 2 files changed, 81 insertions(+), 10 deletions(-)

diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java
index e527f602cdd..881dd2defc8 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java
@@ -27,12 +27,17 @@
 import java.net.MalformedURLException;
 import java.net.URL;
 import java.net.URLClassLoader;
+import java.nio.file.FileSystems;
 import java.nio.file.Files;
+import java.nio.file.attribute.AclEntry;
+import java.nio.file.attribute.AclEntryPermission;
 import java.nio.file.attribute.FileAttribute;
-import java.nio.file.attribute.PosixFilePermission;
 import java.nio.file.attribute.PosixFilePermissions;
+import java.nio.file.attribute.UserPrincipal;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collections;
+import java.util.EnumSet;
 import java.util.Enumeration;
 import java.util.List;
 import java.util.Set;
@@ -51,6 +56,10 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static java.nio.file.attribute.AclEntryFlag.DIRECTORY_INHERIT;
+import static java.nio.file.attribute.AclEntryFlag.FILE_INHERIT;
+import static java.nio.file.attribute.AclEntryType.ALLOW;
+
 /** Run a Hadoop job jar. */
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
@@ -286,25 +295,19 @@ public void run(String[] args) throws Throwable {
     }
     mainClassName = mainClassName.replaceAll("/", ".");
 
-    File tmpDir = new File(System.getProperty("java.io.tmpdir"));
-    ensureDirectory(tmpDir);
-
     final File workDir;
     try {
-      FileAttribute<Set<PosixFilePermission>> perms = PosixFilePermissions
-          .asFileAttribute(PosixFilePermissions.fromString("rwx------"));
-      workDir = Files.createTempDirectory(tmpDir.toPath(), "hadoop-unjar", perms).toFile();
+      workDir = createWorkDirectory();
     } catch (IOException | SecurityException e) {
       // If user has insufficient perms to write to tmpDir, default
       // "Permission denied" message doesn't specify a filename.
System.err.println("Error creating temp dir in java.io.tmpdir " - + tmpDir + " due to " + e.getMessage()); + + System.getProperty("java.io.tmpdir") + " due to " + + e.getMessage()); System.exit(-1); return; } - ensureDirectory(workDir); - ShutdownHookManager.get().addShutdownHook( new Runnable() { @Override @@ -333,6 +336,55 @@ public void run() { } } + static File createWorkDirectory() throws IOException { + File tmpDir = new File(System.getProperty("java.io.tmpdir")); + ensureDirectory(tmpDir); + + File workDir = Files.createTempDirectory(tmpDir.toPath(), "hadoop-unjar", + directoryPermissions()).toFile(); + ensureDirectory(workDir); + return workDir; + } + + private static FileAttribute<?> directoryPermissions() throws IOException { + Set<String> views = FileSystems.getDefault().supportedFileAttributeViews(); + if (views.contains("posix")) { + return PosixFilePermissions + .asFileAttribute(PosixFilePermissions.fromString("rwx------")); + } else if (views.contains("acl")) { + return userOnly(); + } else { + throw new IOException("unrecognized FileSystem type " + + FileSystems.getDefault()); + } + } + + private static FileAttribute<?> userOnly() throws IOException { + UserPrincipal user = + FileSystems.getDefault() + .getUserPrincipalLookupService() + .lookupPrincipalByName(System.getProperty("user.name")); + List<AclEntry> acl = + Collections.singletonList(AclEntry.newBuilder() + .setType(ALLOW) + .setPrincipal(user) + .setPermissions(EnumSet.allOf(AclEntryPermission.class)) + .setFlags(DIRECTORY_INHERIT, FILE_INHERIT) + .build()); + return + new FileAttribute<List<AclEntry>>() { + @Override + public String name() { + return "acl:acl"; + } + + @Override + public List<AclEntry> value() { + return acl; + } + }; + } + /** * Creates a classloader based on the environment that was specified by the * user. If HADOOP_USE_CLIENT_CLASSLOADER is specified, it creates an diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestRunJar.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestRunJar.java index 7b1e031360a..39a4e72e958 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestRunJar.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestRunJar.java @@ -20,6 +20,7 @@ import static org.apache.hadoop.util.RunJar.MATCH_ANY; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.ArgumentMatchers.any; @@ -197,6 +198,24 @@ private File getUnjarDir(String dirName) { return unjarDir; } + /** + * Tests the creation of the temp working directory into which the jars are + * unjarred. 
+   */
+  @Test
+  public void testCreateWorkDirectory() throws Exception {
+    File workDir = null;
+    try {
+      workDir = RunJar.createWorkDirectory();
+
+      assertNotNull(workDir, "Work directory should exist and not null");
+    } finally {
+      if (workDir != null) {
+        FileUtil.fullyDelete(workDir);
+      }
+    }
+  }
+
   /**
    * Tests the client classloader to verify the main class and its dependent
    * class are loaded correctly by the application classloader, and others are

---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org
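
[Editor's note] For readers who want to try the fallback described in the commit message outside of Hadoop, the following is a minimal, self-contained sketch of the same java.nio.file calls the patch uses. The class name, main method, and "demo-" prefix are illustrative only and not part of the patch; the attribute selection (POSIX "rwx------" where the "posix" view is supported, otherwise an owner-only "acl:acl" attribute) mirrors RunJar.directoryPermissions() and RunJar.userOnly() above.

// TempDirPermissionsDemo.java -- illustrative sketch, not part of the commit.
import java.io.IOException;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.AclEntry;
import java.nio.file.attribute.AclEntryFlag;
import java.nio.file.attribute.AclEntryPermission;
import java.nio.file.attribute.AclEntryType;
import java.nio.file.attribute.FileAttribute;
import java.nio.file.attribute.PosixFilePermissions;
import java.nio.file.attribute.UserPrincipal;
import java.util.Collections;
import java.util.EnumSet;
import java.util.List;
import java.util.Set;

public class TempDirPermissionsDemo {

  public static void main(String[] args) throws IOException {
    // Create a user-private temp directory using whichever permission model
    // the default filesystem supports.
    Path dir = Files.createTempDirectory(
        Paths.get(System.getProperty("java.io.tmpdir")), "demo-", ownerOnly());
    System.out.println("created " + dir);
  }

  // Choose a FileAttribute that restricts the directory to the current user.
  static FileAttribute<?> ownerOnly() throws IOException {
    Set<String> views = FileSystems.getDefault().supportedFileAttributeViews();
    if (views.contains("posix")) {
      // Linux, macOS, and other POSIX filesystems: owner-only rwx.
      return PosixFilePermissions.asFileAttribute(
          PosixFilePermissions.fromString("rwx------"));
    }
    if (views.contains("acl")) {
      // Windows/NTFS: allow every ACL permission to the current user only,
      // inherited by files and subdirectories created underneath.
      UserPrincipal user = FileSystems.getDefault()
          .getUserPrincipalLookupService()
          .lookupPrincipalByName(System.getProperty("user.name"));
      List<AclEntry> acl = Collections.singletonList(AclEntry.newBuilder()
          .setType(AclEntryType.ALLOW)
          .setPrincipal(user)
          .setPermissions(EnumSet.allOf(AclEntryPermission.class))
          .setFlags(AclEntryFlag.DIRECTORY_INHERIT, AclEntryFlag.FILE_INHERIT)
          .build());
      // The attribute name "acl:acl" targets AclFileAttributeView.
      return new FileAttribute<List<AclEntry>>() {
        @Override public String name() { return "acl:acl"; }
        @Override public List<AclEntry> value() { return acl; }
      };
    }
    throw new IOException("filesystem supports neither posix nor acl views");
  }
}

On a POSIX filesystem this behaves like the pre-patch code path; on NTFS the temp directory is still restricted to the creating user, which is the property the "rwx------" mode was providing before the change.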