Author: suresh
Date: Fri May 24 22:20:55 2013
New Revision: 1486238
URL: http://svn.apache.org/r1486238
Log:
HADOOP-8562. Merge r1477376 for HADOOP-9413, r1476877 for HDFS-4734, r1476856
for HADOOP-9524.
Modified:
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/winutils/include/winutils.h
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/winutils/libwinutils.c
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/unix/TemporarySocketDirectory.java
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestShell.java
Modified:
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1486238&r1=1486237&r2=1486238&view=diff
==============================================================================
---
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
(original)
+++
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
Fri May 24 22:20:55 2013
@@ -342,6 +342,12 @@ Release 2.0.5-beta - UNRELEASED
HADOOP-9490. LocalFileSystem#reportChecksumFailure not closing the
checksum file handle before rename. (Ivan Mitic via suresh)
+ HADOOP-9524. Fix ShellCommandFencer to work on Windows.
+ (Arpit Agarwal via suresh)
+
+ HADOOP-9413. Add common utils for File#setReadable/Writable/Executable &
+ File#canRead/Write/Execute that work cross-platform. (Ivan Mitic via
+ suresh)
+
HADOOP-9488. FileUtil#createJarWithClassPath only substitutes environment
variables from current process environment/does not support overriding
when launching new process (Chris Nauroth via bikas)
Modified:
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java?rev=1486238&r1=1486237&r2=1486238&view=diff
==============================================================================
---
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
(original)
+++
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
Fri May 24 22:20:55 2013
@@ -41,7 +41,6 @@ import org.apache.hadoop.io.nativeio.Nat
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Shell;
import org.apache.hadoop.util.Shell.ShellCommandExecutor;
-
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -145,9 +144,9 @@ public class FileUtil {
* Pure-Java implementation of "chmod +rwx f".
*/
private static void grantPermissions(final File f) {
- f.setExecutable(true);
- f.setReadable(true);
- f.setWritable(true);
+ FileUtil.setExecutable(f, true);
+ FileUtil.setReadable(f, true);
+ FileUtil.setWritable(f, true);
}
private static boolean deleteImpl(final File f, final boolean doLog) {
@@ -780,6 +779,129 @@ public class FileUtil {
}
/**
+ * Platform independent implementation for {@link File#setReadable(boolean)}
+ * File#setReadable does not work as expected on Windows.
+ * @param f input file
+ * @param readable
+ * @return true on success, false otherwise
+ */
+ public static boolean setReadable(File f, boolean readable) {
+ if (Shell.WINDOWS) {
+ try {
+ String permission = readable ? "u+r" : "u-r";
+ FileUtil.chmod(f.getCanonicalPath(), permission, false);
+ return true;
+ } catch (IOException ex) {
+ return false;
+ }
+ } else {
+ return f.setReadable(readable);
+ }
+ }
+
+ /**
+ * Platform independent implementation for {@link File#setWritable(boolean)}
+ * File#setWritable does not work as expected on Windows.
+ * @param f input file
+ * @param writable
+ * @return true on success, false otherwise
+ */
+ public static boolean setWritable(File f, boolean writable) {
+ if (Shell.WINDOWS) {
+ try {
+ String permission = writable ? "u+w" : "u-w";
+ FileUtil.chmod(f.getCanonicalPath(), permission, false);
+ return true;
+ } catch (IOException ex) {
+ return false;
+ }
+ } else {
+ return f.setWritable(writable);
+ }
+ }
+
+ /**
+ * Platform independent implementation for {@link File#setExecutable(boolean)}
+ * File#setExecutable does not work as expected on Windows.
+ * Note: revoking execute permission on folders does not have the same
+ * behavior on Windows as on Unix platforms. Creating, deleting or renaming
+ * a file within that folder will still succeed on Windows.
+ * @param f input file
+ * @param executable
+ * @return true on success, false otherwise
+ */
+ public static boolean setExecutable(File f, boolean executable) {
+ if (Shell.WINDOWS) {
+ try {
+ String permission = executable ? "u+x" : "u-x";
+ FileUtil.chmod(f.getCanonicalPath(), permission, false);
+ return true;
+ } catch (IOException ex) {
+ return false;
+ }
+ } else {
+ return f.setExecutable(executable);
+ }
+ }
+
+ /**
+ * Platform independent implementation for {@link File#canRead()}
+ * @param f input file
+ * @return On Unix, same as {@link File#canRead()}
+ * On Windows, true if process has read access on the path
+ */
+ public static boolean canRead(File f) {
+ if (Shell.WINDOWS) {
+ try {
+ return NativeIO.Windows.access(f.getCanonicalPath(),
+ NativeIO.Windows.AccessRight.ACCESS_READ);
+ } catch (IOException e) {
+ return false;
+ }
+ } else {
+ return f.canRead();
+ }
+ }
+
+ /**
+ * Platform independent implementation for {@link File#canWrite()}
+ * @param f input file
+ * @return On Unix, same as {@link File#canWrite()}
+ * On Windows, true if process has write access on the path
+ */
+ public static boolean canWrite(File f) {
+ if (Shell.WINDOWS) {
+ try {
+ return NativeIO.Windows.access(f.getCanonicalPath(),
+ NativeIO.Windows.AccessRight.ACCESS_WRITE);
+ } catch (IOException e) {
+ return false;
+ }
+ } else {
+ return f.canWrite();
+ }
+ }
+
+ /**
+ * Platform independent implementation for {@link File#canExecute()}
+ * @param f input file
+ * @return On Unix, same as {@link File#canExecute()}
+ * On Windows, true if process has execute access on the path
+ */
+ public static boolean canExecute(File f) {
+ if (Shell.WINDOWS) {
+ try {
+ return NativeIO.Windows.access(f.getCanonicalPath(),
+ NativeIO.Windows.AccessRight.ACCESS_EXECUTE);
+ } catch (IOException e) {
+ return false;
+ }
+ } else {
+ return f.canExecute();
+ }
+ }
+
+ /**
* Set permissions to the required value. Uses the java primitives instead
* of forking if group == other.
* @param f the file to change
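For illustration, here is a minimal usage sketch of the cross-platform helpers added to FileUtil above. It is not part of this patch; the class name and scratch file are hypothetical, and it assumes hadoop-common (plus, on Windows, winutils and the native library) is available.

import java.io.File;
import java.io.IOException;

import org.apache.hadoop.fs.FileUtil;

public class FileUtilAccessSketch {
  public static void main(String[] args) throws IOException {
    // Hypothetical scratch file; any local path would do.
    File f = File.createTempFile("access-sketch", ".tmp");
    try {
      // Revoke and restore the owner's read bit. On Windows these calls
      // route through winutils chmod; on Unix they fall back to java.io.File.
      FileUtil.setReadable(f, false);
      System.out.println("readable after revoke:  " + FileUtil.canRead(f));
      FileUtil.setReadable(f, true);
      System.out.println("readable after restore: " + FileUtil.canRead(f));

      // Same pattern for the execute bit.
      FileUtil.setExecutable(f, true);
      System.out.println("executable: " + FileUtil.canExecute(f));
    } finally {
      f.delete();
    }
  }
}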
Modified:
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java?rev=1486238&r1=1486237&r2=1486238&view=diff
==============================================================================
---
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java
(original)
+++
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java
Fri May 24 22:20:55 2013
@@ -103,7 +103,8 @@ public class LocalFileSystem extends Che
String device = new DF(f, getConf()).getMount();
File parent = f.getParentFile();
File dir = null;
- while (parent!=null && parent.canWrite() && parent.toString().startsWith(device)) {
+ while (parent != null && FileUtil.canWrite(parent) &&
+ parent.toString().startsWith(device)) {
dir = parent;
parent = parent.getParentFile();
}
Modified:
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java?rev=1486238&r1=1486237&r2=1486238&view=diff
==============================================================================
---
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java
(original)
+++
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java
Fri May 24 22:20:55 2013
@@ -356,6 +356,43 @@ public class NativeIO {
/** Windows only methods used for getOwner() implementation */
private static native String getOwner(FileDescriptor fd) throws IOException;
+ /** Supported list of Windows access right flags */
+ public static enum AccessRight {
+ ACCESS_READ (0x0001), // FILE_READ_DATA
+ ACCESS_WRITE (0x0002), // FILE_WRITE_DATA
+ ACCESS_EXECUTE (0x0020); // FILE_EXECUTE
+
+ private final int accessRight;
+ AccessRight(int access) {
+ accessRight = access;
+ }
+
+ public int accessRight() {
+ return accessRight;
+ }
+ };
+
+ /** Windows only method used to check if the current process has requested
+ * access rights on the given path. */
+ private static native boolean access0(String path, int requestedAccess);
+
+ /**
+ * Checks whether the current process has desired access rights on
+ * the given path.
+ *
+ * Longer term this native function can be substituted with JDK7
+ * function Files#isReadable, isWritable, isExecutable.
+ *
+ * @param path input path
+ * @param desiredAccess ACCESS_READ, ACCESS_WRITE or ACCESS_EXECUTE
+ * @return true if access is allowed
+ * @throws IOException I/O exception on error
+ */
+ public static boolean access(String path, AccessRight desiredAccess)
+ throws IOException {
+ return access0(path, desiredAccess.accessRight());
+ }
+
static {
if (NativeCodeLoader.isNativeCodeLoaded()) {
try {
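As a usage sketch for the new NativeIO.Windows.access API (not part of this patch): the call is Windows-only and backed by native code, so it is guarded accordingly. The class name and default path below are hypothetical.

import java.io.File;
import java.io.IOException;

import org.apache.hadoop.io.nativeio.NativeIO;
import org.apache.hadoop.util.NativeCodeLoader;
import org.apache.hadoop.util.Shell;

public class WindowsAccessSketch {
  public static void main(String[] args) throws IOException {
    // access0 is implemented only on Windows and requires the native library.
    if (!Shell.WINDOWS || !NativeCodeLoader.isNativeCodeLoaded()) {
      System.out.println("NativeIO.Windows.access is not available here");
      return;
    }
    // Hypothetical default path; pass a real one as the first argument.
    File f = new File(args.length > 0 ? args[0] : "C:\\temp\\sample.txt");
    boolean canRead = NativeIO.Windows.access(f.getAbsolutePath(),
        NativeIO.Windows.AccessRight.ACCESS_READ);
    boolean canWrite = NativeIO.Windows.access(f.getAbsolutePath(),
        NativeIO.Windows.AccessRight.ACCESS_WRITE);
    System.out.println("read=" + canRead + " write=" + canWrite);
  }
}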
Modified:
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java?rev=1486238&r1=1486237&r2=1486238&view=diff
==============================================================================
---
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
(original)
+++
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
Fri May 24 22:20:55 2013
@@ -23,6 +23,7 @@ import java.io.IOException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
@@ -160,11 +161,7 @@ public class DiskChecker {
+ dir.toString());
}
- if (Shell.WINDOWS) {
- checkAccessByFileSystemInteraction(dir);
- } else {
- checkAccessByFileMethods(dir);
- }
+ checkAccessByFileMethods(dir);
}
/**
@@ -177,68 +174,19 @@ public class DiskChecker {
*/
private static void checkAccessByFileMethods(File dir)
throws DiskErrorException {
- if (!dir.canRead()) {
+ if (!FileUtil.canRead(dir)) {
throw new DiskErrorException("Directory is not readable: "
+ dir.toString());
}
- if (!dir.canWrite()) {
+ if (!FileUtil.canWrite(dir)) {
throw new DiskErrorException("Directory is not writable: "
+ dir.toString());
}
- if (!dir.canExecute()) {
+ if (!FileUtil.canExecute(dir)) {
throw new DiskErrorException("Directory is not executable: "
+ dir.toString());
}
}
-
- /**
- * Checks that the current running process can read, write, and execute the
- * given directory by attempting each of those operations on the file system.
- * This method contains several workarounds to known JVM bugs that cause
- * File.canRead, File.canWrite, and File.canExecute to return incorrect results
- * on Windows with NTFS ACLs. See:
- * http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6203387
- * These bugs are supposed to be fixed in JDK7.
- *
- * @param dir File to check
- * @throws DiskErrorException if dir is not readable, not writable, or not
- * executable
- */
- private static void checkAccessByFileSystemInteraction(File dir)
- throws DiskErrorException {
- // Make sure we can read the directory by listing it.
- if (dir.list() == null) {
- throw new DiskErrorException("Directory is not readable: "
- + dir.toString());
- }
-
- // Make sure we can write to the directory by creating a temp file in it.
- try {
- File tempFile = File.createTempFile("checkDirAccess", null, dir);
- if (!tempFile.delete()) {
- throw new DiskErrorException("Directory is not writable: "
- + dir.toString());
- }
- } catch (IOException e) {
- throw new DiskErrorException("Directory is not writable: "
- + dir.toString(), e);
- }
-
- // Make sure the directory is executable by trying to cd into it. This
- // launches a separate process. It does not change the working directory of
- // the current process.
- try {
- String[] cdCmd = new String[] { "cmd", "/C", "cd",
- dir.getAbsolutePath() };
- Shell.execCommand(null, cdCmd, SHELL_TIMEOUT);
- } catch (Shell.ExitCodeException e) {
- throw new DiskErrorException("Directory is not executable: "
- + dir.toString(), e);
- } catch (IOException e) {
- throw new DiskErrorException("Directory is not executable: "
- + dir.toString(), e);
- }
- }
}
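For context, a hedged sketch of how the simplified check above is typically exercised (not part of this patch; the class name is hypothetical and the directory argument is illustrative). DiskChecker.checkDir eventually runs the read/write/execute checks shown above, which now use the FileUtil helpers on every platform.

import java.io.File;

import org.apache.hadoop.util.DiskChecker;
import org.apache.hadoop.util.DiskChecker.DiskErrorException;

public class DiskCheckSketch {
  public static void main(String[] args) {
    // Illustrative directory; defaults to the JVM temp dir.
    File dir = new File(args.length > 0 ? args[0]
        : System.getProperty("java.io.tmpdir"));
    try {
      // Creates the directory if needed, then verifies it is a readable,
      // writable and executable directory.
      DiskChecker.checkDir(dir);
      System.out.println(dir + " passed the disk check");
    } catch (DiskErrorException e) {
      System.err.println(dir + " failed the disk check: " + e.getMessage());
    }
  }
}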
Modified:
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c?rev=1486238&r1=1486237&r2=1486238&view=diff
==============================================================================
---
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
(original)
+++
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
Fri May 24 22:20:55 2013
@@ -812,6 +812,42 @@ cleanup:
#endif
}
+/*
+ * Class: org_apache_hadoop_io_nativeio_NativeIO_Windows
+ * Method: access0
+ * Signature: (Ljava/lang/String;I)Z
+ */
+JNIEXPORT jboolean JNICALL
+Java_org_apache_hadoop_io_nativeio_NativeIO_00024Windows_access0
+ (JNIEnv *env, jclass clazz, jstring jpath, jint jaccess)
+{
+#ifdef UNIX
+ THROW(env, "java/io/IOException",
+ "The function access0(path, access) is not supported on Unix");
+ return NULL;
+#endif
+
+#ifdef WINDOWS
+ LPCWSTR path = NULL;
+ DWORD dwRtnCode = ERROR_SUCCESS;
+ ACCESS_MASK access = (ACCESS_MASK)jaccess;
+ BOOL allowed = FALSE;
+
+ path = (LPCWSTR) (*env)->GetStringChars(env, jpath, NULL);
+ if (!path) goto cleanup; // exception was thrown
+
+ dwRtnCode = CheckAccessForCurrentUser(path, access, &allowed);
+ if (dwRtnCode != ERROR_SUCCESS) {
+ throw_ioe(env, dwRtnCode);
+ goto cleanup;
+ }
+
+cleanup:
+ if (path) (*env)->ReleaseStringChars(env, jpath, path);
+
+ return (jboolean)allowed;
+#endif
+}
+
JNIEXPORT void JNICALL
Java_org_apache_hadoop_io_nativeio_NativeIO_renameTo0(JNIEnv *env,
jclass clazz, jstring jsrc, jstring jdst)
Modified:
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/winutils/include/winutils.h
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/winutils/include/winutils.h?rev=1486238&r1=1486237&r2=1486238&view=diff
==============================================================================
---
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/winutils/include/winutils.h
(original)
+++
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/winutils/include/winutils.h
Fri May 24 22:20:55 2013
@@ -110,6 +110,11 @@ void SystemInfoUsage();
DWORD GetFileInformationByName(__in LPCWSTR pathName, __in BOOL followLink,
__out LPBY_HANDLE_FILE_INFORMATION lpFileInformation);
+DWORD CheckAccessForCurrentUser(
+ __in PCWSTR pathName,
+ __in ACCESS_MASK requestedAccess,
+ __out BOOL *allowed);
+
DWORD ConvertToLongPath(__in PCWSTR path, __deref_out PWSTR *newPath);
DWORD GetSidFromAcctNameW(__in PCWSTR acctName, __out PSID* ppSid);
Modified:
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/winutils/libwinutils.c
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/winutils/libwinutils.c?rev=1486238&r1=1486237&r2=1486238&view=diff
==============================================================================
---
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/winutils/libwinutils.c
(original)
+++
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/winutils/libwinutils.c
Fri May 24 22:20:55 2013
@@ -567,7 +567,7 @@ static DWORD GetEffectiveRightsForSid(PS
PSID pSid,
PACCESS_MASK pAccessRights)
{
- AUTHZ_RESOURCE_MANAGER_HANDLE hManager;
+ AUTHZ_RESOURCE_MANAGER_HANDLE hManager = NULL;
LUID unusedId = { 0 };
AUTHZ_CLIENT_CONTEXT_HANDLE hAuthzClientContext = NULL;
DWORD dwRtnCode = ERROR_SUCCESS;
@@ -581,6 +581,10 @@ static DWORD GetEffectiveRightsForSid(PS
return GetLastError();
}
+ // Pass AUTHZ_SKIP_TOKEN_GROUPS to the function to avoid querying user group
+ // information for access check. This allows us to model POSIX permissions
+ // on Windows, where a user can have less permissions than a group it
+ // belongs to.
if(!AuthzInitializeContextFromSid(AUTHZ_SKIP_TOKEN_GROUPS,
pSid, hManager, NULL, unusedId, NULL, &hAuthzClientContext))
{
@@ -594,17 +598,116 @@ static DWORD GetEffectiveRightsForSid(PS
ret = dwRtnCode;
goto GetEffectiveRightsForSidEnd;
}
- if (!AuthzFreeContext(hAuthzClientContext))
+
+GetEffectiveRightsForSidEnd:
+ if (hManager != NULL)
{
- ret = GetLastError();
- goto GetEffectiveRightsForSidEnd;
+ (void)AuthzFreeResourceManager(hManager);
+ }
+ if (hAuthzClientContext != NULL)
+ {
+ (void)AuthzFreeContext(hAuthzClientContext);
}
-GetEffectiveRightsForSidEnd:
return ret;
}
//----------------------------------------------------------------------------
+// Function: CheckAccessForCurrentUser
+//
+// Description:
+// Checks if the current process has the requested access rights on the given
+// path. Based on the following MSDN article:
+//
+// http://msdn.microsoft.com/en-us/library/windows/desktop/ff394771(v=vs.85).aspx
+//
+// Returns:
+// ERROR_SUCCESS: on success
+//
+DWORD CheckAccessForCurrentUser(
+ __in PCWSTR pathName,
+ __in ACCESS_MASK requestedAccess,
+ __out BOOL *allowed)
+{
+ DWORD dwRtnCode = ERROR_SUCCESS;
+
+ LPWSTR longPathName = NULL;
+ HANDLE hProcessToken = NULL;
+ PSECURITY_DESCRIPTOR pSd = NULL;
+
+ AUTHZ_RESOURCE_MANAGER_HANDLE hManager = NULL;
+ AUTHZ_CLIENT_CONTEXT_HANDLE hAuthzClientContext = NULL;
+ LUID Luid = {0, 0};
+
+ ACCESS_MASK currentUserAccessRights = 0;
+
+ // Prepend the long path prefix if needed
+ dwRtnCode = ConvertToLongPath(pathName, &longPathName);
+ if (dwRtnCode != ERROR_SUCCESS)
+ {
+ goto CheckAccessEnd;
+ }
+
+ // Get SD of the given path. OWNER and DACL security info must be
+ // requested, otherwise, AuthzAccessCheck fails with invalid parameter
+ // error.
+ dwRtnCode = GetNamedSecurityInfo(longPathName, SE_FILE_OBJECT,
+ OWNER_SECURITY_INFORMATION | GROUP_SECURITY_INFORMATION |
+ DACL_SECURITY_INFORMATION,
+ NULL, NULL, NULL, NULL, &pSd);
+ if (dwRtnCode != ERROR_SUCCESS)
+ {
+ goto CheckAccessEnd;
+ }
+
+ // Get current process token
+ if (!OpenProcessToken(GetCurrentProcess(), TOKEN_QUERY, &hProcessToken))
+ {
+ dwRtnCode = GetLastError();
+ goto CheckAccessEnd;
+ }
+
+ if (!AuthzInitializeResourceManager(AUTHZ_RM_FLAG_NO_AUDIT, NULL, NULL,
+ NULL, NULL, &hManager))
+ {
+ dwRtnCode = GetLastError();
+ goto CheckAccessEnd;
+ }
+
+ if(!AuthzInitializeContextFromToken(0, hProcessToken, hManager, NULL,
+ Luid, NULL, &hAuthzClientContext))
+ {
+ dwRtnCode = GetLastError();
+ goto CheckAccessEnd;
+ }
+
+ dwRtnCode = GetAccess(hAuthzClientContext, pSd, &currentUserAccessRights);
+ if (dwRtnCode != ERROR_SUCCESS)
+ {
+ goto CheckAccessEnd;
+ }
+
+ *allowed = ((currentUserAccessRights & requestedAccess) == requestedAccess);
+
+CheckAccessEnd:
+ LocalFree(longPathName);
+ LocalFree(pSd);
+ if (hProcessToken != NULL)
+ {
+ CloseHandle(hProcessToken);
+ }
+ if (hManager != NULL)
+ {
+ (void)AuthzFreeResourceManager(hManager);
+ }
+ if (hAuthzClientContext != NULL)
+ {
+ (void)AuthzFreeContext(hAuthzClientContext);
+ }
+
+ return dwRtnCode;
+}
+
+//----------------------------------------------------------------------------
// Function: FindFileOwnerAndPermission
//
// Description:
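To illustrate the semantics behind the AUTHZ_SKIP_TOKEN_GROUPS comment above, here is a hedged Java-side sketch (not part of this patch; the class name and temporary file are hypothetical). Because CheckAccessForCurrentUser evaluates only the user's own rights, revoking the owner bit with chmod "u-w" should show up through FileUtil.canWrite even if a group ACE would still grant write access.

import java.io.File;
import java.io.IOException;

import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.util.Shell;

public class UserOnlyAccessSketch {
  public static void main(String[] args) throws IOException {
    File f = File.createTempFile("user-only-access", ".tmp");
    try {
      if (Shell.WINDOWS) {
        // winutils chmod "u-w" removes only the owner's write ACE; groups the
        // user belongs to are ignored by the access check described above.
        FileUtil.chmod(f.getCanonicalPath(), "u-w", false);
      } else {
        FileUtil.setWritable(f, false);
      }
      System.out.println("canWrite = " + FileUtil.canWrite(f));
      // Restore the bit so the temporary file can be deleted cleanly.
      FileUtil.setWritable(f, true);
    } finally {
      f.delete();
    }
  }
}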
Modified:
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java?rev=1486238&r1=1486237&r2=1486238&view=diff
==============================================================================
---
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java
(original)
+++
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java
Fri May 24 22:20:55 2013
@@ -353,15 +353,15 @@ public class TestFileUtil {
}
private static void grantPermissions(final File f) {
- f.setReadable(true);
- f.setWritable(true);
- f.setExecutable(true);
+ FileUtil.setReadable(f, true);
+ FileUtil.setWritable(f, true);
+ FileUtil.setExecutable(f, true);
}
private static void revokePermissions(final File f) {
- f.setWritable(false);
- f.setExecutable(false);
- f.setReadable(false);
+ FileUtil.setWritable(f, false);
+ FileUtil.setExecutable(f, false);
+ FileUtil.setReadable(f, false);
}
// Validates the return value.
Modified:
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java?rev=1486238&r1=1486237&r2=1486238&view=diff
==============================================================================
---
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java
(original)
+++
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java
Fri May 24 22:20:55 2013
@@ -61,7 +61,7 @@ public class TestLocalFileSystem {
@After
public void after() throws IOException {
- base.setWritable(true);
+ FileUtil.setWritable(base, true);
FileUtil.fullyDelete(base);
assertTrue(!base.exists());
}
@@ -279,7 +279,7 @@ public class TestLocalFileSystem {
final File dir1 = new File(base, "dir1");
final File dir2 = new File(dir1, "dir2");
dir2.mkdirs();
- assertTrue(dir2.exists() && dir2.canWrite());
+ assertTrue(dir2.exists() && FileUtil.canWrite(dir2));
final String dataFileName = "corruptedData";
final Path dataPath = new Path(new File(dir2, dataFileName).toURI());
@@ -302,7 +302,7 @@ public class TestLocalFileSystem {
// this is a hack to force the #reportChecksumFailure() method to stop
// climbing up at the 'base' directory and use 'dir1/bad_files' as the
// corrupted files storage:
- base.setWritable(false);
+ FileUtil.setWritable(base, false);
FSDataInputStream dataFsdis = fileSys.open(dataPath);
FSDataInputStream checksumFsdis = fileSys.open(checksumPath);
Modified:
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java?rev=1486238&r1=1486237&r2=1486238&view=diff
==============================================================================
---
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
(original)
+++
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
Fri May 24 22:20:55 2013
@@ -240,6 +240,44 @@ public class TestNativeIO {
}
+ /** Validate access checks on Windows */
+ @Test (timeout = 30000)
+ public void testAccess() throws Exception {
+ if (!Path.WINDOWS) {
+ return;
+ }
+
+ File testFile = new File(TEST_DIR, "testfileaccess");
+ assertTrue(testFile.createNewFile());
+
+ // Validate ACCESS_READ
+ FileUtil.setReadable(testFile, false);
+ assertFalse(NativeIO.Windows.access(testFile.getAbsolutePath(),
+ NativeIO.Windows.AccessRight.ACCESS_READ));
+
+ FileUtil.setReadable(testFile, true);
+ assertTrue(NativeIO.Windows.access(testFile.getAbsolutePath(),
+ NativeIO.Windows.AccessRight.ACCESS_READ));
+
+ // Validate ACCESS_WRITE
+ FileUtil.setWritable(testFile, false);
+ assertFalse(NativeIO.Windows.access(testFile.getAbsolutePath(),
+ NativeIO.Windows.AccessRight.ACCESS_WRITE));
+
+ FileUtil.setWritable(testFile, true);
+ assertTrue(NativeIO.Windows.access(testFile.getAbsolutePath(),
+ NativeIO.Windows.AccessRight.ACCESS_WRITE));
+
+ // Validate ACCESS_EXECUTE
+ FileUtil.setExecutable(testFile, false);
+ assertFalse(NativeIO.Windows.access(testFile.getAbsolutePath(),
+ NativeIO.Windows.AccessRight.ACCESS_EXECUTE));
+
+ FileUtil.setExecutable(testFile, true);
+ assertTrue(NativeIO.Windows.access(testFile.getAbsolutePath(),
+ NativeIO.Windows.AccessRight.ACCESS_EXECUTE));
+ }
+
@Test (timeout = 30000)
public void testOpenMissingWithoutCreate() throws Exception {
if (Path.WINDOWS) {
Modified:
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/unix/TemporarySocketDirectory.java
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/unix/TemporarySocketDirectory.java?rev=1486238&r1=1486237&r2=1486238&view=diff
==============================================================================
---
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/unix/TemporarySocketDirectory.java
(original)
+++
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/unix/TemporarySocketDirectory.java
Fri May 24 22:20:55 2013
@@ -23,6 +23,7 @@ import java.io.IOException;
import java.util.Random;
import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.fs.FileUtil;
/**
* Create a temporary directory in which sockets can be created.
@@ -37,7 +38,7 @@ public class TemporarySocketDirectory im
dir = new File(tmp, "socks." + (System.currentTimeMillis() +
"." + (new Random().nextInt())));
dir.mkdirs();
- dir.setWritable(true, true);
+ FileUtil.setWritable(dir, true);
}
public File getDir() {
Modified:
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestShell.java
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestShell.java?rev=1486238&r1=1486237&r2=1486238&view=diff
==============================================================================
---
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestShell.java
(original)
+++
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestShell.java
Fri May 24 22:20:55 2013
@@ -28,6 +28,8 @@ import java.lang.management.ManagementFa
import java.lang.management.ThreadInfo;
import java.lang.management.ThreadMXBean;
+import org.apache.hadoop.fs.FileUtil;
+
public class TestShell extends TestCase {
private static class Command extends Shell {
@@ -92,7 +94,7 @@ public class TestShell extends TestCase
PrintWriter writer = new PrintWriter(new FileOutputStream(shellFile));
writer.println(timeoutCommand);
writer.close();
- shellFile.setExecutable(true);
+ FileUtil.setExecutable(shellFile, true);
Shell.ShellCommandExecutor shexc
= new Shell.ShellCommandExecutor(new String[]{shellFile.getAbsolutePath()},
null, null, 100);