Author: suresh
Date: Fri May 24 05:12:56 2013
New Revision: 1485932
URL: http://svn.apache.org/r1485932
Log:
HADOOP-8562. Merge r1459586 for HDFS-4615, r1459592 for HDFS-4584, r1459643,
r1459642 for HADOOP-9387, r1460086 for HADOOP-9353
Modified:
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/pom.xml
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java
Modified: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1485932&r1=1485931&r2=1485932&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt Fri May 24 05:12:56 2013
@@ -319,7 +319,13 @@ Release 2.0.5-beta - UNRELEASED
Windows with NTFS ACLs. (Chris Nauroth via suresh)
HADOOP-9388. TestFsShellCopy fails on Windows. (Ivan Mitic via suresh)
-
+
+ HADOOP-9387. Fix DF so that it won't execute a shell command on Windows
+ to compute the file system/mount point. (Ivan Mitic via szetszwo)
+
+ HADOOP-9353. Activate native-win maven profile by default on Windows.
+ (Arpit Agarwal via szetszwo)
+
Release 2.0.4-beta - UNRELEASED
INCOMPATIBLE CHANGES
Modified: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/pom.xml?rev=1485932&r1=1485931&r2=1485932&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/pom.xml (original)
+++ hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/pom.xml Fri May 24 05:12:56 2013
@@ -557,7 +557,9 @@
<profile>
<id>native-win</id>
<activation>
- <activeByDefault>false</activeByDefault>
+ <os>
+ <family>Windows</family>
+ </os>
</activation>
<build>
<plugins>
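Switching the profile's activation from activeByDefault=false to an <os><family>Windows</family></os> rule means Maven turns native-win on automatically whenever the build is running on Windows (Maven matches the JVM's os.name against the requested OS family), and leaves it off on other platforms unless it is requested explicitly, for example with -Pnative-win. Nothing else about the profile changes; only the trigger does.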
Modified: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java?rev=1485932&r1=1485931&r2=1485932&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java (original)
+++ hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java Fri May 24 05:12:56 2013
@@ -50,37 +50,6 @@ public class DF extends Shell {
private ArrayList<String> output;
- enum OSType {
- OS_TYPE_UNIX("UNIX"),
- OS_TYPE_WIN("Windows"),
- OS_TYPE_SOLARIS("SunOS"),
- OS_TYPE_MAC("Mac"),
- OS_TYPE_AIX("AIX");
-
- private String id;
- OSType(String id) {
- this.id = id;
- }
- public boolean match(String osStr) {
- return osStr != null && osStr.indexOf(id) >= 0;
- }
- String getId() {
- return id;
- }
- }
-
- private static final String OS_NAME = System.getProperty("os.name");
- private static final OSType OS_TYPE = getOSType(OS_NAME);
-
- protected static OSType getOSType(String osName) {
- for (OSType ost : EnumSet.allOf(OSType.class)) {
- if (ost.match(osName)) {
- return ost;
- }
- }
- return OSType.OS_TYPE_UNIX;
- }
-
public DF(File path, Configuration conf) throws IOException {
this(path, conf.getLong(CommonConfigurationKeys.FS_DF_INTERVAL_KEY,
DF.DF_INTERVAL_DEFAULT));
}
@@ -92,10 +61,6 @@ public class DF extends Shell {
this.output = new ArrayList<String>();
}
- protected OSType getOSType() {
- return OS_TYPE;
- }
-
/// ACCESSORS
/** @return the canonical path to the volume we're checking. */
@@ -105,8 +70,13 @@ public class DF extends Shell {
/** @return a string indicating which filesystem volume we're checking. */
public String getFilesystem() throws IOException {
- run();
- return filesystem;
+ if (Shell.WINDOWS) {
+ this.filesystem = dirFile.getCanonicalPath().substring(0, 2);
+ return this.filesystem;
+ } else {
+ run();
+ return filesystem;
+ }
}
/** @return the capacity of the measured filesystem in bytes. */
@@ -138,16 +108,23 @@ public class DF extends Shell {
throw new FileNotFoundException("Specified path " + dirFile.getPath()
+ "does not exist");
}
- run();
- // Skip parsing if df was not successful
- if (getExitCode() != 0) {
- StringBuffer sb = new StringBuffer("df could not be run successfully: ");
- for (String line: output) {
- sb.append(line);
+
+ if (Shell.WINDOWS) {
+ // Assume a drive letter for a mount point
+ this.mount = dirFile.getCanonicalPath().substring(0, 2);
+ } else {
+ run();
+ // Skip parsing if df was not successful
+ if (getExitCode() != 0) {
+ StringBuffer sb = new StringBuffer("df could not be run successfully: ");
+ for (String line: output) {
+ sb.append(line);
+ }
+ throw new IOException(sb.toString());
}
- throw new IOException(sb.toString());
+ parseOutput();
}
- parseOutput();
+
return mount;
}
@@ -164,23 +141,15 @@ public class DF extends Shell {
}
@Override
- protected void run() throws IOException {
- if (WINDOWS) {
- try {
- this.mount = dirFile.getCanonicalPath().substring(0,2);
- } catch (IOException e) {
- }
- return;
- }
- super.run();
- }
-
- @Override
protected String[] getExecString() {
// ignoring the error since the exit code is enough
- return (WINDOWS)? new String[]{"cmd", "/c", "df -k " + dirPath + " 2>nul"}:
- new String[] {"bash","-c","exec 'df' '-k' '-P' '" + dirPath
+ if (Shell.WINDOWS){
+ throw new AssertionError(
+ "DF.getExecString() should never be called on Windows");
+ } else {
+ return new String[] {"bash","-c","exec 'df' '-k' '-P' '" + dirPath
+ "' 2>/dev/null"};
+ }
}
@Override
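For reference, the Windows branch above avoids running df altogether: getFilesystem() and getMount() both reduce to the drive-letter prefix of the directory's canonical path. A minimal standalone sketch of that shortcut (illustrative only; the class name and path below are made up, and the substring trick only makes sense for Windows-style paths):

    import java.io.File;
    import java.io.IOException;

    public class DriveLetterSketch {
      public static void main(String[] args) throws IOException {
        // Hypothetical Windows path; on Windows the canonical form starts
        // with a drive specifier such as "C:".
        File dir = new File("C:\\hadoop\\tmp");
        // The patch takes the first two characters of the canonical path as
        // both the filesystem and the mount point, so no df process is launched.
        String drive = dir.getCanonicalPath().substring(0, 2);
        System.out.println("filesystem/mount = " + drive);
      }
    }

Because this path is taken before any command would be built, getExecString() is unreachable on Windows, which is why the patch replaces the old cmd /c branch with an AssertionError documenting that invariant.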
Modified: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java?rev=1485932&r1=1485931&r2=1485932&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java (original)
+++ hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java Fri May 24 05:12:56 2013
@@ -29,21 +29,17 @@ import java.util.EnumSet;
import java.util.Random;
import org.apache.hadoop.test.GenericTestUtils;
+import org.apache.hadoop.util.Shell;
import org.junit.Test;
import static org.junit.Assert.*;
public class TestDFVariations {
public static class XXDF extends DF {
- private final String osName;
- public XXDF(String osName) throws IOException {
+ public XXDF() throws IOException {
super(new File(System.getProperty("test.build.data","/tmp")), 0L);
- this.osName = osName;
- }
- @Override
- public DF.OSType getOSType() {
- return DF.getOSType(osName);
}
+
@Override
protected String[] getExecString() {
return new String[] { "echo", "IGNORE\n",
@@ -51,14 +47,20 @@ public class TestDFVariations {
}
}
- @Test(timeout=5000)
- public void testOSParsing() throws Exception {
- for (DF.OSType ost : EnumSet.allOf(DF.OSType.class)) {
- XXDF df = new XXDF(ost.getId());
- assertEquals(ost.getId() + " mount", "/foo/bar", df.getMount());
- }
+ public void testMountAndFileSystem() throws Exception {
+ XXDF df = new XXDF();
+ String expectedMount =
+ Shell.WINDOWS ? df.getDirPath().substring(0, 2) : "/foo/bar";
+ String expectedFileSystem =
+ Shell.WINDOWS ? df.getDirPath().substring(0, 2) : "/dev/sda3";
+
+ assertEquals("Invalid mount point",
+ expectedMount, df.getMount());
+
+ assertEquals("Invalid filesystem",
+ expectedFileSystem, df.getFilesystem());
}
-
+
@Test(timeout=5000)
public void testDFInvalidPath() throws Exception {
// Generate a path that doesn't exist
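With the per-OS stub removed, XXDF always feeds DF the canned df output produced by the echo command in getExecString(). On Unix-like systems the renamed testMountAndFileSystem therefore expects the values parsed from that fake line ("/foo/bar" for the mount and "/dev/sda3" for the filesystem), while on Windows DF never runs the command at all and the expected values are simply the first two characters, i.e. the drive letter, of the test directory's path.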