Author: todd
Date: Wed Jan 23 18:51:24 2013
New Revision: 1437623
URL: http://svn.apache.org/viewvc?rev=1437623&view=rev
Log:
Merge trunk into branch
Added:
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/conf/hadoop-policy.xml
- copied unchanged from r1437619,
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/conf/hadoop-policy.xml
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/resources/common-version-info.properties
- copied unchanged from r1437619,
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/resources/common-version-info.properties
Removed:
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/dev-support/saveVersion.sh
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/HadoopVersionAnnotation.java
Modified:
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/CHANGES.txt
(contents, props changed)
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/pom.xml
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/bin/hadoop
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/docs/
(props changed)
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/service_level_auth.xml
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/
(props changed)
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/VersionInfo.java
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/test/core/
(props changed)
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodecFactory.java
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java
Modified:
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/CHANGES.txt
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1437623&r1=1437622&r2=1437623&view=diff
==============================================================================
---
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/CHANGES.txt
(original)
+++
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/CHANGES.txt
Wed Jan 23 18:51:24 2013
@@ -146,6 +146,9 @@ Trunk (Unreleased)
HADOOP-9162. Add utility to check native library availability.
(Binglin Chang via suresh)
+ HADOOP-8924. Add maven plugin alternative to shell script to save
+ package-info.java. (Chris Nauroth via suresh)
+
BUG FIXES
HADOOP-8419. Fixed GzipCode NPE reset for IBM JDK. (Yu Li via eyang)
@@ -326,6 +329,8 @@ Release 2.0.3-alpha - Unreleased
INCOMPATIBLE CHANGES
+ HADOOP-8999. SASL negotiation is flawed (daryn)
+
NEW FEATURES
HADOOP-8597. Permit FsShell's text command to read Avro files.
@@ -436,6 +441,18 @@ Release 2.0.3-alpha - Unreleased
HADOOP-9192. Move token related request/response messages to common.
(suresh)
+ HADOOP-8712. Change default hadoop.security.group.mapping to
+ JniBasedUnixGroupsNetgroupMappingWithFallback (Robert Parker via todd)
+
+ HADOOP-9106. Allow configuration of IPC connect timeout.
+ (Robert Parker via suresh)
+
+ HADOOP-9216. CompressionCodecFactory#getCodecClasses should trim the
+ result of parsing by Configuration. (Tsuyoshi Ozawa via todd)
+
+ HADOOP-9231. Parametrize staging URL for the uniformity of
+ distributionManagement. (Konstantin Boudnik via suresh)
+
OPTIMIZATIONS
HADOOP-8866. SampleQuantiles#query is O(N^2) instead of O(N). (Andrew Wang
@@ -496,8 +513,6 @@ Release 2.0.3-alpha - Unreleased
HADOOP-7115. Add a cache for getpwuid_r and getpwgid_r calls (tucu)
- HADOOP-8999. SASL negotiation is flawed (daryn)
-
HADOOP-6607. Add different variants of non caching HTTP headers. (tucu)
HADOOP-9049. DelegationTokenRenewer needs to be Singleton and FileSystems
@@ -543,6 +558,20 @@ Release 2.0.3-alpha - Unreleased
HADOOP-9203. RPCCallBenchmark should find a random available port.
(Andrew Purtell via suresh)
+ HADOOP-9178. src/main/conf is missing hadoop-policy.xml.
+ (Sandy Ryza via eli)
+
+ HADOOP-8816. HTTP Error 413 full HEAD if using kerberos authentication.
+ (moritzmoeller via tucu)
+
+ HADOOP-9212. Potential deadlock in FileSystem.Cache/IPC/UGI. (tomwhite)
+
+ HADOOP-9193. hadoop script can inadvertently expand wildcard arguments
+ when delegating to hdfs script. (Andy Isaacson via todd)
+
+ HADOOP-9215. when using cmake-2.6, libhadoop.so doesn't get created
+ (only libhadoop.so.1.0.0) (Colin Patrick McCabe via todd)
+
Release 2.0.2-alpha - 2012-09-07
INCOMPATIBLE CHANGES
@@ -1233,6 +1262,21 @@ Release 2.0.0-alpha - 05-23-2012
HADOOP-8655. Fix TextInputFormat for large deliminators. (Gelesh via
bobby)
+Release 0.23.7 - UNRELEASED
+
+ INCOMPATIBLE CHANGES
+
+ NEW FEATURES
+
+ IMPROVEMENTS
+
+ HADOOP-8849. FileUtil#fullyDelete should grant the target directories +rwx
+ permissions (Ivan A. Veselovsky via bobby)
+
+ OPTIMIZATIONS
+
+ BUG FIXES
+
Release 0.23.6 - UNRELEASED
INCOMPATIBLE CHANGES
@@ -1240,6 +1284,8 @@ Release 0.23.6 - UNRELEASED
NEW FEATURES
IMPROVEMENTS
+ HADOOP-9217. Print thread dumps when hadoop-common tests fail.
+ (Andrey Klochkov via suresh)
OPTIMIZATIONS
@@ -1258,7 +1304,8 @@ Release 0.23.6 - UNRELEASED
HADOOP-9097. Maven RAT plugin is not checking all source files (tgraves)
-Release 0.23.5 - UNRELEASED
+Release 0.23.5 - 2012-11-28
+
INCOMPATIBLE CHANGES
Propchange:
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/CHANGES.txt
------------------------------------------------------------------------------
Merged
/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:r1433249-1437619
Modified:
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/pom.xml
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/pom.xml?rev=1437623&r1=1437622&r2=1437623&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/pom.xml
(original)
+++ hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/pom.xml
Wed Jan 23 18:51:24 2013
@@ -244,8 +244,52 @@
</dependencies>
<build>
+ <!--
+ Include all files in src/main/resources. By default, do not apply property
+ substitution (filtering=false), but do apply property substitution to
+ common-version-info.properties (filtering=true). This will substitute the
+ version information correctly, but prevent Maven from altering other files
+ like core-default.xml.
+ -->
+ <resources>
+ <resource>
+ <directory>${basedir}/src/main/resources</directory>
+ <excludes>
+ <exclude>common-version-info.properties</exclude>
+ </excludes>
+ <filtering>false</filtering>
+ </resource>
+ <resource>
+ <directory>${basedir}/src/main/resources</directory>
+ <includes>
+ <include>common-version-info.properties</include>
+ </includes>
+ <filtering>true</filtering>
+ </resource>
+ </resources>
<plugins>
<plugin>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-maven-plugins</artifactId>
+ <executions>
+ <execution>
+ <id>version-info</id>
+ <goals>
+ <goal>version-info</goal>
+ </goals>
+ <configuration>
+ <source>
+ <directory>${basedir}/src/main</directory>
+ <includes>
+ <include>java/**/*.java</include>
+ <include>proto/**/*.proto</include>
+ </includes>
+ </source>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
@@ -289,22 +333,6 @@
</configuration>
</execution>
<execution>
- <id>save-version</id>
- <phase>generate-sources</phase>
- <goals>
- <goal>run</goal>
- </goals>
- <configuration>
- <target>
- <mkdir
dir="${project.build.directory}/generated-sources/java"/>
- <exec executable="sh">
- <arg
- line="${basedir}/dev-support/saveVersion.sh
${project.version} ${project.build.directory}/generated-sources/java"/>
- </exec>
- </target>
- </configuration>
- </execution>
- <execution>
<id>generate-test-sources</id>
<phase>generate-test-sources</phase>
<goals>
@@ -453,6 +481,18 @@
</excludes>
</configuration>
</plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-surefire-plugin</artifactId>
+ <configuration>
+ <properties>
+ <property>
+ <name>listener</name>
+ <value>org.apache.hadoop.test.TimedOutTestsListener</value>
+ </property>
+ </properties>
+ </configuration>
+ </plugin>
</plugins>
</build>
@@ -515,6 +555,9 @@
<exec executable="make"
dir="${project.build.directory}/native" failonerror="true">
<arg line="VERBOSE=1"/>
</exec>
+ <!-- The second make is a workaround for HADOOP-9215. It
can
+ be removed when version 2.6 of cmake is no longer
supported. -->
+ <exec executable="make"
dir="${project.build.directory}/native" failonerror="true"></exec>
</target>
</configuration>
</execution>
Modified:
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/bin/hadoop
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/bin/hadoop?rev=1437623&r1=1437622&r2=1437623&view=diff
==============================================================================
---
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/bin/hadoop
(original)
+++
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/bin/hadoop
Wed Jan 23 18:51:24 2013
@@ -58,9 +58,9 @@ case $COMMAND in
#try to locate hdfs and if present, delegate to it.
shift
if [ -f "${HADOOP_HDFS_HOME}"/bin/hdfs ]; then
- exec "${HADOOP_HDFS_HOME}"/bin/hdfs ${COMMAND/dfsgroups/groups} $*
+ exec "${HADOOP_HDFS_HOME}"/bin/hdfs ${COMMAND/dfsgroups/groups} "$@"
elif [ -f "${HADOOP_PREFIX}"/bin/hdfs ]; then
- exec "${HADOOP_PREFIX}"/bin/hdfs ${COMMAND/dfsgroups/groups} $*
+ exec "${HADOOP_PREFIX}"/bin/hdfs ${COMMAND/dfsgroups/groups} "$@"
else
echo "HADOOP_HDFS_HOME not found!"
exit 1
@@ -75,9 +75,9 @@ case $COMMAND in
#try to locate mapred and if present, delegate to it.
shift
if [ -f "${HADOOP_MAPRED_HOME}"/bin/mapred ]; then
- exec "${HADOOP_MAPRED_HOME}"/bin/mapred ${COMMAND/mrgroups/groups} $*
+ exec "${HADOOP_MAPRED_HOME}"/bin/mapred ${COMMAND/mrgroups/groups} "$@"
elif [ -f "${HADOOP_PREFIX}"/bin/mapred ]; then
- exec "${HADOOP_PREFIX}"/bin/mapred ${COMMAND/mrgroups/groups} $*
+ exec "${HADOOP_PREFIX}"/bin/mapred ${COMMAND/mrgroups/groups} "$@"
else
echo "HADOOP_MAPRED_HOME not found!"
exit 1
Propchange:
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/docs/
------------------------------------------------------------------------------
Merged
/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs:r1433249-1437619
Modified:
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/service_level_auth.xml
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/service_level_auth.xml?rev=1437623&r1=1437622&r2=1437623&view=diff
==============================================================================
---
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/service_level_auth.xml
(original)
+++
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/service_level_auth.xml
Wed Jan 23 18:51:24 2013
@@ -117,22 +117,6 @@
namenode to communicate with the namenode.</td>
</tr>
<tr>
- <td><code>security.inter.tracker.protocol.acl</code></td>
- <td>ACL for InterTrackerProtocol, used by the tasktrackers to
- communicate with the jobtracker.</td>
- </tr>
- <tr>
- <td><code>security.job.submission.protocol.acl</code></td>
- <td>ACL for JobSubmissionProtocol, used by job clients to
- communciate with the jobtracker for job submission, querying job
status
- etc.</td>
- </tr>
- <tr>
- <td><code>security.task.umbilical.protocol.acl</code></td>
- <td>ACL for TaskUmbilicalProtocol, used by the map and reduce
- tasks to communicate with the parent tasktracker.</td>
- </tr>
- <tr>
<td><code>security.refresh.policy.protocol.acl</code></td>
<td>ACL for RefreshAuthorizationPolicyProtocol, used by the
dfsadmin and mradmin commands to refresh the security policy
in-effect.
Propchange:
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/
------------------------------------------------------------------------------
Merged
/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:r1433249-1437619
Modified:
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java?rev=1437623&r1=1437622&r2=1437623&view=diff
==============================================================================
---
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
(original)
+++
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
Wed Jan 23 18:51:24 2013
@@ -21,6 +21,7 @@ package org.apache.hadoop.fs;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.http.lib.StaticUserWebFilter;
+import org.apache.hadoop.security.authorize.Service;
/**
* This class contains constants for configuration keys used
@@ -114,7 +115,18 @@ public class CommonConfigurationKeys ext
SECURITY_HA_SERVICE_PROTOCOL_ACL = "security.ha.service.protocol.acl";
public static final String
SECURITY_ZKFC_PROTOCOL_ACL = "security.zkfc.protocol.acl";
-
+ public static final String
+ SECURITY_CLIENT_PROTOCOL_ACL = "security.client.protocol.acl";
+ public static final String SECURITY_CLIENT_DATANODE_PROTOCOL_ACL =
+ "security.client.datanode.protocol.acl";
+ public static final String
+ SECURITY_DATANODE_PROTOCOL_ACL = "security.datanode.protocol.acl";
+ public static final String
+ SECURITY_INTER_DATANODE_PROTOCOL_ACL =
"security.inter.datanode.protocol.acl";
+ public static final String
+ SECURITY_NAMENODE_PROTOCOL_ACL = "security.namenode.protocol.acl";
+ public static final String SECURITY_QJOURNAL_SERVICE_PROTOCOL_ACL =
+ "security.qjournal.service.protocol.acl";
public static final String HADOOP_SECURITY_TOKEN_SERVICE_USE_IP =
"hadoop.security.token.service.use_ip";
public static final boolean HADOOP_SECURITY_TOKEN_SERVICE_USE_IP_DEFAULT =
@@ -191,4 +203,4 @@ public class CommonConfigurationKeys ext
public static final long HADOOP_SECURITY_UID_NAME_CACHE_TIMEOUT_DEFAULT =
4*60*60; // 4 hours
-}
\ No newline at end of file
+}
Modified:
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java?rev=1437623&r1=1437622&r2=1437623&view=diff
==============================================================================
---
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
(original)
+++
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
Wed Jan 23 18:51:24 2013
@@ -173,6 +173,11 @@ public class CommonConfigurationKeysPubl
/** Default value for IPC_CLIENT_CONNECTION_MAXIDLETIME_KEY */
public static final int IPC_CLIENT_CONNECTION_MAXIDLETIME_DEFAULT =
10000; // 10s
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
+ public static final String IPC_CLIENT_CONNECT_TIMEOUT_KEY =
+ "ipc.client.connect.timeout";
+ /** Default value for IPC_CLIENT_CONNECT_TIMEOUT_KEY */
+ public static final int IPC_CLIENT_CONNECT_TIMEOUT_DEFAULT = 20000; //
20s
+ /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String IPC_CLIENT_CONNECT_MAX_RETRIES_KEY =
"ipc.client.connect.max.retries";
/** Default value for IPC_CLIENT_CONNECT_MAX_RETRIES_KEY */
Modified:
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java?rev=1437623&r1=1437622&r2=1437623&view=diff
==============================================================================
---
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
(original)
+++
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
Wed Jan 23 18:51:24 2013
@@ -87,33 +87,98 @@ public class FileUtil {
* (4) If dir is a normal directory, then dir and all its contents
recursively
* are deleted.
*/
- public static boolean fullyDelete(File dir) {
- if (dir.delete()) {
+ public static boolean fullyDelete(final File dir) {
+ return fullyDelete(dir, false);
+ }
+
+ /**
+ * Delete a directory and all its contents. If
+ * we return false, the directory may be partially-deleted.
+ * (1) If dir is symlink to a file, the symlink is deleted. The file pointed
+ * to by the symlink is not deleted.
+ * (2) If dir is symlink to a directory, symlink is deleted. The directory
+ * pointed to by symlink is not deleted.
+ * (3) If dir is a normal file, it is deleted.
+ * (4) If dir is a normal directory, then dir and all its contents
recursively
+ * are deleted.
+ * @param dir the file or directory to be deleted
+ * @param tryGrantPermissions true if permissions should be modified to
delete a file.
+ * @return true on success false on failure.
+ */
+ public static boolean fullyDelete(final File dir, boolean
tryGrantPermissions) {
+ if (tryGrantPermissions) {
+ // try to chmod +rwx the parent folder of the 'dir':
+ File parent = dir.getParentFile();
+ grantPermissions(parent);
+ }
+ if (deleteImpl(dir, false)) {
// dir is (a) normal file, (b) symlink to a file, (c) empty directory or
// (d) symlink to a directory
return true;
}
-
// handle nonempty directory deletion
- if (!fullyDeleteContents(dir)) {
+ if (!fullyDeleteContents(dir, tryGrantPermissions)) {
return false;
}
- return dir.delete();
+ return deleteImpl(dir, true);
+ }
+
+ /*
+ * Pure-Java implementation of "chmod +rwx f".
+ */
+ private static void grantPermissions(final File f) {
+ f.setExecutable(true);
+ f.setReadable(true);
+ f.setWritable(true);
}
+ private static boolean deleteImpl(final File f, final boolean doLog) {
+ if (f == null) {
+ LOG.warn("null file argument.");
+ return false;
+ }
+ final boolean wasDeleted = f.delete();
+ if (wasDeleted) {
+ return true;
+ }
+ final boolean ex = f.exists();
+ if (doLog && ex) {
+ LOG.warn("Failed to delete file or dir ["
+ + f.getAbsolutePath() + "]: it still exists.");
+ }
+ return !ex;
+ }
+
+ /**
+ * Delete the contents of a directory, not the directory itself. If
+ * we return false, the directory may be partially-deleted.
+ * If dir is a symlink to a directory, all the contents of the actual
+ * directory pointed to by dir will be deleted.
+ */
+ public static boolean fullyDeleteContents(final File dir) {
+ return fullyDeleteContents(dir, false);
+ }
+
/**
* Delete the contents of a directory, not the directory itself. If
* we return false, the directory may be partially-deleted.
* If dir is a symlink to a directory, all the contents of the actual
* directory pointed to by dir will be deleted.
+ * @param tryGrantPermissions if 'true', try grant +rwx permissions to this
+ * and all the underlying directories before trying to delete their contents.
*/
- public static boolean fullyDeleteContents(File dir) {
+ public static boolean fullyDeleteContents(final File dir, final boolean
tryGrantPermissions) {
+ if (tryGrantPermissions) {
+ // to be able to list the dir and delete files from it
+ // we must grant the dir rwx permissions:
+ grantPermissions(dir);
+ }
boolean deletionSucceeded = true;
- File contents[] = dir.listFiles();
+ final File[] contents = dir.listFiles();
if (contents != null) {
for (int i = 0; i < contents.length; i++) {
if (contents[i].isFile()) {
- if (!contents[i].delete()) {// normal file or symlink to another file
+ if (!deleteImpl(contents[i], true)) {// normal file or symlink to
another file
deletionSucceeded = false;
continue; // continue deletion of other files/dirs under dir
}
@@ -121,16 +186,16 @@ public class FileUtil {
// Either directory or symlink to another directory.
// Try deleting the directory as this might be a symlink
boolean b = false;
- b = contents[i].delete();
+ b = deleteImpl(contents[i], false);
if (b){
//this was indeed a symlink or an empty directory
continue;
}
// if not an empty directory or symlink let
// fullydelete handle it.
- if (!fullyDelete(contents[i])) {
+ if (!fullyDelete(contents[i], tryGrantPermissions)) {
deletionSucceeded = false;
- continue; // continue deletion of other files/dirs under dir
+ // continue deletion of other files/dirs under dir
}
}
}
Modified:
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java?rev=1437623&r1=1437622&r2=1437623&view=diff
==============================================================================
---
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
(original)
+++
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
Wed Jan 23 18:51:24 2013
@@ -305,6 +305,7 @@ public class HttpServer implements Filte
ret.setAcceptQueueSize(128);
ret.setResolveNames(false);
ret.setUseDirectBuffers(false);
+ ret.setHeaderBufferSize(1024*64);
return ret;
}
Modified:
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java?rev=1437623&r1=1437622&r2=1437623&view=diff
==============================================================================
---
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java
(original)
+++
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java
Wed Jan 23 18:51:24 2013
@@ -122,7 +122,7 @@ public class CompressionCodecFactory {
if (codecsString != null) {
StringTokenizer codecSplit = new StringTokenizer(codecsString, ",");
while (codecSplit.hasMoreElements()) {
- String codecSubstring = codecSplit.nextToken();
+ String codecSubstring = codecSplit.nextToken().trim();
if (codecSubstring.length() != 0) {
try {
Class<?> cls = conf.getClassByName(codecSubstring);
Modified:
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1437623&r1=1437622&r2=1437623&view=diff
==============================================================================
---
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
(original)
+++
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
Wed Jan 23 18:51:24 2013
@@ -106,6 +106,8 @@ public class Client {
private SocketFactory socketFactory; // how to create sockets
private int refCount = 1;
+
+ private final int connectionTimeout;
final static int PING_CALL_ID = -1;
@@ -159,7 +161,16 @@ public class Client {
}
return -1;
}
-
+ /**
+ * set the connection timeout value in configuration
+ *
+ * @param conf Configuration
+ * @param timeout the socket connect timeout value
+ */
+ public static final void setConnectTimeout(Configuration conf, int timeout) {
+ conf.setInt(CommonConfigurationKeys.IPC_CLIENT_CONNECT_TIMEOUT_KEY,
timeout);
+ }
+
/**
* Increment this client's reference count
*
@@ -494,8 +505,7 @@ public class Client {
}
}
- // connection time out is 20s
- NetUtils.connect(this.socket, server, 20000);
+ NetUtils.connect(this.socket, server, connectionTimeout);
if (rpcTimeout > 0) {
pingInterval = rpcTimeout; // rpcTimeout overwrites pingInterval
}
@@ -1034,6 +1044,8 @@ public class Client {
this.valueClass = valueClass;
this.conf = conf;
this.socketFactory = factory;
+ this.connectionTimeout =
conf.getInt(CommonConfigurationKeys.IPC_CLIENT_CONNECT_TIMEOUT_KEY,
+ CommonConfigurationKeys.IPC_CLIENT_CONNECT_TIMEOUT_DEFAULT);
}
/**
Modified:
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java?rev=1437623&r1=1437622&r2=1437623&view=diff
==============================================================================
---
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java
(original)
+++
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java
Wed Jan 23 18:51:24 2013
@@ -18,10 +18,13 @@
package org.apache.hadoop.security;
+import java.io.BufferedInputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
@@ -148,8 +151,32 @@ public class Credentials implements Writ
in.close();
return credentials;
} catch(IOException ioe) {
+ throw new IOException("Exception reading " + filename, ioe);
+ } finally {
IOUtils.cleanup(LOG, in);
+ }
+ }
+
+ /**
+ * Convenience method for reading a token storage file, and loading the
Tokens
+ * therein in the passed UGI
+ * @param filename
+ * @param conf
+ * @throws IOException
+ */
+ public static Credentials readTokenStorageFile(File filename, Configuration
conf)
+ throws IOException {
+ DataInputStream in = null;
+ Credentials credentials = new Credentials();
+ try {
+ in = new DataInputStream(new BufferedInputStream(
+ new FileInputStream(filename)));
+ credentials.readTokenStorageStream(in);
+ return credentials;
+ } catch(IOException ioe) {
throw new IOException("Exception reading " + filename, ioe);
+ } finally {
+ IOUtils.cleanup(LOG, in);
}
}
Modified:
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java?rev=1437623&r1=1437622&r2=1437623&view=diff
==============================================================================
---
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
(original)
+++
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
Wed Jan 23 18:51:24 2013
@@ -20,6 +20,7 @@ package org.apache.hadoop.security;
import static
org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN;
import static
org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN_DEFAULT;
+import java.io.File;
import java.io.IOException;
import java.lang.reflect.UndeclaredThrowableException;
import java.security.AccessControlContext;
@@ -656,10 +657,11 @@ public class UserGroupInformation {
String fileLocation = System.getenv(HADOOP_TOKEN_FILE_LOCATION);
if (fileLocation != null) {
- // load the token storage file and put all of the tokens into the
- // user.
+ // Load the token storage file and put all of the tokens into the
+ // user. Don't use the FileSystem API for reading since it has a lock
+ // cycle (HADOOP-9212).
Credentials cred = Credentials.readTokenStorageFile(
- new Path("file:///" + fileLocation), conf);
+ new File(fileLocation), conf);
loginUser.addCredentials(cred);
}
loginUser.spawnAutoRenewalThreadForUserCreds();
Modified:
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/VersionInfo.java
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/VersionInfo.java?rev=1437623&r1=1437622&r2=1437623&view=diff
==============================================================================
---
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/VersionInfo.java
(original)
+++
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/VersionInfo.java
Wed Jan 23 18:51:24 2013
@@ -20,41 +20,78 @@ package org.apache.hadoop.util;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.HadoopVersionAnnotation;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Properties;
+
/**
- * This class finds the package info for Hadoop and the HadoopVersionAnnotation
- * information.
+ * This class returns build information about Hadoop components.
*/
@InterfaceAudience.Private
@InterfaceStability.Unstable
public class VersionInfo {
private static final Log LOG = LogFactory.getLog(VersionInfo.class);
- private static Package myPackage;
- private static HadoopVersionAnnotation version;
-
- static {
- myPackage = HadoopVersionAnnotation.class.getPackage();
- version = myPackage.getAnnotation(HadoopVersionAnnotation.class);
+ private Properties info;
+
+ protected VersionInfo(String component) {
+ info = new Properties();
+ String versionInfoFile = component + "-version-info.properties";
+ try {
+ InputStream is = Thread.currentThread().getContextClassLoader()
+ .getResourceAsStream(versionInfoFile);
+ info.load(is);
+ } catch (IOException ex) {
+ LogFactory.getLog(getClass()).warn("Could not read '" +
+ versionInfoFile + "', " + ex.toString(), ex);
+ }
}
- /**
- * Get the meta-data for the Hadoop package.
- * @return
- */
- static Package getPackage() {
- return myPackage;
+ protected String _getVersion() {
+ return info.getProperty("version", "Unknown");
}
-
+
+ protected String _getRevision() {
+ return info.getProperty("revision", "Unknown");
+ }
+
+ protected String _getBranch() {
+ return info.getProperty("branch", "Unknown");
+ }
+
+ protected String _getDate() {
+ return info.getProperty("date", "Unknown");
+ }
+
+ protected String _getUser() {
+ return info.getProperty("user", "Unknown");
+ }
+
+ protected String _getUrl() {
+ return info.getProperty("url", "Unknown");
+ }
+
+ protected String _getSrcChecksum() {
+ return info.getProperty("srcChecksum", "Unknown");
+ }
+
+ protected String _getBuildVersion(){
+ return getVersion() +
+ " from " + _getRevision() +
+ " by " + _getUser() +
+ " source checksum " + _getSrcChecksum();
+ }
+
+ private static VersionInfo COMMON_VERSION_INFO = new VersionInfo("common");
/**
* Get the Hadoop version.
* @return the Hadoop version string, eg. "0.6.3-dev"
*/
public static String getVersion() {
- return version != null ? version.version() : "Unknown";
+ return COMMON_VERSION_INFO._getVersion();
}
/**
@@ -62,7 +99,7 @@ public class VersionInfo {
* @return the revision number, eg. "451451"
*/
public static String getRevision() {
- return version != null ? version.revision() : "Unknown";
+ return COMMON_VERSION_INFO._getRevision();
}
/**
@@ -70,7 +107,7 @@ public class VersionInfo {
* @return The branch name, e.g. "trunk" or "branches/branch-0.20"
*/
public static String getBranch() {
- return version != null ? version.branch() : "Unknown";
+ return COMMON_VERSION_INFO._getBranch();
}
/**
@@ -78,7 +115,7 @@ public class VersionInfo {
* @return the compilation date in unix date format
*/
public static String getDate() {
- return version != null ? version.date() : "Unknown";
+ return COMMON_VERSION_INFO._getDate();
}
/**
@@ -86,14 +123,14 @@ public class VersionInfo {
* @return the username of the user
*/
public static String getUser() {
- return version != null ? version.user() : "Unknown";
+ return COMMON_VERSION_INFO._getUser();
}
/**
* Get the subversion URL for the root Hadoop directory.
*/
public static String getUrl() {
- return version != null ? version.url() : "Unknown";
+ return COMMON_VERSION_INFO._getUrl();
}
/**
@@ -101,7 +138,7 @@ public class VersionInfo {
* built.
**/
public static String getSrcChecksum() {
- return version != null ? version.srcChecksum() : "Unknown";
+ return COMMON_VERSION_INFO._getSrcChecksum();
}
/**
@@ -109,14 +146,11 @@ public class VersionInfo {
* revision, user and date.
*/
public static String getBuildVersion(){
- return VersionInfo.getVersion() +
- " from " + VersionInfo.getRevision() +
- " by " + VersionInfo.getUser() +
- " source checksum " + VersionInfo.getSrcChecksum();
+ return COMMON_VERSION_INFO._getBuildVersion();
}
public static void main(String[] args) {
- LOG.debug("version: "+ version);
+ LOG.debug("version: "+ getVersion());
System.out.println("Hadoop " + getVersion());
System.out.println("Subversion " + getUrl() + " -r " + getRevision());
System.out.println("Compiled by " + getUser() + " on " + getDate());
Modified:
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml?rev=1437623&r1=1437622&r2=1437623&view=diff
==============================================================================
---
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
(original)
+++
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
Wed Jan 23 18:51:24 2013
@@ -80,9 +80,17 @@
<property>
<name>hadoop.security.group.mapping</name>
- <value>org.apache.hadoop.security.ShellBasedUnixGroupsMapping</value>
+
<value>org.apache.hadoop.security.JniBasedUnixGroupsMappingWithFallback</value>
<description>
- Class for user to group mapping (get groups for a given user) for ACL
+ Class for user to group mapping (get groups for a given user) for ACL.
+ The default implementation,
+ org.apache.hadoop.security.JniBasedUnixGroupsMappingWithFallback,
+ will determine if the Java Native Interface (JNI) is available. If JNI is
+ available the implementation will use the API within hadoop to resolve a
+ list of groups for a user. If JNI is not available then the shell
+ implementation, ShellBasedUnixGroupsMapping, is used. This implementation
+ shells out to the Linux/Unix environment with the
+ <code>bash -c groups</code> command to resolve a list of groups for a user.
</description>
</property>
@@ -566,6 +574,14 @@
</property>
<property>
+ <name>ipc.client.connect.timeout</name>
+ <value>20000</value>
+ <description>Indicates the number of milliseconds a client will wait for the
+ socket to establish a server connection.
+ </description>
+</property>
+
+<property>
<name>ipc.client.connect.max.retries.on.timeouts</name>
<value>45</value>
<description>Indicates the number of retries a client will make on socket
timeout
Propchange:
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/test/core/
------------------------------------------------------------------------------
Merged
/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/core:r1433249-1437619
Modified:
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java?rev=1437623&r1=1437622&r2=1437623&view=diff
==============================================================================
---
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java
(original)
+++
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java
Wed Jan 23 18:51:24 2013
@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.fs;
+import org.junit.Before;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
@@ -173,12 +174,26 @@ public class TestFileUtil {
//Expected an IOException
}
}
+
+ @Before
+ public void before() throws IOException {
+ cleanupImpl();
+ }
@After
public void tearDown() throws IOException {
- FileUtil.fullyDelete(del);
- FileUtil.fullyDelete(tmp);
- FileUtil.fullyDelete(partitioned);
+ cleanupImpl();
+ }
+
+ private void cleanupImpl() throws IOException {
+ FileUtil.fullyDelete(del, true);
+ Assert.assertTrue(!del.exists());
+
+ FileUtil.fullyDelete(tmp, true);
+ Assert.assertTrue(!tmp.exists());
+
+ FileUtil.fullyDelete(partitioned, true);
+ Assert.assertTrue(!partitioned.exists());
}
@Test
@@ -269,12 +284,14 @@ public class TestFileUtil {
Assert.assertTrue(new File(tmp, FILE).exists());
}
- private File xSubDir = new File(del, "xsubdir");
- private File ySubDir = new File(del, "ysubdir");
- static String file1Name = "file1";
- private File file2 = new File(xSubDir, "file2");
- private File file3 = new File(ySubDir, "file3");
- private File zlink = new File(del, "zlink");
+ private final File xSubDir = new File(del, "xSubDir");
+ private final File xSubSubDir = new File(xSubDir, "xSubSubDir");
+ private final File ySubDir = new File(del, "ySubDir");
+ private static final String file1Name = "file1";
+ private final File file2 = new File(xSubDir, "file2");
+ private final File file22 = new File(xSubSubDir, "file22");
+ private final File file3 = new File(ySubDir, "file3");
+ private final File zlink = new File(del, "zlink");
/**
* Creates a directory which can not be deleted completely.
@@ -286,10 +303,14 @@ public class TestFileUtil {
* |
* .---------------------------------------,
* | | | |
- * file1(!w) xsubdir(-w) ysubdir(+w) zlink
- * | |
- * file2 file3
- *
+ * file1(!w) xSubDir(-rwx) ySubDir(+w) zlink
+ * | | |
+ * | file2(-rwx) file3
+ * |
+ * xSubSubDir(-rwx)
+ * |
+ * file22(-rwx)
+ *
* @throws IOException
*/
private void setupDirsAndNonWritablePermissions() throws IOException {
@@ -302,7 +323,16 @@ public class TestFileUtil {
xSubDir.mkdirs();
file2.createNewFile();
- xSubDir.setWritable(false);
+
+ xSubSubDir.mkdirs();
+ file22.createNewFile();
+
+ revokePermissions(file22);
+ revokePermissions(xSubSubDir);
+
+ revokePermissions(file2);
+ revokePermissions(xSubDir);
+
ySubDir.mkdirs();
file3.createNewFile();
@@ -314,23 +344,43 @@ public class TestFileUtil {
FileUtil.symLink(tmpFile.toString(), zlink.toString());
}
+ private static void grantPermissions(final File f) {
+ f.setReadable(true);
+ f.setWritable(true);
+ f.setExecutable(true);
+ }
+
+ private static void revokePermissions(final File f) {
+ f.setWritable(false);
+ f.setExecutable(false);
+ f.setReadable(false);
+ }
+
// Validates the return value.
- // Validates the existence of directory "xsubdir" and the file "file1"
- // Sets writable permissions for the non-deleted dir "xsubdir" so that it can
- // be deleted in tearDown().
- private void validateAndSetWritablePermissions(boolean ret) {
- xSubDir.setWritable(true);
- Assert.assertFalse("The return value should have been false!", ret);
- Assert.assertTrue("The file file1 should not have been deleted!",
+ // Validates the existence of the file "file1"
+ private void validateAndSetWritablePermissions(
+ final boolean expectedRevokedPermissionDirsExist, final boolean ret) {
+ grantPermissions(xSubDir);
+ grantPermissions(xSubSubDir);
+
+ Assert.assertFalse("The return value should have been false.", ret);
+ Assert.assertTrue("The file file1 should not have been deleted.",
new File(del, file1Name).exists());
- Assert.assertTrue(
- "The directory xsubdir should not have been deleted!",
- xSubDir.exists());
- Assert.assertTrue("The file file2 should not have been deleted!",
- file2.exists());
- Assert.assertFalse("The directory ysubdir should have been deleted!",
+
+ Assert.assertEquals(
+ "The directory xSubDir *should* not have been deleted.",
+ expectedRevokedPermissionDirsExist, xSubDir.exists());
+ Assert.assertEquals("The file file2 *should* not have been deleted.",
+ expectedRevokedPermissionDirsExist, file2.exists());
+ Assert.assertEquals(
+ "The directory xSubSubDir *should* not have been deleted.",
+ expectedRevokedPermissionDirsExist, xSubSubDir.exists());
+ Assert.assertEquals("The file file22 *should* not have been deleted.",
+ expectedRevokedPermissionDirsExist, file22.exists());
+
+ Assert.assertFalse("The directory ySubDir should have been deleted.",
ySubDir.exists());
- Assert.assertFalse("The link zlink should have been deleted!",
+ Assert.assertFalse("The link zlink should have been deleted.",
zlink.exists());
}
@@ -339,7 +389,15 @@ public class TestFileUtil {
LOG.info("Running test to verify failure of fullyDelete()");
setupDirsAndNonWritablePermissions();
boolean ret = FileUtil.fullyDelete(new MyFile(del));
- validateAndSetWritablePermissions(ret);
+ validateAndSetWritablePermissions(true, ret);
+ }
+
+ @Test
+ public void testFailFullyDeleteGrantPermissions() throws IOException {
+ setupDirsAndNonWritablePermissions();
+ boolean ret = FileUtil.fullyDelete(new MyFile(del), true);
+ // this time the directories with revoked permissions *should* be deleted:
+ validateAndSetWritablePermissions(false, ret);
}
/**
@@ -388,7 +446,10 @@ public class TestFileUtil {
*/
@Override
public File[] listFiles() {
- File[] files = super.listFiles();
+ final File[] files = super.listFiles();
+ if (files == null) {
+ return null;
+ }
List<File> filesList = Arrays.asList(files);
Collections.sort(filesList);
File[] myFiles = new MyFile[files.length];
@@ -405,10 +466,18 @@ public class TestFileUtil {
LOG.info("Running test to verify failure of fullyDeleteContents()");
setupDirsAndNonWritablePermissions();
boolean ret = FileUtil.fullyDeleteContents(new MyFile(del));
- validateAndSetWritablePermissions(ret);
+ validateAndSetWritablePermissions(true, ret);
}
@Test
+ public void testFailFullyDeleteContentsGrantPermissions() throws IOException
{
+ setupDirsAndNonWritablePermissions();
+ boolean ret = FileUtil.fullyDeleteContents(new MyFile(del), true);
+ // this time the directories with revoked permissions *should* be deleted:
+ validateAndSetWritablePermissions(false, ret);
+ }
+
+ @Test
public void testCopyMergeSingleDirectory() throws IOException {
setupDirs();
boolean copyMergeResult = copyMerge("partitioned", "tmp/merged");
Modified:
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java?rev=1437623&r1=1437622&r2=1437623&view=diff
==============================================================================
---
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java
(original)
+++
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java
Wed Jan 23 18:51:24 2013
@@ -120,6 +120,18 @@ public class TestHttpServer extends Http
}
@SuppressWarnings("serial")
+ public static class LongHeaderServlet extends HttpServlet {
+ @SuppressWarnings("unchecked")
+ @Override
+ public void doGet(HttpServletRequest request,
+ HttpServletResponse response
+ ) throws ServletException, IOException {
+ Assert.assertEquals(63 * 1024, request.getHeader("longheader").length());
+ response.setStatus(HttpServletResponse.SC_OK);
+ }
+ }
+
+ @SuppressWarnings("serial")
public static class HtmlContentServlet extends HttpServlet {
@Override
public void doGet(HttpServletRequest request,
@@ -139,6 +151,7 @@ public class TestHttpServer extends Http
server.addServlet("echo", "/echo", EchoServlet.class);
server.addServlet("echomap", "/echomap", EchoMapServlet.class);
server.addServlet("htmlcontent", "/htmlcontent", HtmlContentServlet.class);
+ server.addServlet("longheader", "/longheader", LongHeaderServlet.class);
server.addJerseyResourcePackage(
JerseyResource.class.getPackage().getName(), "/jersey/*");
server.start();
@@ -197,6 +210,22 @@ public class TestHttpServer extends Http
readOutput(new URL(baseUrl, "/echomap?a=b&c<=d&a=>")));
}
+ /**
+ * Test that verifies headers can be up to 64K long.
+ * The test adds a 63K header leaving 1K for other headers.
+ * This is because the header buffer setting is for ALL headers,
+ * names and values included. */
+ @Test public void testLongHeader() throws Exception {
+ URL url = new URL(baseUrl, "/longheader");
+ HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+ StringBuilder sb = new StringBuilder();
+ for (int i = 0 ; i < 63 * 1024; i++) {
+ sb.append("a");
+ }
+ conn.setRequestProperty("longheader", sb.toString());
+ assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
+ }
+
@Test public void testContentTypes() throws Exception {
// Static CSS files should have text/css
URL cssUrl = new URL(baseUrl, "/static/test.css");
Modified:
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodecFactory.java
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodecFactory.java?rev=1437623&r1=1437622&r2=1437623&view=diff
==============================================================================
---
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodecFactory.java
(original)
+++
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodecFactory.java
Wed Jan 23 18:51:24 2013
@@ -256,5 +256,17 @@ public class TestCodecFactory extends Te
checkCodec("overridden factory for .gz", NewGzipCodec.class, codec);
codec = factory.getCodecByClassName(NewGzipCodec.class.getCanonicalName());
checkCodec("overridden factory for gzip codec", NewGzipCodec.class, codec);
+
+ Configuration conf = new Configuration();
+ conf.set("io.compression.codecs",
+ " org.apache.hadoop.io.compress.GzipCodec , " +
+ " org.apache.hadoop.io.compress.DefaultCodec , " +
+ " org.apache.hadoop.io.compress.BZip2Codec ");
+ try {
+ CompressionCodecFactory.getCodecClasses(conf);
+ } catch (IllegalArgumentException e) {
+ fail("IllegalArgumentException is unexpected");
+ }
+
}
}
Modified:
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java?rev=1437623&r1=1437622&r2=1437623&view=diff
==============================================================================
---
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java
(original)
+++
hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java
Wed Jan 23 18:51:24 2013
@@ -62,7 +62,6 @@ public class TestIPC {
final private static Configuration conf = new Configuration();
final static private int PING_INTERVAL = 1000;
final static private int MIN_SLEEP_TIME = 1000;
-
/**
* Flag used to turn off the fault injection behavior
* of the various writables.
@@ -499,6 +498,26 @@ public class TestIPC {
client.call(new LongWritable(RANDOM.nextLong()),
addr, null, null, 3*PING_INTERVAL+MIN_SLEEP_TIME, conf);
}
+
+ @Test
+ public void testIpcConnectTimeout() throws Exception {
+ // start server
+ Server server = new TestServer(1, true);
+ InetSocketAddress addr = NetUtils.getConnectAddress(server);
+ //Intentionally do not start server to get a connection timeout
+
+ // start client
+ Client.setConnectTimeout(conf, 100);
+ Client client = new Client(LongWritable.class, conf);
+ // set the rpc timeout to twice the MIN_SLEEP_TIME
+ try {
+ client.call(new LongWritable(RANDOM.nextLong()),
+ addr, null, null, MIN_SLEEP_TIME*2, conf);
+ fail("Expected an exception to have been thrown");
+ } catch (SocketTimeoutException e) {
+ LOG.info("Get a SocketTimeoutException ", e);
+ }
+ }
/**
* Check that file descriptors aren't leaked by starting