Propchange: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:r1390763-1397380
  Merged /hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java:r1363593-1396941
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java?rev=1397387&r1=1397386&r2=1397387&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java Fri Oct 12 00:15:22 2012
@@ -89,7 +89,11 @@ class ChRootedFileSystem extends FilterF
   public ChRootedFileSystem(final URI uri, Configuration conf)
       throws IOException {
     super(FileSystem.get(uri, conf));
-    chRootPathPart = new Path(uri.getPath());
+    String pathString = uri.getPath();
+    if (pathString.isEmpty()) {
+      pathString = "/";
+    }
+    chRootPathPart = new Path(pathString);
     chRootPathPartString = chRootPathPart.toUri().getPath();
     myUri = uri;
     workingDir = getHomeDirectory();

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java?rev=1397387&r1=1397386&r2=1397387&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java Fri Oct 12 00:15:22 2012
@@ -205,9 +205,13 @@ public class ViewFs extends AbstractFile
         protected
         AbstractFileSystem getTargetFileSystem(final URI uri)
           throws URISyntaxException, UnsupportedFileSystemException {
+          String pathString = uri.getPath();
+          if (pathString.isEmpty()) {
+            pathString = "/";
+          }
             return new ChRootedFs(
                 AbstractFileSystem.createFileSystem(uri, config),
-                new Path(uri.getPath()));
+                new Path(pathString));
         }
 
       @Override
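The guard added in both constructors handles authority-only URIs: java.net.URI reports an empty string, not "/", as the path of a URI like mockfs://foo, and Hadoop's Path constructor rejects empty strings. A minimal sketch of the failure mode and the normalization (the hdfs://nn.example.com URI below is illustrative only):

    import java.net.URI;
    import org.apache.hadoop.fs.Path;

    public class EmptyPathDemo {
      public static void main(String[] args) {
        URI uri = URI.create("hdfs://nn.example.com");  // no path component
        System.out.println("[" + uri.getPath() + "]");  // prints [] -- empty
        // new Path("") throws IllegalArgumentException, so the patched
        // constructors substitute "/" before building the chroot path:
        String pathString = uri.getPath();
        if (pathString.isEmpty()) {
          pathString = "/";
        }
        System.out.println(new Path(pathString));       // prints /
      }
    }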
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SnappyCodec.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SnappyCodec.java?rev=1397387&r1=1397386&r2=1397387&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SnappyCodec.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SnappyCodec.java Fri Oct 12 00:15:22 2012
@@ -24,7 +24,6 @@ import java.io.OutputStream;
 
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.compress.snappy.LoadSnappy;
 import org.apache.hadoop.io.compress.snappy.SnappyCompressor;
 import org.apache.hadoop.io.compress.snappy.SnappyDecompressor;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
@@ -34,11 +33,6 @@ import org.apache.hadoop.util.NativeCode
  * This class creates snappy compressors/decompressors.
  */
 public class SnappyCodec implements Configurable, CompressionCodec {
-
-  static {
-    LoadSnappy.isLoaded();
-  }
-
   Configuration conf;
 
   /**
@@ -63,11 +57,26 @@ public class SnappyCodec implements Conf
 
   /**
    * Are the native snappy libraries loaded & initialized?
-   *
-   * @return true if loaded & initialized, otherwise false
    */
+  public static void checkNativeCodeLoaded() {
+    if (!NativeCodeLoader.buildSupportsSnappy()) {
+      throw new RuntimeException("native snappy library not available: " +
+          "this version of libhadoop was built without " +
+          "snappy support.");
+    }
+    if (!SnappyCompressor.isNativeCodeLoaded()) {
+      throw new RuntimeException("native snappy library not available: " +
+          "SnappyCompressor has not been loaded.");
+    }
+    if (!SnappyDecompressor.isNativeCodeLoaded()) {
+      throw new RuntimeException("native snappy library not available: " +
+          "SnappyDecompressor has not been loaded.");
+    }
+  }
+
   public static boolean isNativeCodeLoaded() {
-    return LoadSnappy.isLoaded() && NativeCodeLoader.isNativeCodeLoaded();
+    return SnappyCompressor.isNativeCodeLoaded() &&
+        SnappyDecompressor.isNativeCodeLoaded();
   }
 
   /**
@@ -97,9 +106,7 @@ public class SnappyCodec implements Conf
   public CompressionOutputStream createOutputStream(OutputStream out,
       Compressor compressor)
       throws IOException {
-    if (!isNativeCodeLoaded()) {
-      throw new RuntimeException("native snappy library not available");
-    }
+    checkNativeCodeLoaded();
     int bufferSize = conf.getInt(
         CommonConfigurationKeys.IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_KEY,
         CommonConfigurationKeys.IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_DEFAULT);
@@ -117,10 +124,7 @@ public class SnappyCodec implements Conf
    */
   @Override
   public Class<? extends Compressor> getCompressorType() {
-    if (!isNativeCodeLoaded()) {
-      throw new RuntimeException("native snappy library not available");
-    }
-
+    checkNativeCodeLoaded();
     return SnappyCompressor.class;
   }
 
@@ -131,9 +135,7 @@ public class SnappyCodec implements Conf
    */
   @Override
   public Compressor createCompressor() {
-    if (!isNativeCodeLoaded()) {
-      throw new RuntimeException("native snappy library not available");
-    }
+    checkNativeCodeLoaded();
     int bufferSize = conf.getInt(
         CommonConfigurationKeys.IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_KEY,
         CommonConfigurationKeys.IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_DEFAULT);
@@ -167,10 +169,7 @@ public class SnappyCodec implements Conf
   public CompressionInputStream createInputStream(InputStream in,
       Decompressor decompressor)
       throws IOException {
-    if (!isNativeCodeLoaded()) {
-      throw new RuntimeException("native snappy library not available");
-    }
-
+    checkNativeCodeLoaded();
     return new BlockDecompressorStream(in, decompressor, conf.getInt(
         CommonConfigurationKeys.IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_KEY,
         CommonConfigurationKeys.IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_DEFAULT));
@@ -183,10 +182,7 @@ public class SnappyCodec implements Conf
    */
   @Override
   public Class<? extends Decompressor> getDecompressorType() {
-    if (!isNativeCodeLoaded()) {
-      throw new RuntimeException("native snappy library not available");
-    }
-
+    checkNativeCodeLoaded();
     return SnappyDecompressor.class;
   }
 
@@ -197,9 +193,7 @@ public class SnappyCodec implements Conf
   */
   @Override
   public Decompressor createDecompressor() {
-    if (!isNativeCodeLoaded()) {
-      throw new RuntimeException("native snappy library not available");
-    }
+    checkNativeCodeLoaded();
     int bufferSize = conf.getInt(
         CommonConfigurationKeys.IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_KEY,
         CommonConfigurationKeys.IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_DEFAULT);
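With the LoadSnappy helper removed, snappy availability is now derived from NativeCodeLoader.buildSupportsSnappy() plus the per-class load flags, and the new checkNativeCodeLoaded() names which precondition failed instead of the old generic message. A hedged sketch of how a caller might probe before instantiating the codec (the chooser method and the DefaultCodec fallback are illustrative, not part of this patch):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.compress.CompressionCodec;
    import org.apache.hadoop.io.compress.DefaultCodec;
    import org.apache.hadoop.io.compress.SnappyCodec;

    public class CodecChooser {
      // Prefer snappy when its native library is usable; otherwise fall
      // back to DefaultCodec, which has a pure-Java zlib implementation.
      static CompressionCodec choose(Configuration conf) {
        if (SnappyCodec.isNativeCodeLoaded()) {
          SnappyCodec codec = new SnappyCodec();
          codec.setConf(conf);
          return codec;
        }
        DefaultCodec fallback = new DefaultCodec();
        fallback.setConf(conf);
        return fallback;
      }
    }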
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyCompressor.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyCompressor.java?rev=1397387&r1=1397386&r2=1397387&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyCompressor.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyCompressor.java Fri Oct 12 00:15:22 2012
@@ -26,6 +26,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.compress.Compressor;
+import org.apache.hadoop.util.NativeCodeLoader;
 
 /**
  * A {@link Compressor} based on the snappy compression algorithm.
@@ -51,22 +52,24 @@ public class SnappyCompressor implements
   private long bytesRead = 0L;
   private long bytesWritten = 0L;
 
-
+  private static boolean nativeSnappyLoaded = false;
+
   static {
-    if (LoadSnappy.isLoaded()) {
-      // Initialize the native library
+    if (NativeCodeLoader.isNativeCodeLoaded() &&
+        NativeCodeLoader.buildSupportsSnappy()) {
       try {
         initIDs();
+        nativeSnappyLoaded = true;
       } catch (Throwable t) {
-        // Ignore failure to load/initialize snappy
-        LOG.warn(t.toString());
+        LOG.error("failed to load SnappyCompressor", t);
       }
-    } else {
-      LOG.error("Cannot load " + SnappyCompressor.class.getName() +
-          " without snappy library!");
     }
   }
-
+
+  public static boolean isNativeCodeLoaded() {
+    return nativeSnappyLoaded;
+  }
+
   /**
    * Creates a new compressor.
    *
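SnappyCompressor above and SnappyDecompressor below now carry the same self-contained load-once pattern: attempt native initialization in a static block, record the outcome in a flag, and expose it through a static query method. Reduced to a generic sketch (the class name and the commented-out native hook are hypothetical):

    public class NativeBackedResource {  // hypothetical name
      private static boolean nativeLoaded = false;

      static {
        try {
          // initIDs();  // a native hook would go here and may throw
          //             // UnsatisfiedLinkError when the library is absent
          nativeLoaded = true;
        } catch (Throwable t) {
          // leave nativeLoaded == false; callers must consult the flag
        }
      }

      public static boolean isNativeCodeLoaded() {
        return nativeLoaded;
      }
    }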
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java?rev=1397387&r1=1397386&r2=1397387&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java Fri Oct 12 00:15:22 2012
@@ -25,6 +25,7 @@ import java.nio.ByteBuffer;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.io.compress.Decompressor;
+import org.apache.hadoop.util.NativeCodeLoader;
 
 /**
  * A {@link Decompressor} based on the snappy compression algorithm.
@@ -47,21 +48,24 @@ public class SnappyDecompressor implemen
   private int userBufOff = 0, userBufLen = 0;
   private boolean finished;
 
+  private static boolean nativeSnappyLoaded = false;
+
   static {
-    if (LoadSnappy.isLoaded()) {
-      // Initialize the native library
+    if (NativeCodeLoader.isNativeCodeLoaded() &&
+        NativeCodeLoader.buildSupportsSnappy()) {
       try {
         initIDs();
+        nativeSnappyLoaded = true;
       } catch (Throwable t) {
-        // Ignore failure to load/initialize snappy
-        LOG.warn(t.toString());
+        LOG.error("failed to load SnappyDecompressor", t);
       }
-    } else {
-      LOG.error("Cannot load " + SnappyDecompressor.class.getName() +
-          " without snappy library!");
     }
   }
-
+
+  public static boolean isNativeCodeLoaded() {
+    return nativeSnappyLoaded;
+  }
+
   /**
    * Creates a new compressor.
    *
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java?rev=1397387&r1=1397386&r2=1397387&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java Fri Oct 12 00:15:22 2012
@@ -87,7 +87,6 @@ import org.apache.hadoop.security.SaslRp
 import org.apache.hadoop.security.SaslRpcServer.SaslGssCallbackHandler;
 import org.apache.hadoop.security.SaslRpcServer.SaslStatus;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.hadoop.security.authorize.AuthorizationException;
 import org.apache.hadoop.security.authorize.PolicyProvider;
 import org.apache.hadoop.security.authorize.ProxyUsers;
@@ -1374,20 +1373,38 @@ public abstract class Server {
         dataLengthBuffer.clear();
         if (authMethod == null) {
           throw new IOException("Unable to read authentication method");
-        }
-        if (isSecurityEnabled && authMethod == AuthMethod.SIMPLE) {
-          AccessControlException ae = new AccessControlException("Authorization ("
-              + CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION
-              + ") is enabled but authentication ("
-              + CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION
-              + ") is configured as simple. Please configure another method "
-              + "like kerberos or digest.");
-          setupResponse(authFailedResponse, authFailedCall, RpcStatusProto.FATAL,
-              null, ae.getClass().getName(), ae.getMessage());
-          responder.doRespond(authFailedCall);
-          throw ae;
-        }
-        if (!isSecurityEnabled && authMethod != AuthMethod.SIMPLE) {
+        }
+        final boolean clientUsingSasl;
+        switch (authMethod) {
+          case SIMPLE: { // no sasl for simple
+            if (isSecurityEnabled) {
+              AccessControlException ae = new AccessControlException("Authorization ("
+                  + CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION
+                  + ") is enabled but authentication ("
+                  + CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION
+                  + ") is configured as simple. Please configure another method "
+                  + "like kerberos or digest.");
+              setupResponse(authFailedResponse, authFailedCall, RpcStatusProto.FATAL,
+                  null, ae.getClass().getName(), ae.getMessage());
+              responder.doRespond(authFailedCall);
+              throw ae;
+            }
+            clientUsingSasl = false;
+            useSasl = false;
+            break;
+          }
+          case DIGEST: {
+            clientUsingSasl = true;
+            useSasl = (secretManager != null);
+            break;
+          }
+          default: {
+            clientUsingSasl = true;
+            useSasl = isSecurityEnabled;
+            break;
+          }
+        }
+        if (clientUsingSasl && !useSasl) {
           doSaslReply(SaslStatus.SUCCESS, new IntWritable(
               SaslRpcServer.SWITCH_TO_SIMPLE_AUTH), null, null);
           authMethod = AuthMethod.SIMPLE;
@@ -1396,9 +1413,6 @@ public abstract class Server {
           // to simple auth from now on.
           skipInitialSaslHandshake = true;
         }
-        if (authMethod != AuthMethod.SIMPLE) {
-          useSasl = true;
-        }
 
         connectionHeaderBuf = null;
         connectionHeaderRead = true;
@@ -1532,8 +1546,6 @@ public abstract class Server {
           UserGroupInformation realUser = user;
           user = UserGroupInformation.createProxyUser(protocolUser
               .getUserName(), realUser);
-          // Now the user is a proxy user, set Authentication method Proxy.
-          user.setAuthenticationMethod(AuthenticationMethod.PROXY);
         }
       }
     }
@@ -1883,7 +1895,7 @@ public abstract class Server {
     // Create the responder here
     responder = new Responder();
 
-    if (isSecurityEnabled) {
+    if (secretManager != null) {
       SaslRpcServer.init(conf);
     }
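The rewritten handshake reduces to a small decision table: SIMPLE never uses SASL (and is fatal when security is enabled), DIGEST uses SASL only when the server holds a SecretManager to validate tokens, and any other method (e.g. KERBEROS) follows isSecurityEnabled; a SASL-speaking client that the server will not do SASL with is told to fall back via SWITCH_TO_SIMPLE_AUTH. A paraphrase of that table as a sketch, assuming org.apache.hadoop.security.SaslRpcServer.AuthMethod (this is not the actual Server code):

    static boolean serverUsesSasl(AuthMethod method,
        boolean isSecurityEnabled, boolean hasSecretManager) {
      switch (method) {
        case SIMPLE: return false;             // never SASL for simple
        case DIGEST: return hasSecretManager;  // tokens need a validator
        default:     return isSecurityEnabled; // e.g. KERBEROS
      }
    }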
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/SampleQuantiles.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/SampleQuantiles.java?rev=1397387&r1=1397386&r2=1397387&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/SampleQuantiles.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/SampleQuantiles.java Fri Oct 12 00:15:22 2012
@@ -210,9 +210,12 @@ public class SampleQuantiles {
     int rankMin = 0;
     int desired = (int) (quantile * count);
 
+    ListIterator<SampleItem> it = samples.listIterator();
+    SampleItem prev = null;
+    SampleItem cur = it.next();
     for (int i = 1; i < samples.size(); i++) {
-      SampleItem prev = samples.get(i - 1);
-      SampleItem cur = samples.get(i);
+      prev = cur;
+      cur = it.next();
 
       rankMin += prev.g;
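The loop above walks SampleQuantiles' internal sample list, which is a linked list, so samples.get(i) is O(n) there and the original loop was O(n^2) overall; stepping a ListIterator keeps each advance O(1). A standalone illustration of the same traversal shape:

    import java.util.LinkedList;
    import java.util.ListIterator;

    public class IteratorWalk {
      public static void main(String[] args) {
        LinkedList<Integer> samples = new LinkedList<>();
        for (int i = 0; i < 5; i++) {
          samples.add(i * 10);
        }
        ListIterator<Integer> it = samples.listIterator();
        Integer prev = null;
        Integer cur = it.next();
        for (int i = 1; i < samples.size(); i++) {
          prev = cur;   // O(1) per step, unlike samples.get(i - 1)
          cur = it.next();
          System.out.println(prev + " -> " + cur);
        }
      }
    }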
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java?rev=1397387&r1=1397386&r2=1397387&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java Fri Oct 12 00:15:22 2012
@@ -499,7 +499,7 @@ public class SecurityUtil {
    * @throws IOException If unable to authenticate via SPNEGO
    */
   public static URLConnection openSecureHttpConnection(URL url) throws IOException {
-    if(!UserGroupInformation.isSecurityEnabled()) {
+    if (!HttpConfig.isSecure() && !UserGroupInformation.isSecurityEnabled()) {
       return url.openConnection();
     }

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java?rev=1397387&r1=1397386&r2=1397387&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java Fri Oct 12 00:15:22 2012
@@ -160,7 +160,7 @@ public class FileBasedKeyStoresFactory i
       } finally {
         is.close();
       }
-      LOG.info(mode.toString() + " Loaded KeyStore: " + keystoreLocation);
+      LOG.debug(mode.toString() + " Loaded KeyStore: " + keystoreLocation);
     } else {
       keystore.load(null, null);
     }
@@ -201,7 +201,7 @@ public class FileBasedKeyStoresFactory i
                                           truststorePassword,
                                           truststoreReloadInterval);
       trustManager.init();
-      LOG.info(mode.toString() + " Loaded TrustStore: " + truststoreLocation);
+      LOG.debug(mode.toString() + " Loaded TrustStore: " + truststoreLocation);
 
       trustManagers = new TrustManager[]{trustManager};
     }

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java?rev=1397387&r1=1397386&r2=1397387&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java Fri Oct 12 00:15:22 2012
@@ -75,6 +75,11 @@ public class NativeCodeLoader {
   }
 
   /**
+   * Returns true only if this build was compiled with support for snappy.
+   */
+  public static native boolean buildSupportsSnappy();
+
+  /**
    * Return if native hadoop libraries, if present, can be used for this job.
    * @param conf configuration
    *
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java?rev=1397387&r1=1397386&r2=1397387&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java Fri Oct 12 00:15:22 2012
@@ -34,6 +34,7 @@ import java.util.List;
 import java.util.Locale;
 import java.util.StringTokenizer;
 
+import com.google.common.net.InetAddresses;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.Path;
@@ -77,6 +78,9 @@ public class StringUtils {
    * @return the hostname to the first dot
    */
   public static String simpleHostname(String fullHostname) {
+    if (InetAddresses.isInetAddress(fullHostname)) {
+      return fullHostname;
+    }
     int offset = fullHostname.indexOf('.');
     if (offset != -1) {
       return fullHostname.substring(0, offset);
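Previously simpleHostname truncated at the first dot no matter the input, so an IPv4 literal came back mangled; Guava's InetAddresses.isInetAddress recognizes IPv4/IPv6 literals without any DNS lookup. The observable change, as a small sketch:

    import org.apache.hadoop.util.StringUtils;

    public class SimpleHostnameDemo {
      public static void main(String[] args) {
        // Unchanged behavior for FQDNs: prints "hadoop01"
        System.out.println(StringUtils.simpleHostname("hadoop01.domain.com"));
        // Was "10" before this patch; now prints "10.10.5.68"
        System.out.println(StringUtils.simpleHostname("10.10.5.68"));
      }
    }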
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/hadoop-policy.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/hadoop-policy.xml?rev=1397387&r1=1397386&r2=1397387&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/hadoop-policy.xml (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/hadoop-policy.xml Fri Oct 12 00:15:22 2012
@@ -239,5 +239,12 @@
     group list is separated by a blank. For e.g. "alice,bob users,wheel".
     A special value of "*" means all users are allowed.</description>
   </property>
+
+  <property>
+    <name>security.qjournal.service.protocol.acl</name>
+    <value>${HADOOP_HDFS_USER}</value>
+    <description>ACL for QJournalProtocol, used by the NN to communicate with
+    JNs when using the QuorumJournalManager for edit logs.</description>
+  </property>
 
 </configuration>

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/resources/META-INF/services/org.apache.hadoop.fs.FileSystem
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/resources/META-INF/services/org.apache.hadoop.fs.FileSystem?rev=1397387&r1=1397386&r2=1397387&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/resources/META-INF/services/org.apache.hadoop.fs.FileSystem (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/resources/META-INF/services/org.apache.hadoop.fs.FileSystem Fri Oct 12 00:15:22 2012
@@ -17,6 +17,5 @@ org.apache.hadoop.fs.LocalFileSystem
 org.apache.hadoop.fs.viewfs.ViewFileSystem
 org.apache.hadoop.fs.s3.S3FileSystem
 org.apache.hadoop.fs.s3native.NativeS3FileSystem
-org.apache.hadoop.fs.kfs.KosmosFileSystem
 org.apache.hadoop.fs.ftp.FTPFileSystem
 org.apache.hadoop.fs.HarFileSystem
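The file above is a java.util.ServiceLoader registry: FileSystem reads it to map implementations onto URI schemes, so deleting the KosmosFileSystem entry removes kfs from automatic discovery. A sketch of how the remaining registrations can be listed (the exact set depends on what is on the classpath):

    import java.util.ServiceLoader;
    import org.apache.hadoop.fs.FileSystem;

    public class ListRegisteredFileSystems {
      public static void main(String[] args) {
        for (FileSystem fs : ServiceLoader.load(FileSystem.class)) {
          System.out.println(fs.getClass().getName());
        }
      }
    }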
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml?rev=1397387&r1=1397386&r2=1397387&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml Fri Oct 12 00:15:22 2012
@@ -774,42 +774,6 @@
   <description>Replication factor</description>
 </property>
 
-<!-- Kosmos File System -->
-
-<property>
-  <name>kfs.stream-buffer-size</name>
-  <value>4096</value>
-  <description>The size of buffer to stream files.
-  The size of this buffer should probably be a multiple of hardware
-  page size (4096 on Intel x86), and it determines how much data is
-  buffered during read and write operations.</description>
-</property>
-
-<property>
-  <name>kfs.bytes-per-checksum</name>
-  <value>512</value>
-  <description>The number of bytes per checksum.  Must not be larger than
-  kfs.stream-buffer-size</description>
-</property>
-
-<property>
-  <name>kfs.client-write-packet-size</name>
-  <value>65536</value>
-  <description>Packet size for clients to write</description>
-</property>
-
-<property>
-  <name>kfs.blocksize</name>
-  <value>67108864</value>
-  <description>Block size</description>
-</property>
-
-<property>
-  <name>kfs.replication</name>
-  <value>3</value>
-  <description>Replication factor</description>
-</property>
-
 <!-- FTP file system -->
 <property>
   <name>ftp.stream-buffer-size</name>

Propchange: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/core/
------------------------------------------------------------------------------
  Merged /hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/test/core:r1363593-1396941
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/core:r1390763-1397380

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java?rev=1397387&r1=1397386&r2=1397387&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java Fri Oct 12 00:15:22 2012
@@ -342,6 +342,15 @@ public class TestChRootedFileSystem {
     chrootFs.close();
     verify(mockFs).delete(eq(rawPath), eq(true));
   }
+
+  @Test
+  public void testURIEmptyPath() throws IOException {
+    Configuration conf = new Configuration();
+    conf.setClass("fs.mockfs.impl", MockFileSystem.class, FileSystem.class);
+
+    URI chrootUri = URI.create("mockfs://foo");
+    new ChRootedFileSystem(chrootUri, conf);
+  }
 
   static class MockFileSystem extends FilterFileSystem {
     MockFileSystem() {
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java?rev=1397387&r1=1397386&r2=1397387&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java Fri Oct 12 00:15:22 2012
@@ -54,7 +54,6 @@ import org.apache.hadoop.io.SequenceFile
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.SequenceFile.CompressionType;
-import org.apache.hadoop.io.compress.snappy.LoadSnappy;
 import org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor;
 import org.apache.hadoop.io.compress.zlib.BuiltInZlibDeflater;
 import org.apache.hadoop.io.compress.zlib.BuiltInZlibInflater;
@@ -103,14 +102,9 @@ public class TestCodec {
 
   @Test
   public void testSnappyCodec() throws IOException {
-    if (LoadSnappy.isAvailable()) {
-      if (LoadSnappy.isLoaded()) {
-        codecTest(conf, seed, 0, "org.apache.hadoop.io.compress.SnappyCodec");
-        codecTest(conf, seed, count, "org.apache.hadoop.io.compress.SnappyCodec");
-      }
-      else {
-        Assert.fail("Snappy native available but Hadoop native not");
-      }
+    if (SnappyCodec.isNativeCodeLoaded()) {
+      codecTest(conf, seed, 0, "org.apache.hadoop.io.compress.SnappyCodec");
+      codecTest(conf, seed, count, "org.apache.hadoop.io.compress.SnappyCodec");
     }
   }

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java?rev=1397387&r1=1397386&r2=1397387&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java Fri Oct 12 00:15:22 2012
@@ -60,6 +60,7 @@ import org.apache.hadoop.security.token.
 import org.apache.hadoop.security.token.TokenSelector;
 import org.apache.hadoop.security.token.SecretManager.InvalidToken;
 import org.apache.log4j.Level;
+import org.junit.BeforeClass;
 import org.junit.Test;
 
 /** Unit tests for using Sasl over RPC. */
@@ -76,7 +77,8 @@ public class TestSaslRPC {
   static final String SERVER_PRINCIPAL_2 = "p2/foo@BAR";
 
   private static Configuration conf;
-  static {
+  @BeforeClass
+  public static void setup() {
     conf = new Configuration();
     conf.set(HADOOP_SECURITY_AUTHENTICATION, "kerberos");
     UserGroupInformation.setConfiguration(conf);
@@ -449,11 +451,25 @@ public class TestSaslRPC {
   }
 
   @Test
-  public void testDigestAuthMethod() throws Exception {
+  public void testDigestAuthMethodSecureServer() throws Exception {
+    checkDigestAuthMethod(true);
+  }
+
+  @Test
+  public void testDigestAuthMethodInsecureServer() throws Exception {
+    checkDigestAuthMethod(false);
+  }
+
+  private void checkDigestAuthMethod(boolean secureServer) throws Exception {
    TestTokenSecretManager sm = new TestTokenSecretManager();
    Server server = new RPC.Builder(conf).setProtocol(TestSaslProtocol.class)
        .setInstance(new TestSaslImpl()).setBindAddress(ADDRESS).setPort(0)
        .setNumHandlers(5).setVerbose(true).setSecretManager(sm).build();
+    if (secureServer) {
+      server.enableSecurity();
+    } else {
+      server.disableSecurity();
+    }
     server.start();
 
     final UserGroupInformation current = UserGroupInformation.getCurrentUser();
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/GenericTestUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/GenericTestUtils.java?rev=1397387&r1=1397386&r2=1397387&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/GenericTestUtils.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/GenericTestUtils.java Fri Oct 12 00:15:22 2012
@@ -25,6 +25,7 @@ import java.util.Random;
 import java.util.Set;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeoutException;
+import java.util.concurrent.atomic.AtomicInteger;
 import java.util.regex.Pattern;
 
 import org.apache.commons.logging.Log;
@@ -48,6 +49,8 @@ import com.google.common.collect.Sets;
  */
 public abstract class GenericTestUtils {
 
+  private static final AtomicInteger sequence = new AtomicInteger();
+
   /**
    * Extracts the name of the method where the invocation has happened
    * @return String name of the invoking method
@@ -55,6 +58,14 @@ public abstract class GenericTestUtils {
   public static String getMethodName() {
     return Thread.currentThread().getStackTrace()[2].getMethodName();
   }
+
+  /**
+   * Generates a process-wide unique sequence number.
+   * @return an unique sequence number
+   */
+  public static int uniqueSequenceId() {
+    return sequence.incrementAndGet();
+  }
 
   /**
    * Assert that a given file exists.
@@ -104,7 +115,10 @@ public abstract class GenericTestUtils {
       Thread.sleep(checkEveryMillis);
     } while (Time.now() - st < waitForMillis);
-    throw new TimeoutException("Timed out waiting for condition");
+
+    throw new TimeoutException("Timed out waiting for condition. " +
+        "Thread diagnostics:\n" +
+        TimedOutTestsListener.buildThreadDiagnosticString());
   }
 
   public static class LogCapturer {
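Because waitFor now calls TimedOutTestsListener.buildThreadDiagnosticString(), a timed-out condition reports a full thread dump instead of a bare message. A hedged usage sketch (the serverIsUp condition and the timeouts are illustrative; waitFor takes a Guava Supplier<Boolean>, a poll interval, and a deadline, both in milliseconds):

    import java.util.concurrent.TimeoutException;
    import com.google.common.base.Supplier;
    import org.apache.hadoop.test.GenericTestUtils;

    public class WaitForDemo {
      static boolean serverIsUp() {
        return true;  // hypothetical condition polled by the test
      }

      public static void main(String[] args)
          throws TimeoutException, InterruptedException {
        GenericTestUtils.waitFor(new Supplier<Boolean>() {
          @Override
          public Boolean get() {
            return serverIsUp();
          }
        }, 100, 10000);  // poll every 100 ms, give up after 10 s
        // On timeout, the exception message embeds the thread dump built
        // by TimedOutTestsListener.buildThreadDiagnosticString().
      }
    }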
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/TimedOutTestsListener.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/TimedOutTestsListener.java?rev=1397387&r1=1397386&r2=1397387&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/TimedOutTestsListener.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/TimedOutTestsListener.java Fri Oct 12 00:15:22 2012
@@ -58,18 +58,27 @@ public class TimedOutTestsListener exten
         && failure.getMessage().startsWith(TEST_TIMED_OUT_PREFIX)) {
       output.println("====> TEST TIMED OUT. PRINTING THREAD DUMP. <====");
       output.println();
-      DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd hh:mm:ss,SSS");
-      output.println(String.format("Timestamp: %s", dateFormat.format(new Date())));
+      output.print(buildThreadDiagnosticString());
+    }
+  }
+
+  public static String buildThreadDiagnosticString() {
+    StringWriter sw = new StringWriter();
+    PrintWriter output = new PrintWriter(sw);
+
+    DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd hh:mm:ss,SSS");
+    output.println(String.format("Timestamp: %s", dateFormat.format(new Date())));
+    output.println();
+    output.println(buildThreadDump());
+
+    String deadlocksInfo = buildDeadlockInfo();
+    if (deadlocksInfo != null) {
+      output.println("====> DEADLOCKS DETECTED <====");
       output.println();
-      output.println(buildThreadDump());
-
-      String deadlocksInfo = buildDeadlockInfo();
-      if (deadlocksInfo != null) {
-        output.println("====> DEADLOCKS DETECTED <====");
-        output.println();
-        output.println(deadlocksInfo);
-      }
+      output.println(deadlocksInfo);
     }
+
+    return sw.toString();
   }
 
   static String buildThreadDump() {

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringUtils.java?rev=1397387&r1=1397386&r2=1397387&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringUtils.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringUtils.java Fri Oct 12 00:15:22 2012
@@ -282,6 +282,19 @@ public class TestStringUtils extends Uni
     }
   }
 
+  @Test
+  public void testSimpleHostName() {
+    assertEquals("Should return hostname when FQDN is specified",
+        "hadoop01",
+        StringUtils.simpleHostname("hadoop01.domain.com"));
+    assertEquals("Should return hostname when only hostname is specified",
+        "hadoop01",
+        StringUtils.simpleHostname("hadoop01"));
+    assertEquals("Should not truncate when IP address is passed",
+        "10.10.5.68",
+        StringUtils.simpleHostname("10.10.5.68"));
+  }
+
   // Benchmark for StringUtils split
   public static void main(String []args) {
     final String TO_SPLIT = "foo,bar,baz,blah,blah";
