Author: arp Date: Thu Oct 24 20:33:04 2013 New Revision: 1535534 URL: http://svn.apache.org/r1535534 Log: Merging r1535122 through r1535532 from trunk to HDFS-2832
Added: hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestURLConnectionFactory.java - copied unchanged from r1535532, hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestURLConnectionFactory.java Modified: hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/ (props changed) hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/ (props changed) hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/URLConnectionFactory.java hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTimeouts.java Propchange: hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/ ------------------------------------------------------------------------------ Merged /hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs:r1535122-1535532 Modified: hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1535534&r1=1535533&r2=1535534&view=diff ============================================================================== --- hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt (original) +++ hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt Thu Oct 24 20:33:04 2013 @@ -335,10 +335,16 @@ Release 2.3.0 - UNRELEASED HDFS-4885. Improve the verifyBlockPlacement() API in BlockPlacementPolicy. 
(Junping Du via szetszwo) + HDFS-5363. Refactor WebHdfsFileSystem: move SPNEGO-authenticated connection + creation to URLConnectionFactory. (Haohui Mai via jing9) + OPTIMIZATIONS HDFS-5239. Allow FSNamesystem lock fairness to be configurable (daryn) + HDFS-5341. Reduce fsdataset lock duration during directory scanning. + (Qus-Jiawei via kihwal) + BUG FIXES HDFS-5034. Remove debug prints from GetFileLinkInfo (Andrew Wang via Colin Patrick McCabe) Propchange: hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/ ------------------------------------------------------------------------------ Merged /hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java:r1535122-1535532 Modified: hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java?rev=1535534&r1=1535533&r2=1535534&view=diff ============================================================================== --- hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java (original) +++ hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java Thu Oct 24 20:33:04 2013 @@ -191,6 +191,11 @@ public class DirectoryScanner implements private final FsVolumeSpi volume; + /** + * Get the file's length in async block scan + */ + private final long blockFileLength; + private final static Pattern CONDENSED_PATH_REGEX = Pattern.compile("(?<!^)(\\\\|/){2,}"); @@ -235,6 +240,7 @@ public class DirectoryScanner implements getCondensedPath(vol.getBasePath()); this.blockSuffix = blockFile == null ? null : getSuffix(blockFile, condensedVolPath); + this.blockFileLength = (blockFile != null) ? 
blockFile.length() : 0; if (metaFile == null) { this.metaSuffix = null; } else if (blockFile == null) { @@ -251,6 +257,10 @@ public class DirectoryScanner implements new File(volume.getBasePath(), blockSuffix); } + long getBlockFileLength() { + return blockFileLength; + } + File getMetaFile() { if (metaSuffix == null) { return null; } @@ -458,7 +468,7 @@ public class DirectoryScanner implements // Block metadata file exists and block file is missing addDifference(diffRecord, statsRecord, info); } else if (info.getGenStamp() != memBlock.getGenerationStamp() - || info.getBlockFile().length() != memBlock.getNumBytes()) { + || info.getBlockFileLength() != memBlock.getNumBytes()) { // Block metadata file is missing or has wrong generation stamp, // or block file length is different than expected statsRecord.mismatchBlocks++; Modified: hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/URLConnectionFactory.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/URLConnectionFactory.java?rev=1535534&r1=1535533&r2=1535534&view=diff ============================================================================== --- hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/URLConnectionFactory.java (original) +++ hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/URLConnectionFactory.java Thu Oct 24 20:33:04 2013 @@ -19,49 +19,114 @@ package org.apache.hadoop.hdfs.web; import java.io.IOException; +import java.net.HttpURLConnection; import java.net.URL; import java.net.URLConnection; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; +import 
org.apache.hadoop.hdfs.web.resources.HttpOpParam; +import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.security.authentication.client.AuthenticatedURL; +import org.apache.hadoop.security.authentication.client.AuthenticationException; +import org.apache.hadoop.security.authentication.client.ConnectionConfigurator; /** * Utilities for handling URLs */ -@InterfaceAudience.LimitedPrivate({"HDFS"}) +@InterfaceAudience.LimitedPrivate({ "HDFS" }) @InterfaceStability.Unstable public class URLConnectionFactory { + private static final Log LOG = LogFactory.getLog(URLConnectionFactory.class); + + /** SPNEGO authenticator */ + private static final KerberosUgiAuthenticator AUTH = new KerberosUgiAuthenticator(); + /** * Timeout for socket connects and reads */ - public final static int DEFAULT_SOCKET_TIMEOUT = 1*60*1000; // 1 minute + public final static int DEFAULT_SOCKET_TIMEOUT = 1 * 60 * 1000; // 1 minute + + public static final URLConnectionFactory DEFAULT_CONNECTION_FACTORY = new URLConnectionFactory( + DEFAULT_SOCKET_TIMEOUT); - public static final URLConnectionFactory DEFAULT_CONNECTION_FACTORY = new URLConnectionFactory(DEFAULT_SOCKET_TIMEOUT); - private int socketTimeout; + /** Configure connections for AuthenticatedURL */ + private ConnectionConfigurator connConfigurator = new ConnectionConfigurator() { + @Override + public HttpURLConnection configure(HttpURLConnection conn) + throws IOException { + URLConnectionFactory.setTimeouts(conn, socketTimeout); + return conn; + } + }; + public URLConnectionFactory(int socketTimeout) { this.socketTimeout = socketTimeout; } - + /** * Opens a url with read and connect timeouts - * @param url to open + * + * @param url + * to open * @return URLConnection * @throws IOException */ public URLConnection openConnection(URL url) throws IOException { URLConnection connection = url.openConnection(); - setTimeouts(connection); - return connection; + if (connection instanceof HttpURLConnection) { + 
connConfigurator.configure((HttpURLConnection) connection); + } + return connection; + } + + /** + * Opens a url with read and connect timeouts + * + * @param url URL to open + * @return URLConnection + * @throws IOException + * @throws AuthenticationException + */ + public URLConnection openConnection(HttpOpParam.Op op, URL url) + throws IOException, AuthenticationException { + if (op.getRequireAuth()) { + if (LOG.isDebugEnabled()) { + LOG.debug("open AuthenticatedURL connection" + url); + } + UserGroupInformation.getCurrentUser().checkTGTAndReloginFromKeytab(); + final AuthenticatedURL.Token authToken = new AuthenticatedURL.Token(); + return new AuthenticatedURL(AUTH, connConfigurator).openConnection(url, + authToken); + } else { + if (LOG.isDebugEnabled()) { + LOG.debug("open URL connection"); + } + return openConnection(url); + } + } + + public ConnectionConfigurator getConnConfigurator() { + return connConfigurator; + } + + public void setConnConfigurator(ConnectionConfigurator connConfigurator) { + this.connConfigurator = connConfigurator; } /** * Sets timeout parameters on the given URLConnection. * - * @param connection URLConnection to set + * @param connection + * URLConnection to set + * @param socketTimeout + * the connection and read timeout of the connection. 
*/ - public void setTimeouts(URLConnection connection) { + static void setTimeouts(URLConnection connection, int socketTimeout) { connection.setConnectTimeout(socketTimeout); connection.setReadTimeout(socketTimeout); } Modified: hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java?rev=1535534&r1=1535533&r2=1535534&view=diff ============================================================================== --- hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java (original) +++ hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java Thu Oct 24 20:33:04 2013 @@ -94,9 +94,7 @@ import org.apache.hadoop.ipc.RemoteExcep import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.security.authentication.client.AuthenticatedURL; import org.apache.hadoop.security.authentication.client.AuthenticationException; -import org.apache.hadoop.security.authentication.client.ConnectionConfigurator; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.TokenIdentifier; import org.apache.hadoop.security.token.TokenRenewer; @@ -119,20 +117,9 @@ public class WebHdfsFileSystem extends F /** Http URI: http://namenode:port/{PATH_PREFIX}/path/to/file */ public static final String PATH_PREFIX = "/" + SCHEME + "/v" + VERSION; - /** SPNEGO authenticator */ - private static final KerberosUgiAuthenticator AUTH = new KerberosUgiAuthenticator(); /** Default connection factory may be overridden in tests to use smaller timeout values */ URLConnectionFactory connectionFactory = 
URLConnectionFactory.DEFAULT_CONNECTION_FACTORY; - /** Configures connections for AuthenticatedURL */ - private final ConnectionConfigurator CONN_CONFIGURATOR = - new ConnectionConfigurator() { - @Override - public HttpURLConnection configure(HttpURLConnection conn) - throws IOException { - connectionFactory.setTimeouts(conn); - return conn; - } - }; + /** Delegation token kind */ public static final Text TOKEN_KIND = new Text("WEBHDFS delegation"); /** Token selector */ @@ -504,16 +491,7 @@ public class WebHdfsFileSystem extends F throws IOException { final HttpURLConnection conn; try { - if (op.getRequireAuth()) { - LOG.debug("open AuthenticatedURL connection"); - UserGroupInformation.getCurrentUser().checkTGTAndReloginFromKeytab(); - final AuthenticatedURL.Token authToken = new AuthenticatedURL.Token(); - conn = new AuthenticatedURL(AUTH, CONN_CONFIGURATOR).openConnection( - url, authToken); - } else { - LOG.debug("open URL connection"); - conn = (HttpURLConnection)connectionFactory.openConnection(url); - } + conn = (HttpURLConnection) connectionFactory.openConnection(op, url); } catch (AuthenticationException e) { throw new IOException(e); } @@ -635,8 +613,10 @@ public class WebHdfsFileSystem extends F checkRetry = false; //Step 2) Submit another Http request with the URL from the Location header with data. 
- conn = (HttpURLConnection)connectionFactory.openConnection(new URL(redirect)); - conn.setRequestProperty("Content-Type", MediaType.APPLICATION_OCTET_STREAM); + conn = (HttpURLConnection) connectionFactory.openConnection(new URL( + redirect)); + conn.setRequestProperty("Content-Type", + MediaType.APPLICATION_OCTET_STREAM); conn.setChunkedStreamingMode(32 << 10); //32kB-chunk connect(); return conn; @@ -658,7 +638,8 @@ public class WebHdfsFileSystem extends F disconnect(); checkRetry = false; - conn = (HttpURLConnection)connectionFactory.openConnection(new URL(redirect)); + conn = (HttpURLConnection) connectionFactory.openConnection(new URL( + redirect)); connect(); } @@ -892,12 +873,6 @@ public class WebHdfsFileSystem extends F .write(bufferSize); } - @SuppressWarnings("deprecation") - @Override - public boolean delete(final Path f) throws IOException { - return delete(f, true); - } - @Override public boolean delete(Path f, boolean recursive) throws IOException { final HttpOpParam.Op op = DeleteOpParam.Op.DELETE; Modified: hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTimeouts.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTimeouts.java?rev=1535534&r1=1535533&r2=1535534&view=diff ============================================================================== --- hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTimeouts.java (original) +++ hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTimeouts.java Thu Oct 24 20:33:04 2013 @@ -25,9 +25,11 @@ import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; +import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.ServerSocket; 
import java.net.Socket; +import java.net.SocketAddress; import java.net.SocketTimeoutException; import java.nio.channels.SocketChannel; import java.util.ArrayList; @@ -41,6 +43,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.server.namenode.NameNode; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.net.NetUtils; @@ -71,8 +74,9 @@ public class TestWebHdfsTimeouts { @Before public void setUp() throws Exception { Configuration conf = WebHdfsTestUtil.createConf(); - nnHttpAddress = NameNode.getHttpAddress(conf); - serverSocket = new ServerSocket(nnHttpAddress.getPort(), CONNECTION_BACKLOG); + serverSocket = new ServerSocket(0, CONNECTION_BACKLOG); + nnHttpAddress = new InetSocketAddress("localhost", serverSocket.getLocalPort()); + conf.set(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY, "localhost:" + serverSocket.getLocalPort()); fs = WebHdfsTestUtil.getWebHdfsFileSystem(conf); fs.connectionFactory = connectionFactory; clients = new ArrayList<SocketChannel>();