Author: cdouglas
Date: Thu Mar 26 01:41:57 2009
New Revision: 758495
URL: http://svn.apache.org/viewvc?rev=758495&view=rev
Log:
HADOOP-5363. Add support for proxying connections to multiple clusters with
different versions to hdfsproxy. Contributed by Zhiyong Zhang
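
The routing added here keys off the requested hostname: each proxied cluster
is reached through its own hostname (CNAME), and hdfsproxy-site.xml maps that
hostname to the context path of a forward-target war built against the
matching Hadoop version. A condensed sketch of the dispatch, taken from
ProxyForwardServlet in the diff below (error handling and logging omitted):

    // Inside ProxyForwardServlet.doGet, condensed:
    String hostname = request.getServerName();     // one hostname per proxied cluster
    String version = configuration.get(hostname);  // hostname -> target war's context path
    ServletContext dst = getServletContext().getContext(version);
    dst.getRequestDispatcher(buildForwardPath(request, request.getServletPath()))
       .forward(request, response);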
Added:
hadoop/core/trunk/src/contrib/hdfsproxy/conf/tomcat-forward-web.xml
hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileForward.java
hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyForwardServlet.java
hadoop/core/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/SimpleServlet.java
hadoop/core/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestProxyForwardServlet.java
hadoop/core/trunk/src/contrib/hdfsproxy/src/test/resources/proxy-config/hdfsproxy-site.xml
Modified:
hadoop/core/trunk/CHANGES.txt
hadoop/core/trunk/src/contrib/hdfsproxy/build.xml
hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileDataServlet.java
hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFilter.java
hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyListPathsServlet.java
hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyStreamFile.java
hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyUtil.java
hadoop/core/trunk/src/contrib/hdfsproxy/src/test/resources/cactus-web.xml
hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java
hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HsftpFileSystem.java
Modified: hadoop/core/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/CHANGES.txt?rev=758495&r1=758494&r2=758495&view=diff
==============================================================================
--- hadoop/core/trunk/CHANGES.txt (original)
+++ hadoop/core/trunk/CHANGES.txt Thu Mar 26 01:41:57 2009
@@ -63,6 +63,9 @@
HADOOP-4539. Introduce backup node and checkpoint node. (shv)
+ HADOOP-5363. Add support for proxying connections to multiple clusters with
+ different versions to hdfsproxy. (Zhiyong Zhang via cdouglas)
+
IMPROVEMENTS
HADOOP-4565. Added CombineFileInputFormat to use data locality information
Modified: hadoop/core/trunk/src/contrib/hdfsproxy/build.xml
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hdfsproxy/build.xml?rev=758495&r1=758494&r2=758495&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hdfsproxy/build.xml (original)
+++ hadoop/core/trunk/src/contrib/hdfsproxy/build.xml Thu Mar 26 01:41:57 2009
@@ -18,7 +18,7 @@
-->
<project name="hdfsproxy" default="jar" xmlns:ivy="antlib:org.apache.ivy.ant">
- <property name="hdfsproxyVersion" value="1.0"/>
+ <property name="hdfsproxyVersion" value="2.0"/>
<property name="final.name"
value="${ant.project.name}-${hdfsproxyVersion}"/>
<property name="javac.debug" value="on"/>
<property name="javac.optimize" value="on"/>
@@ -44,8 +44,7 @@
<property name="tomcat.container.id" value="tomcat5x"/>
<property name="cargo.logging" value="high"/>
<property name="cactus.formatter.type" value="xml"/>
- <property name="cactus.warfile.name" value="test"/>
-
+ <property name="cactus.warfile.name" value="test"/>
<available file="${hadoop.root}/build/classes" type="dir" property="test.available"/>
<property environment="env"/>
@@ -62,6 +61,7 @@
<or>
<equals arg1="${testcase}" arg2="TestProxyFilter" />
<equals arg1="${testcase}" arg2="TestProxyUtil" />
+ <equals arg1="${testcase}" arg2="TestProxyForwardServlet" />
<not>
<isset property="testcase"/>
</not>
@@ -103,7 +103,6 @@
<attribute name="Implementation-Vendor" value="Apache"/>
</section>
</manifest>
-
</jar>
</target>
@@ -112,7 +111,7 @@
<!-- Make war file -->
<!-- ================================================================== -->
- <target name="war" depends="local-package" description="Create war">
+ <target name="war" depends="compile" description="Create war">
<echo>
Building the .war file
</echo>
@@ -129,9 +128,29 @@
<classes dir="${proxy.conf.dir}" excludes="**/*.example
**/*.template **/*.sh hadoop-site.xml"/>
<classes dir="${build.classes}"/>
<classes dir="${hadoop.root}/build/classes"/>
- <classes dir="${test.build.dir}"/>
</war>
</target>
+
+ <target name="forward" depends="compile" description="Create forward
war">
+ <echo>
+ Building the forward war file
+ </echo>
+ <war destfile="${build.dir}/${final.name}-forward.war" webxml="${basedir}/conf/tomcat-forward-web.xml">
+ <lib dir="${common.ivy.lib.dir}">
+ <include name="commons-logging-${commons-logging.version}.jar"/>
+ <include name="junit-${junit.version}.jar"/>
+ <include name="log4j-${log4j.version}.jar"/>
+ <include name="slf4j-api-${slf4j-api.version}.jar"/>
+ <include name="slf4j-log4j12-${slf4j-log4j12.version}.jar"/>
+ <include name="xmlenc-${xmlenc.version}.jar"/>
+ <include name="core-${core.vesion}.jar"/>
+ </lib>
+ <classes dir="${proxy.conf.dir}" excludes="**/*.example
**/*.template **/*.sh hadoop-site.xml"/>
+ <classes dir="${build.classes}"/>
+ <classes dir="${hadoop.root}/build/classes"/>
+ </war>
+ </target>
+
<target name="cactifywar"
depends="war,load-tasks,cactifywar-pure,cactifywar-clover" description="To
include clover coverage test use -Dclover.home ..."/>
<target name="cactifywar-pure" depends="war,load-tasks"
unless="useClover">
@@ -145,6 +164,7 @@
mapping="/ServletRedirectorSecure" roles="test"/>
<filterredirector mapping="/test/filterRedirector.jsp"/>
<classes dir="${proxy.conf.test}" excludes="**/*.template **/*.sh"/>
+ <classes dir="${test.build.dir}"/>
</cactifywar>
</target>
@@ -159,6 +179,7 @@
mapping="/ServletRedirectorSecure" roles="test"/>
<filterredirector mapping="/test/filterRedirector.jsp"/>
<classes dir="${proxy.conf.test}" excludes="**/*.template **/*.sh"/>
+ <classes dir="${test.build.dir}"/>
<lib dir="${clover.home}/lib">
<include name="clover.jar"/>
</lib>
@@ -196,6 +217,7 @@
<include name="**/${testcase}.java"/>
<exclude name="**/TestProxyFilter.java"/>
<exclude name="**/TestProxyUtil.java"/>
+ <exclude name="**/TestProxyForwardServlet.java"/>
</fileset>
</batchtest>
</junit>
@@ -304,7 +326,7 @@
<!-- ================================================================== -->
<!-- -->
<!-- ================================================================== -->
- <target name="local-package" depends="jar" description="Package in
local build directory">
+ <target name="local-package" depends="jar,war" description="Package in
local build directory">
<mkdir dir="${build.dir}/${final.name}"/>
<mkdir dir="${build.dir}/${final.name}/logs"/>
<copy todir="${build.dir}/${final.name}" includeEmptyDirs="false">
@@ -367,22 +389,22 @@
</chmod>
</target>
-
<target name="package" depends="local-package" description="Build
distribution">
<mkdir dir="${dist.dir}/contrib/${name}"/>
<copy todir="${dist.dir}/contrib/${name}">
<fileset dir="${build.dir}/${final.name}">
<exclude name="**/lib/**" />
<exclude name="**/src/**" />
+ <exclude name="*.war" />
</fileset>
</copy>
- <chmod dir="${dist.dir}/contrib/${name}/bin" perm="a+x" includes="*"/>
+ <chmod dir="${dist.dir}/contrib/${name}/bin" perm="a+x" includes="*"/>
</target>
<!-- ================================================================== -->
<!-- Make release tarball -->
<!-- ================================================================== -->
- <target name="tar" depends="local-package" description="Make release
tarball">
+ <target name="tar" depends="local-package,war" description="Make
release tarball">
<macro_tar param.destfile="${build.dir}/${final.name}.tar.gz">
<param.listofitems>
<tarfileset dir="${build.dir}" mode="664">
@@ -396,7 +418,7 @@
</macro_tar>
</target>
- <target name="binary" depends="local-package" description="Make tarball
without source and documentation">
+ <target name="binary" depends="local-package,war" description="Make
tarball without source and documentation">
<macro_tar param.destfile="${build.dir}/${final.name}-bin.tar.gz">
<param.listofitems>
<tarfileset dir="${build.dir}" mode="664">
Added: hadoop/core/trunk/src/contrib/hdfsproxy/conf/tomcat-forward-web.xml
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hdfsproxy/conf/tomcat-forward-web.xml?rev=758495&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hdfsproxy/conf/tomcat-forward-web.xml (added)
+++ hadoop/core/trunk/src/contrib/hdfsproxy/conf/tomcat-forward-web.xml Thu Mar 26 01:41:57 2009
@@ -0,0 +1,110 @@
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<!DOCTYPE web-app
+ PUBLIC "-//Sun Microsystems, Inc.//DTD Web Application 2.3//EN"
+ "http://java.sun.com/dtd/web-app_2_3.dtd">
+
+<web-app>
+
+
+ <!-- General description of your web application -->
+
+ <display-name>HDFS Proxy</display-name>
+ <description>
+ forward war used to get data from the grid
+ </description>
+
+ <context-param>
+ <param-name>webmaster</param-name>
+ <param-value>[email protected]</param-value>
+ <description>
+ The EMAIL address of the administrator to whom questions
+ and comments about this application should be addressed.
+ </description>
+ </context-param>
+
+ <filter>
+ <filter-name>proxyFilter</filter-name>
+ <filter-class>org.apache.hadoop.hdfsproxy.ProxyFilter</filter-class>
+ <init-param>
+ <param-name>filteraddress</param-name>
+ <param-value>10</param-value>
+ </init-param>
+ </filter>
+
+ <filter-mapping>
+ <filter-name>proxyFilter</filter-name>
+ <url-pattern>/*</url-pattern>
+ </filter-mapping>
+
+
+ <servlet>
+ <servlet-name>proxyForward</servlet-name>
+ <description>forward data access to specific servlets</description>
+ <servlet-class>org.apache.hadoop.hdfsproxy.ProxyForwardServlet</servlet-class>
+ </servlet>
+
+ <servlet-mapping>
+ <servlet-name>proxyForward</servlet-name>
+ <url-pattern>/listPaths/*</url-pattern>
+ </servlet-mapping>
+ <servlet-mapping>
+ <servlet-name>proxyForward</servlet-name>
+ <url-pattern>/data/*</url-pattern>
+ </servlet-mapping>
+ <servlet-mapping>
+ <servlet-name>proxyForward</servlet-name>
+ <url-pattern>/streamFile/*</url-pattern>
+ </servlet-mapping>
+
+ <servlet>
+ <servlet-name>fileForward</servlet-name>
+ <description>forward file data access to streamFile</description>
+ <servlet-class>org.apache.hadoop.hdfsproxy.ProxyFileForward</servlet-class>
+ </servlet>
+
+ <servlet-mapping>
+ <servlet-name>fileForward</servlet-name>
+ <url-pattern>/file/*</url-pattern>
+ </servlet-mapping>
+
+
+ <welcome-file-list>
+ <welcome-file>index.html</welcome-file>
+ </welcome-file-list>
+
+ <!-- Define the default session timeout for your application,
+ in minutes. From a servlet or JSP page, you can modify
+ the timeout for a particular session dynamically by using
+ HttpSession.setMaxInactiveInterval(). -->
+
+ <session-config>
+ <session-timeout>30</session-timeout> <!-- 30 minutes -->
+ </session-config>
+
+
+</web-app>
+
+
+
+
+
+
+
+
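
To illustrate the servlet mappings above, consider a hypothetical request
against the forward war (the hostname and file path are invented for the
example):

    // https://proxy-test:8443/file/user/foo/part-00000
    // The /file/* mapping hands the request to ProxyFileForward (added below),
    // which rewrites it for the forwarded context as:
    //   /streamFile?filename=/user/foo/part-00000&ugi=<authorized ugi>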
Modified: hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileDataServlet.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileDataServlet.java?rev=758495&r1=758494&r2=758495&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileDataServlet.java (original)
+++ hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileDataServlet.java Thu Mar 26 01:41:57 2009
@@ -21,8 +21,12 @@
import java.net.URI;
import java.net.URISyntaxException;
+import javax.servlet.ServletContext;
+import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.hdfs.protocol.ClientProtocol;
import org.apache.hadoop.hdfs.server.namenode.FileDataServlet;
@@ -32,6 +36,15 @@
public class ProxyFileDataServlet extends FileDataServlet {
/** For java.io.Serializable */
private static final long serialVersionUID = 1L;
+
+ /** {@inheritDoc} */
+ @Override
+ public void init() throws ServletException {
+ ServletContext context = getServletContext();
+ if (context.getAttribute("name.conf") == null) {
+ context.setAttribute("name.conf", new Configuration());
+ }
+ }
/** {@inheritDoc} */
@Override
@@ -46,6 +59,8 @@
/** {@inheritDoc} */
@Override
protected UnixUserGroupInformation getUGI(HttpServletRequest request) {
- return (UnixUserGroupInformation) request.getAttribute("authorized.ugi");
+ String userID = (String) request.getAttribute("org.apache.hadoop.hdfsproxy.authorized.userID");
+ UnixUserGroupInformation ugi = ProxyUgiManager.getUgiForUser(userID);
+ return ugi;
}
}
Added: hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileForward.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileForward.java?rev=758495&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileForward.java (added)
+++ hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileForward.java Thu Mar 26 01:41:57 2009
@@ -0,0 +1,41 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfsproxy;
+
+import javax.servlet.http.HttpServletRequest;
+
+import org.apache.hadoop.security.UnixUserGroupInformation;
+
+
+public class ProxyFileForward extends ProxyForwardServlet {
+ /** For java.io.Serializable */
+ private static final long serialVersionUID = 1L;
+
+ /** {@inheritDoc} */
+ @Override
+ protected String buildForwardPath(HttpServletRequest request, String pathInfo) {
+ String path = "/streamFile";
+ path += "?filename=" + request.getPathInfo();
+ UnixUserGroupInformation ugi = (UnixUserGroupInformation) request.getAttribute("authorized.ugi");
+ if (ugi != null) {
+ path += "&ugi=" + ugi.toString();
+ }
+ return path;
+ }
+
+}
Modified: hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFilter.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFilter.java?rev=758495&r1=758494&r2=758495&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFilter.java (original)
+++ hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFilter.java Thu Mar 26 01:41:57 2009
@@ -61,14 +61,14 @@
.compile("^(/clearUgiCache)$");
/** Pattern for a filter to find out if a request is HFTP/HSFTP request */
protected static final Pattern HFTP_PATTERN = Pattern
- .compile("^(/listPaths|/data|/streamFile)$");
+ .compile("^(/listPaths|/data|/streamFile|/file)$");
/**
* Pattern for a filter to find out if an HFTP/HSFTP request stores its file
* path in the extra path information associated with the URL; if not, the
* file path is stored in request parameter "filename"
*/
protected static final Pattern FILEPATH_PATTERN = Pattern
- .compile("^(/listPaths|/data)$");
+ .compile("^(/listPaths|/data|/file)$");
private static volatile Map<String, Set<Path>> permsMap;
private static volatile Map<String, Set<BigInteger>> certsMap;
@@ -88,14 +88,16 @@
Configuration conf = new Configuration(false);
conf.addResource("hdfsproxy-default.xml");
conf.addResource("ssl-server.xml");
+ conf.addResource("hdfsproxy-site.xml");
String nn = conf.get("hdfsproxy.dfs.namenode.address");
if (nn == null) {
throw new ServletException("Proxy source cluster name node address not specified");
}
InetSocketAddress nAddr = NetUtils.createSocketAddr(nn);
context.setAttribute("name.node.address", nAddr);
- context.setAttribute("name.conf", new Configuration());
-
+ context.setAttribute("name.conf", new Configuration());
+
+ context.setAttribute("org.apache.hadoop.hdfsproxy.conf", conf);
LOG.info("proxyFilter initialization success: " + nn);
}
@@ -165,7 +167,7 @@
HttpServletRequest rqst = (HttpServletRequest) request;
HttpServletResponse rsp = (HttpServletResponse) response;
-
+
if (LOG.isDebugEnabled()) {
StringBuilder b = new StringBuilder("Request from ").append(
rqst.getRemoteHost()).append("/").append(rqst.getRemoteAddr())
@@ -264,7 +266,10 @@
userID = userID.substring(3);
String servletPath = rqst.getServletPath();
- if (unitTest) servletPath = rqst.getParameter("TestSevletPathInfo");
+ if (unitTest) {
+ servletPath = rqst.getParameter("TestSevletPathInfo");
+ LOG.info("this is for unit test purpose only");
+ }
if (HFTP_PATTERN.matcher(servletPath).matches()) {
// request is an HSFTP request
@@ -317,11 +322,16 @@
return;
}
rqst.setAttribute("authorized.ugi", ugi);
+ rqst.setAttribute("org.apache.hadoop.hdfsproxy.authorized.userID", userID);
} else if(rqst.getScheme().equalsIgnoreCase("http")) { // http request, set ugi for servlets, only for testing purposes
String ugi = rqst.getParameter("ugi");
if (ugi != null) {
rqst.setAttribute("authorized.ugi", new UnixUserGroupInformation(ugi
.split(",")));
+ String[] ugiStr = ugi.split(",");
+ if(ugiStr.length > 0) {
+ rqst.setAttribute("org.apache.hadoop.hdfsproxy.authorized.userID", ugiStr[0]);
+ }
}
}
chain.doFilter(request, response);
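
The net effect of the filter changes above: alongside the existing
authorized.ugi attribute, the filter now publishes the bare user ID, and each
proxied servlet rebuilds its UGI from that ID via ProxyUgiManager, presumably
so a forwarded request does not depend on an object created in another web
application's classloader. Condensed from the getUGI overrides in the servlet
diffs of this commit:

    // In each proxied servlet's getUGI:
    String userID = (String) request
        .getAttribute("org.apache.hadoop.hdfsproxy.authorized.userID");
    return ProxyUgiManager.getUgiForUser(userID);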
Added: hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyForwardServlet.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyForwardServlet.java?rev=758495&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyForwardServlet.java (added)
+++ hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyForwardServlet.java Thu Mar 26 01:41:57 2009
@@ -0,0 +1,99 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfsproxy;
+
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import java.io.IOException;
+import javax.servlet.ServletException;
+import javax.servlet.ServletContext;
+import javax.servlet.RequestDispatcher;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.security.UnixUserGroupInformation;
+
+/**
+ * A servlet that forwards requests to the web application serving the
+ * cluster version mapped to the requested hostname.
+ */
+public class ProxyForwardServlet extends HttpServlet {
+ /** For java.io.Serializable */
+ private static final long serialVersionUID = 1L;
+ private static Configuration configuration = null;
+ public static final Log LOG = LogFactory.getLog(ProxyForwardServlet.class);
+
+ /** {@inheritDoc} */
+ @Override
+ public void init() throws ServletException {
+ ServletContext context = getServletContext();
+ configuration = (Configuration) context.getAttribute("org.apache.hadoop.hdfsproxy.conf");
+ }
+
+ /** {@inheritDoc} */
+ @Override
+ public void doGet(HttpServletRequest request, HttpServletResponse response)
+ throws IOException, ServletException {
+ String hostname = request.getServerName();
+
+ String version = configuration.get(hostname);
+ if (version != null) {
+ ServletContext curContext = getServletContext();
+ ServletContext dstContext = curContext.getContext(version);
+
+ if (dstContext == null) {
+ LOG.info("Context non-exist or restricted from access: " + version);
+ response.sendError(HttpServletResponse.SC_NOT_FOUND);
+ return;
+ }
+ LOG.debug("Request to " + hostname + " is forwarded to version " +
version);
+ forwardRequest(request, response, dstContext, request.getServletPath());
+
+ } else {
+ LOG.info("not a valid context path");
+ response.sendError(HttpServletResponse.SC_NOT_IMPLEMENTED);
+ }
+ }
+ /** {@inheritDoc} */
+ public void forwardRequest(HttpServletRequest request, HttpServletResponse response, ServletContext context, String pathInfo)
+ throws IOException, ServletException {
+ String path = buildForwardPath(request, pathInfo);
+ RequestDispatcher dispatcher = context.getRequestDispatcher(path);
+ if (dispatcher == null) {
+ LOG.info("There was no such dispatcher");
+ response.sendError(HttpServletResponse.SC_NO_CONTENT);
+ return;
+ }
+ dispatcher.forward(request, response);
+ }
+
+ /** {@inheritDoc} */
+ protected String buildForwardPath(HttpServletRequest request, String pathInfo) {
+ String path = pathInfo;
+ if (request.getPathInfo() != null) {
+ path += request.getPathInfo();
+ }
+ if (request.getQueryString() != null) {
+ path += "?" + request.getQueryString();
+ }
+ return path;
+ }
+}
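
A note on the SC_NOT_FOUND branch above: ServletContext.getContext only
returns a foreign context when the container allows cross-context dispatch
(crossContext="true" in Tomcat), so a restricted container surfaces here as a
404. A minimal sketch of the version lookup, assuming the test mapping added
later in this commit (proxy-test -> /test):

    // Resolve the war context path for a proxied hostname.
    Configuration conf = new Configuration(false);
    conf.addResource("hdfsproxy-site.xml");
    String contextPath = conf.get("proxy-test");  // "/test"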
Modified: hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyListPathsServlet.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyListPathsServlet.java?rev=758495&r1=758494&r2=758495&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyListPathsServlet.java (original)
+++ hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyListPathsServlet.java Thu Mar 26 01:41:57 2009
@@ -17,8 +17,11 @@
*/
package org.apache.hadoop.hdfsproxy;
+import javax.servlet.ServletContext;
+import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
+import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.server.namenode.ListPathsServlet;
import org.apache.hadoop.security.UnixUserGroupInformation;
@@ -26,10 +29,21 @@
public class ProxyListPathsServlet extends ListPathsServlet {
/** For java.io.Serializable */
private static final long serialVersionUID = 1L;
+
+ /** {@inheritDoc} */
+ @Override
+ public void init() throws ServletException {
+ ServletContext context = getServletContext();
+ if (context.getAttribute("name.conf") == null) {
+ context.setAttribute("name.conf", new Configuration());
+ }
+ }
/** {@inheritDoc} */
@Override
protected UnixUserGroupInformation getUGI(HttpServletRequest request) {
- return (UnixUserGroupInformation) request.getAttribute("authorized.ugi");
+ String userID = (String) request.getAttribute("org.apache.hadoop.hdfsproxy.authorized.userID");
+ UnixUserGroupInformation ugi = ProxyUgiManager.getUgiForUser(userID);
+ return ugi;
}
}
Modified: hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyStreamFile.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyStreamFile.java?rev=758495&r1=758494&r2=758495&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyStreamFile.java (original)
+++ hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyStreamFile.java Thu Mar 26 01:41:57 2009
@@ -21,6 +21,7 @@
import java.net.InetSocketAddress;
import javax.servlet.ServletContext;
+import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import org.apache.hadoop.hdfs.DFSClient;
@@ -32,6 +33,14 @@
public class ProxyStreamFile extends StreamFile {
/** For java.io.Serializable */
private static final long serialVersionUID = 1L;
+ /** {@inheritDoc} */
+ @Override
+ public void init() throws ServletException {
+ ServletContext context = getServletContext();
+ if (context.getAttribute("name.conf") == null) {
+ context.setAttribute("name.conf", new Configuration());
+ }
+ }
/** {@inheritDoc} */
@Override
@@ -50,6 +59,8 @@
/** {@inheritDoc} */
@Override
protected UnixUserGroupInformation getUGI(HttpServletRequest request) {
- return (UnixUserGroupInformation) request.getAttribute("authorized.ugi");
+ String userID = (String) request.getAttribute("org.apache.hadoop.hdfsproxy.authorized.userID");
+ UnixUserGroupInformation ugi = ProxyUgiManager.getUgiForUser(userID);
+ return ugi;
}
}
Modified: hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyUtil.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyUtil.java?rev=758495&r1=758494&r2=758495&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyUtil.java (original)
+++ hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyUtil.java Thu Mar 26 01:41:57 2009
@@ -19,6 +19,8 @@
package org.apache.hadoop.hdfsproxy;
import java.io.IOException;
+import java.io.InputStream;
+import java.net.HttpURLConnection;
import java.net.InetSocketAddress;
import java.net.URI;
import java.net.URISyntaxException;
@@ -35,6 +37,10 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSInputStream;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.util.HostsFileReader;
@@ -46,7 +52,7 @@
public static final Log LOG = LogFactory.getLog(ProxyUtil.class);
private static enum UtilityOption {
- RELOAD("-reloadPermFiles"), CLEAR("-clearUgiCache");
+ RELOAD("-reloadPermFiles"), CLEAR("-clearUgiCache"), GET("-get");
private String name = null;
@@ -163,14 +169,48 @@
}
return true;
}
+
+
+ static FSDataInputStream open(Configuration conf, String hostname, int port, String path) throws IOException {
+ setupSslProps(conf);
+ HttpURLConnection connection = null;
+ connection = openConnection(hostname, port, path);
+ connection.connect();
+ final InputStream in = connection.getInputStream();
+ return new FSDataInputStream(new FSInputStream() {
+ public int read() throws IOException {
+ return in.read();
+ }
+ public int read(byte[] b, int off, int len) throws IOException {
+ return in.read(b, off, len);
+ }
+
+ public void close() throws IOException {
+ in.close();
+ }
+
+ public void seek(long pos) throws IOException {
+ throw new IOException("Can't seek!");
+ }
+ public long getPos() throws IOException {
+ throw new IOException("Position unknown!");
+ }
+ public boolean seekToNewSource(long targetPos) throws IOException {
+ return false;
+ }
+ });
+ }
public static void main(String[] args) throws Exception {
- if(args.length != 1 ||
+ if(args.length < 1 ||
(!UtilityOption.RELOAD.getName().equalsIgnoreCase(args[0])
- && !UtilityOption.CLEAR.getName().equalsIgnoreCase(args[0]))) {
+ && !UtilityOption.CLEAR.getName().equalsIgnoreCase(args[0])
+ && !UtilityOption.GET.getName().equalsIgnoreCase(args[0])) ||
+ (UtilityOption.GET.getName().equalsIgnoreCase(args[0]) && args.length != 4)) {
System.err.println("Usage: ProxyUtil ["
+ UtilityOption.RELOAD.getName() + "] | ["
- + UtilityOption.CLEAR.getName() + "]");
+ + UtilityOption.CLEAR.getName() + "] | ["
+ + UtilityOption.GET.getName() + " <hostname> <#port> <path> ]");
System.exit(0);
}
Configuration conf = new Configuration(false);
@@ -179,10 +219,17 @@
if (UtilityOption.RELOAD.getName().equalsIgnoreCase(args[0])) {
// reload user-certs.xml and user-permissions.xml files
- boolean error = sendCommand(conf, "/reloadPermFiles");
- } else {
+ sendCommand(conf, "/reloadPermFiles");
+ } else if (UtilityOption.CLEAR.getName().equalsIgnoreCase(args[0])) {
// clear UGI caches
- boolean error = sendCommand(conf, "/clearUgiCache");
+ sendCommand(conf, "/clearUgiCache");
+ } else {
+ String hostname = args[1];
+ int port = Integer.parseInt(args[2]);
+ String path = args[3];
+ InputStream in = open(conf, hostname, port, path);
+ IOUtils.copyBytes(in, System.out, conf, false);
+ in.close();
}
}
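
For reference, an illustrative invocation of the new -get option (the
hostname, port, and path here are hypothetical); it streams the remote file
over the proxy to stdout:

    // Equivalent to: ProxyUtil -get <hostname> <#port> <path>
    ProxyUtil.main(new String[] { "-get", "proxy.example.com", "8443",
        "/user/foo/part-00000" });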
Added: hadoop/core/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/SimpleServlet.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/SimpleServlet.java?rev=758495&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/SimpleServlet.java (added)
+++ hadoop/core/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/SimpleServlet.java Thu Mar 26 01:41:57 2009
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfsproxy;
+
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import java.io.PrintWriter;
+import java.io.IOException;
+
+
+/**
+ * A simple servlet for testing request forwarding.
+ */
+
+public class SimpleServlet extends HttpServlet {
+
+ /** For java.io.Serializable */
+ private static final long serialVersionUID = 1L;
+
+ public void doGet(HttpServletRequest request, HttpServletResponse response)
+ throws IOException {
+ response.setContentType("text/html");
+ PrintWriter out = response.getWriter();
+ out.print("<html><head/><body>");
+ out.print("A GET request");
+ out.print("</body></html>");
+ out.close();
+ return;
+ }
+
+}
Added: hadoop/core/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestProxyForwardServlet.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestProxyForwardServlet.java?rev=758495&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestProxyForwardServlet.java (added)
+++ hadoop/core/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestProxyForwardServlet.java Thu Mar 26 01:41:57 2009
@@ -0,0 +1,69 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfsproxy;
+
+import org.apache.cactus.ServletTestCase;
+import org.apache.cactus.WebRequest;
+import org.apache.cactus.WebResponse;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import java.io.IOException;
+import javax.servlet.ServletException;
+
+/** Unit tests for ProxyForwardServlet */
+public class TestProxyForwardServlet extends ServletTestCase {
+ public static final Log LOG = LogFactory.getLog(TestProxyForwardServlet.class);
+
+
+ public void beginDoGet(WebRequest theRequest) {
+ theRequest.setURL("proxy-test:0", null, "/simple", null, null);
+ }
+
+ public void testDoGet() throws IOException, ServletException {
+ ProxyForwardServlet servlet = new ProxyForwardServlet();
+
+ servlet.init(config);
+ servlet.doGet(request, response);
+ }
+
+ public void endDoGet(WebResponse theResponse)
+ throws IOException {
+ String expected = "<html><head/><body>A GET request</body></html>";
+ String result = theResponse.getText();
+
+ assertEquals(expected, result);
+ }
+
+
+ public void testForwardRequest() throws Exception {
+ ProxyForwardServlet servlet = new ProxyForwardServlet();
+
+ servlet.forwardRequest(request, response, config.getServletContext(), "/simple");
+ }
+
+ public void endForwardRequest(WebResponse theResponse) throws IOException {
+ String expected = "<html><head/><body>A GET request</body></html>";
+ String result = theResponse.getText();
+
+ assertEquals(expected, result);
+
+ }
+
+}
Modified: hadoop/core/trunk/src/contrib/hdfsproxy/src/test/resources/cactus-web.xml
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hdfsproxy/src/test/resources/cactus-web.xml?rev=758495&r1=758494&r2=758495&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hdfsproxy/src/test/resources/cactus-web.xml (original)
+++ hadoop/core/trunk/src/contrib/hdfsproxy/src/test/resources/cactus-web.xml Thu Mar 26 01:41:57 2009
@@ -57,10 +57,21 @@
<param-value>value1 used for testing</param-value>
</init-param>
</servlet>
-
+
+ <servlet>
+ <servlet-name>Simple</servlet-name>
+ <description> A Simple Servlet </description>
+ <servlet-class>org.apache.hadoop.hdfsproxy.SimpleServlet</servlet-class>
+ </servlet>
+
<servlet-mapping>
<servlet-name>ServletRedirector_TestOverride</servlet-name>
<url-pattern>/ServletRedirectorOverride</url-pattern>
</servlet-mapping>
+
+ <servlet-mapping>
+ <servlet-name>Simple</servlet-name>
+ <url-pattern>/simple/*</url-pattern>
+ </servlet-mapping>
</web-app>
Added: hadoop/core/trunk/src/contrib/hdfsproxy/src/test/resources/proxy-config/hdfsproxy-site.xml
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hdfsproxy/src/test/resources/proxy-config/hdfsproxy-site.xml?rev=758495&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hdfsproxy/src/test/resources/proxy-config/hdfsproxy-site.xml (added)
+++ hadoop/core/trunk/src/contrib/hdfsproxy/src/test/resources/proxy-config/hdfsproxy-site.xml Thu Mar 26 01:41:57 2009
@@ -0,0 +1,15 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<!-- Put hdfsproxy specific properties in this file. -->
+
+<configuration>
+<property>
+ <name>proxy-test</name>
+ <value>/test</value>
+ <description>one hostname corresponds to one web application archive
+ </description>
+</property>
+
+</configuration>
+
Modified: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java?rev=758495&r1=758494&r2=758495&view=diff
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java (original)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java Thu Mar 26 01:41:57 2009
@@ -91,26 +91,15 @@
nnAddr = NetUtils.createSocketAddr(name.toString());
}
- /** randomly pick one from all available IP addresses of a given hostname */
- protected String pickOneAddress(String hostname) throws UnknownHostException {
- if ("localhost".equals(hostname))
- return hostname;
- InetAddress[] addrs = InetAddress.getAllByName(hostname);
- if (addrs.length > 1)
- return addrs[ran.nextInt(addrs.length)].getHostAddress();
- return addrs[0].getHostAddress();
- }
@Override
public URI getUri() {
try {
- return new URI("hftp", null, pickOneAddress(nnAddr.getHostName()),
nnAddr.getPort(),
+ return new URI("hftp", null, nnAddr.getHostName(), nnAddr.getPort(),
null, null, null);
} catch (URISyntaxException e) {
return null;
- } catch (UnknownHostException e) {
- return null;
- }
+ }
}
/**
@@ -121,7 +110,7 @@
protected HttpURLConnection openConnection(String path, String query)
throws IOException {
try {
- final URL url = new URI("http", null,
pickOneAddress(nnAddr.getHostName()),
+ final URL url = new URI("http", null, nnAddr.getHostName(),
nnAddr.getPort(), path, query, null).toURL();
if (LOG.isTraceEnabled()) {
LOG.trace("url=" + url);
Modified: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HsftpFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HsftpFileSystem.java?rev=758495&r1=758494&r2=758495&view=diff
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HsftpFileSystem.java (original)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HsftpFileSystem.java Thu Mar 26 01:41:57 2009
@@ -69,7 +69,7 @@
protected HttpURLConnection openConnection(String path, String query)
throws IOException {
try {
- final URL url = new URI("https", null,
pickOneAddress(nnAddr.getHostName()),
+ final URL url = new URI("https", null, nnAddr.getHostName(),
nnAddr.getPort(), path, query, null).toURL();
HttpsURLConnection conn = (HttpsURLConnection)url.openConnection();
// bypass hostname verification
@@ -83,13 +83,11 @@
@Override
public URI getUri() {
try {
- return new URI("hsftp", null, pickOneAddress(nnAddr.getHostName()),
nnAddr.getPort(),
+ return new URI("hsftp", null, nnAddr.getHostName(), nnAddr.getPort(),
null, null, null);
} catch (URISyntaxException e) {
return null;
- } catch (UnknownHostException e) {
- return null;
- }
+ }
}
/**