
HDFS-447. Add LDAP lookup to hdfsproxy. Contributed by Zhiyong Zhang

git-svn-id: https://svn.apache.org/repos/asf/hadoop/hdfs/trunk@788898 13f79535-47bb-0310-9956-ffa450edef68
Christopher Douglas 16 years ago
parent
commit
124446ae27

+ 2 - 0
CHANGES.txt

@@ -7,6 +7,8 @@ Trunk (unreleased changes)
     HDFS-436. Introduce AspectJ framework for HDFS code and tests.
     (Konstantin Boudnik via szetszwo)
 
+    HDFS-447. Add LDAP lookup to hdfsproxy. (Zhiyong Zhang via cdouglas)
+
   IMPROVEMENTS
 
     HDFS-381. Remove blocks from DataNode maps when corresponding file

+ 55 - 29
src/contrib/hdfsproxy/build.xml

@@ -60,6 +60,7 @@
  	<and>
 	    <or>
 	    	<equals arg1="${testcase}" arg2="TestProxyFilter" />
+	    	<equals arg1="${testcase}" arg2="TestLdapIpDirFilter" />
 				<equals arg1="${testcase}" arg2="TestProxyUtil" />
 				<equals arg1="${testcase}" arg2="TestProxyForwardServlet" />
 				<not>
@@ -118,22 +119,21 @@
 	  <war destfile="${build.dir}/${final.name}.war" webxml="${basedir}/conf/tomcat-web.xml">
 	    <lib dir="${common.ivy.lib.dir}">
 	      <include name="commons-logging-${commons-logging.version}.jar"/>
-              <include name="junit-${junit.version}.jar"/>
-              <include name="log4j-${log4j.version}.jar"/>
-              <include name="slf4j-api-${slf4j-api.version}.jar"/>
-              <include name="slf4j-log4j12-${slf4j-log4j12.version}.jar"/>
-              <include name="xmlenc-${xmlenc.version}.jar"/>
-              <include name="core-${core.version}.jar"/> 
+        <include name="junit-${junit.version}.jar"/>
+        <include name="log4j-${log4j.version}.jar"/>
+        <include name="slf4j-api-${slf4j-api.version}.jar"/>
+        <include name="slf4j-log4j12-${slf4j-log4j12.version}.jar"/>
+        <include name="xmlenc-${xmlenc.version}.jar"/>
+        <include name="core-${core.vesion}.jar"/> 
+	    </lib>
+	    <lib dir="${hadoop.root}/lib">
+	    	<include name="hadoop-core-${hadoop-version}.jar"/>
 	    </lib>
-            <lib dir="${hadoop.root}/lib">
-              <include name="hadoop-mapred-tools-${hadoop-version}.jar"/>
-              <include name="hadoop-mapred-examples-${hadoop-version}.jar"/>
-              <include name="hadoop-mapred-test-${hadoop-version}.jar"/>
-              <include name="hadoop-core-test-${hadoop-version}.jar"/>
-              <include name="hadoop-core-${hadoop-version}.jar"/>
-              <include name="hadoop-mapred-test-${hadoop-version}.jar"/>
-            </lib>  
-	    <classes dir="${proxy.conf.dir}" excludes="**/*.example **/*.template **/*.sh hadoop-site.xml"/>
+	    <classes dir="${proxy.conf.dir}">
+	    	<include name="hdfsproxy-default.xml"/>
+	    	<include name="user-certs.xml"/>
+	    	<include name="user-permissions.xml"/>
+	    </classes>
 	    <classes dir="${build.classes}"/>
 	    <classes dir="${hadoop.root}/build/classes"/>
 	  </war>
@@ -153,40 +153,69 @@
         <include name="xmlenc-${xmlenc.version}.jar"/>
         <include name="xmlenc-${xmlenc.version}.jar"/>
         <include name="core-${core.vesion}.jar"/> 
         <include name="core-${core.vesion}.jar"/> 
 	    </lib>
 	    </lib>
-	    <classes dir="${proxy.conf.dir}" excludes="**/*.example **/*.template **/*.sh hadoop-site.xml"/>
+	    <lib dir="${hadoop.root}/lib">
+		<include name="hadoop-core-${hadoop-version}.jar"/>
+	    </lib>
+	    <classes dir="${proxy.conf.dir}">
+	    	<include name="hdfsproxy-default.xml"/>
+	    	<include name="hdfsproxy-site.xml"/>
+	    	<include name="user-certs.xml"/>
+	    	<include name="user-permissions.xml"/>
+	    </classes>
+	    <classes dir="${build.classes}"/>
+	    <classes dir="${hadoop.root}/build/classes"/>
+	  </war>
+	</target>
+	
+	<target name="testwar" depends="compile" description="Create testing war">
+		<echo>
+			Building the testing .war file 
+		</echo>
+	  <war destfile="${build.dir}/${final.name}-test.war" webxml="${src.test.resources}/tomcat-web.xml">
+	    <lib dir="${common.ivy.lib.dir}">
+	      <include name="commons-logging-${commons-logging.version}.jar"/>
+        <include name="junit-${junit.version}.jar"/>
+        <include name="log4j-${log4j.version}.jar"/>
+        <include name="slf4j-api-${slf4j-api.version}.jar"/>
+        <include name="slf4j-log4j12-${slf4j-log4j12.version}.jar"/>
+        <include name="xmlenc-${xmlenc.version}.jar"/>
+        <include name="core-${core.vesion}.jar"/> 
+	    </lib>
+	    <lib dir="${hadoop.root}/lib">
+	    	<include name="hadoop-core-${hadoop-version}.jar"/>
+	    </lib>
+	    <classes dir="${proxy.conf.test}" excludes="**/*.template **/*.sh"/>
 	    <classes dir="${build.classes}"/>
 	    <classes dir="${build.classes}"/>
 	    <classes dir="${hadoop.root}/build/classes"/>
 	    <classes dir="${hadoop.root}/build/classes"/>
 	  </war>
 	  </war>
 	</target>	
 	</target>	
 	
 	
-	<target name="cactifywar" depends="war,load-tasks,cactifywar-pure,cactifywar-clover" description="To include clover coverage test use -Dclover.home ..."/>
+	<target name="cactifywar" depends="testwar,load-tasks,cactifywar-pure,cactifywar-clover" description="To include clover coverage test use -Dclover.home ..."/>
 	
 	
-	<target name="cactifywar-pure" depends="war,load-tasks" unless="useClover">
+	<target name="cactifywar-pure" depends="testwar,load-tasks" unless="useClover">
 		<mkdir dir="${target.dir}" />
 		<mkdir dir="${target.dir}" />
 		<echo> no clover found ...</echo>
 		<echo> no clover found ...</echo>
-    <cactifywar srcfile="${build.dir}/${final.name}.war"
+    <cactifywar srcfile="${build.dir}/${final.name}-test.war"
         destfile="${target.dir}/${cactus.warfile.name}.war"
         destfile="${target.dir}/${cactus.warfile.name}.war"
         mergewebxml="${src.test.resources}/cactus-web.xml">
         mergewebxml="${src.test.resources}/cactus-web.xml">
       <servletredirector/>
       <servletredirector/>
       <servletredirector name="ServletRedirectorSecure"
       <servletredirector name="ServletRedirectorSecure"
           mapping="/ServletRedirectorSecure" roles="test"/>
           mapping="/ServletRedirectorSecure" roles="test"/>
-      <filterredirector mapping="/test/filterRedirector.jsp"/>
-      <classes dir="${proxy.conf.test}" excludes="**/*.template **/*.sh"/>
+      <filterredirector mapping="/test/filterRedirector.jsp"/>      
       <classes dir="${test.build.dir}"/>
       <classes dir="${test.build.dir}"/>
     </cactifywar>    	
     </cactifywar>    	
 	</target>
 	</target>
 
 
-	<target name="cactifywar-clover" depends="war,load-tasks" if="useClover">
+	<target name="cactifywar-clover" depends="testwar,load-tasks" if="useClover">
 		<mkdir dir="${target.dir}" />
 		<mkdir dir="${target.dir}" />
 		<echo> Including clover.jar in the war file ...</echo>
 		<echo> Including clover.jar in the war file ...</echo>
-    <cactifywar srcfile="${build.dir}/${final.name}.war"
+    <cactifywar srcfile="${build.dir}/${final.name}-test.war"
         destfile="${target.dir}/${cactus.warfile.name}.war"
         destfile="${target.dir}/${cactus.warfile.name}.war"
         mergewebxml="${src.test.resources}/cactus-web.xml">
         mergewebxml="${src.test.resources}/cactus-web.xml">
       <servletredirector/>
       <servletredirector/>
       <servletredirector name="ServletRedirectorSecure"
       <servletredirector name="ServletRedirectorSecure"
           mapping="/ServletRedirectorSecure" roles="test"/>
           mapping="/ServletRedirectorSecure" roles="test"/>
       <filterredirector mapping="/test/filterRedirector.jsp"/>
       <filterredirector mapping="/test/filterRedirector.jsp"/>
-      <classes dir="${proxy.conf.test}" excludes="**/*.template **/*.sh"/>
       <classes dir="${test.build.dir}"/>
       <classes dir="${test.build.dir}"/>
       <lib dir="${clover.home}/lib">
       <lib dir="${clover.home}/lib">
       	<include name="clover.jar"/> 
       	<include name="clover.jar"/> 
@@ -224,6 +253,7 @@
             <fileset dir="${src.test}">
             <fileset dir="${src.test}">
             	<include name="**/${testcase}.java"/>
             	<include name="**/${testcase}.java"/>
             	<exclude name="**/TestProxyFilter.java"/>
             	<exclude name="**/TestProxyFilter.java"/>
+            	<exclude name="**/TestLdapIpDirFilter.java"/>
             	<exclude name="**/TestProxyUtil.java"/>
             	<exclude name="**/TestProxyUtil.java"/>
             	<exclude name="**/TestProxyForwardServlet.java"/>
             	<exclude name="**/TestProxyForwardServlet.java"/>
             </fileset>
             </fileset>
@@ -267,7 +297,6 @@
 			<classpath>
 				<path refid="cactus.classpath"/>
 				<pathelement location="${build.classes}"/>
-				<pathelement location="${proxy.conf.dir}"/>
 				<pathelement location="${src.test.resources}"/>
 				<pathelement location="${src.test.resources}/proxy-config"/>
 			</classpath>			
@@ -448,15 +477,12 @@
   	<pathelement location="${proxy.conf.test}" />
   	<pathelement location="${proxy.conf.test}" />
     <pathelement location="${test.build.dir}" />
     <pathelement location="${test.build.dir}" />
     <pathelement location="${hadoop.root}/build/test/classes"/>
     <pathelement location="${hadoop.root}/build/test/classes"/>
-    <pathelement location="${hadoop.root}/lib/hadoop-core-test-${hadoop-version}.jar"/>
-    <pathelement location="${hadoop.root}/build/test/hdfs/classes"/>
-    <pathelement location="${hadoop.root}/lib/hadoop-mapred-test-${hadoop-version}.jar"/>
     <!--<pathelement location="${hadoop.root}/src/contrib/test"/>-->
     <!--<pathelement location="${hadoop.root}/src/contrib/test"/>-->
     <pathelement location="${hadoop.root}/conf"/>
     <pathelement location="${hadoop.root}/conf"/>
     <pathelement location="${hadoop.root}/build"/>
     <pathelement location="${hadoop.root}/build"/>
     <pathelement location="${hadoop.root}/build/classes"/>
     <pathelement location="${hadoop.root}/build/classes"/>
+    <pathelement location="${hadoop.root}/build/tools"/>
     <pathelement location="${build.examples}"/>
     <pathelement location="${build.examples}"/>
-    <pathelement location="${hadoop.root}/lib/hadoop-mapred-tools-${hadoop-version}.jar"/>
     <pathelement path="${clover.jar}"/>
     <pathelement path="${clover.jar}"/>
     <path refid="contrib-classpath"/>
     <path refid="contrib-classpath"/>
   </path>
   </path>

+ 46 - 0
src/contrib/hdfsproxy/conf/hdfsproxy-default.xml

@@ -55,5 +55,51 @@
   </description>
 </property>
 
+<property>
+  <name>hdfsproxy.ldap.initial.context.factory</name>
+  <value>com.sun.jndi.ldap.LdapCtxFactory</value>
+  <description> ldap initial context factory
+  </description>
+</property>
+
+<property>
+  <name>hdfsproxy.ldap.provider.url</name>
+  <value>ldap://localhost:389</value>
+  <description> ldap server address
+  </description>
+</property>
+
+<property>
+  <name>hdfsproxy.ldap.role.base</name>
+  <value>ou=proxyroles,dc=mycompany,dc=com</value>
+  <description> ldap role base
+  </description>
+</property>
+
+<property>
+    <name>fs.default.name</name>
+    <!-- cluster variant -->
+    <value>hdfs://localhost:54321</value>
+    <description>The name of the default file system.  Either the
+  literal string "local" or a host:port for NDFS.</description>
+    <final>true</final>
+  </property>
+
+<property>
+  <name>dfs.block.size</name>
+  <value>134217728</value>
+  <description>The default block size for new files.</description>
+</property>
+
+<property>
+    <name>io.file.buffer.size</name>
+    <value>131072</value>
+    <description>The size of buffer for use in sequence files.
+  The size of this buffer should probably be a multiple of hardware
+  page size (4096 on Intel x86), and it determines how much data is
+  buffered during read and write operations.</description>
+</property>
+
+
 </configuration>
 

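The properties above are consumed through Hadoop's Configuration API. A minimal sketch of how a consumer of this file picks them up, mirroring what LdapIpDirFilter.init() in this change does (the class and method names of the sketch itself are illustrative only):

import java.util.Hashtable;
import javax.naming.NamingException;
import javax.naming.ldap.InitialLdapContext;
import org.apache.hadoop.conf.Configuration;

public class LdapConfigSketch {
  public static InitialLdapContext connect() throws NamingException {
    // Load the proxy configuration; hdfsproxy-site.xml overrides the defaults.
    Configuration conf = new Configuration(false);
    conf.addResource("hdfsproxy-default.xml");
    conf.addResource("hdfsproxy-site.xml");

    // Build the JNDI environment from the hdfsproxy.ldap.* properties.
    Hashtable<String, String> env = new Hashtable<String, String>();
    env.put(InitialLdapContext.INITIAL_CONTEXT_FACTORY, conf.get(
        "hdfsproxy.ldap.initial.context.factory",
        "com.sun.jndi.ldap.LdapCtxFactory"));
    env.put(InitialLdapContext.PROVIDER_URL, conf
        .get("hdfsproxy.ldap.provider.url"));
    return new InitialLdapContext(env, null);
  }
}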
+ 7 - 8
src/contrib/hdfsproxy/conf/tomcat-forward-web.xml

@@ -38,21 +38,19 @@
         and comments about this application should be addressed.
       </description>
     </context-param>
-    
+          
     <filter>
-	   	<filter-name>proxyFilter</filter-name>
-	   	<filter-class>org.apache.hadoop.hdfsproxy.ProxyFilter</filter-class>
-	   	<init-param>
-	      <param-name>filteraddress</param-name>
-	      <param-value>10</param-value>
-	   	</init-param>
+	   	<filter-name>ldapIpDirFilter</filter-name>
+	   	<filter-class>org.apache.hadoop.hdfsproxy.LdapIpDirFilter</filter-class>
 		</filter>
 
 		<filter-mapping>
-        <filter-name>proxyFilter</filter-name>
+        <filter-name>ldapIpDirFilter</filter-name>
 				<url-pattern>/*</url-pattern>
     </filter-mapping>
 
+
+
     
     <servlet>
     	<servlet-name>proxyForward</servlet-name>
@@ -84,6 +82,7 @@
         <url-pattern>/file/*</url-pattern>
     </servlet-mapping>
     
+    
 
 		<welcome-file-list>
 		  <welcome-file>index.html</welcome-file>

+ 3 - 8
src/contrib/hdfsproxy/conf/tomcat-web.xml

@@ -58,21 +58,16 @@
     </context-param>
     
     <filter>
-	   	<filter-name>proxyFilter</filter-name>
-	   	<filter-class>org.apache.hadoop.hdfsproxy.ProxyFilter</filter-class>
-	   	<init-param>
-	      <param-name>filteraddress</param-name>
-	      <param-value>10</param-value>
-	   	</init-param>
+	   	<filter-name>ldapIpDirFilter</filter-name>
+	   	<filter-class>org.apache.hadoop.hdfsproxy.LdapIpDirFilter</filter-class>
 		</filter>
 
 		<filter-mapping>
-        <filter-name>proxyFilter</filter-name>
+        <filter-name>ldapIpDirFilter</filter-name>
 				<url-pattern>/*</url-pattern>
     </filter-mapping>
     	
 
-
     <!-- Servlet definitions for the servlets that make up
          your web application, including initialization
          parameters.  With Tomcat, you can also send requests

+ 262 - 0
src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/LdapIpDirFilter.java

@@ -0,0 +1,262 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfsproxy;
+
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.util.ArrayList;
+import java.util.Hashtable;
+import java.util.regex.Pattern;
+
+import javax.naming.NamingEnumeration;
+import javax.naming.NamingException;
+import javax.naming.directory.Attribute;
+import javax.naming.directory.Attributes;
+import javax.naming.directory.BasicAttribute;
+import javax.naming.directory.BasicAttributes;
+import javax.naming.directory.SearchResult;
+import javax.naming.ldap.InitialLdapContext;
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletContext;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.UnixUserGroupInformation;
+
+public class LdapIpDirFilter implements Filter {
+  public static final Log LOG = LogFactory.getLog(LdapIpDirFilter.class);
+
+  private static String baseName;
+  private static String hdfsIpSchemaStr;
+  private static String hdfsIpSchemaStrPrefix;
+  private static String hdfsUidSchemaStr;
+  private static String hdfsGroupSchemaStr;
+  private static String hdfsPathSchemaStr;
+
+  private InitialLdapContext lctx;
+  private String userId;
+  private String groupName;
+  private ArrayList<String> paths;
+
+  /** Pattern for a filter to find out if a request is HFTP/HSFTP request */
+  protected static final Pattern HFTP_PATTERN = Pattern
+      .compile("^(/listPaths|/data|/streamFile|/file)$");
+  /**
+   * Pattern for a filter to find out if an HFTP/HSFTP request stores its file
+   * path in the extra path information associated with the URL; if not, the
+   * file path is stored in request parameter "filename"
+   */
+  protected static final Pattern FILEPATH_PATTERN = Pattern
+      .compile("^(/listPaths|/data|/file)$");
+
+  public void initialize(String bName, InitialLdapContext ctx) {
+    // hook to cooperate unit test
+    baseName = bName;
+    hdfsIpSchemaStr = "uniqueMember";
+    hdfsIpSchemaStrPrefix = "cn=";
+    hdfsUidSchemaStr = "uid";
+    hdfsGroupSchemaStr = "userClass";
+    hdfsPathSchemaStr = "documentLocation";
+    lctx = ctx;
+    paths = new ArrayList<String>();
+  }
+
+  /** {@inheritDoc} */
+  public void init(FilterConfig filterConfig) throws ServletException {
+    ServletContext context = filterConfig.getServletContext();
+    Configuration conf = new Configuration(false);
+    conf.addResource("hdfsproxy-default.xml");
+    conf.addResource("hdfsproxy-site.xml");
+    // extract namenode from source conf.
+    String nn = conf.get("fs.default.name");
+    if (nn == null) {
+      throw new ServletException(
+          "Proxy source cluster name node address not speficied");
+    }
+    InetSocketAddress nAddr = NetUtils.createSocketAddr(nn);
+    context.setAttribute("name.node.address", nAddr);
+    context.setAttribute("name.conf", conf);
+
+    // for storing hostname <--> cluster mapping to decide which source cluster
+    // to forward
+    context.setAttribute("org.apache.hadoop.hdfsproxy.conf", conf);
+
+    if (lctx == null) {
+      Hashtable<String, String> env = new Hashtable<String, String>();
+      env.put(InitialLdapContext.INITIAL_CONTEXT_FACTORY, conf.get(
+          "hdfsproxy.ldap.initial.context.factory",
+          "com.sun.jndi.ldap.LdapCtxFactory"));
+      env.put(InitialLdapContext.PROVIDER_URL, conf
+          .get("hdfsproxy.ldap.provider.url"));
+
+      try {
+        lctx = new InitialLdapContext(env, null);
+      } catch (NamingException ne) {
+        throw new ServletException("NamingException in initializing ldap"
+            + ne.toString());
+      }
+
+      baseName = conf.get("hdfsproxy.ldap.role.base");
+      hdfsIpSchemaStr = conf.get("hdfsproxy.ldap.ip.schema.string",
+          "uniqueMember");
+      hdfsIpSchemaStrPrefix = conf.get(
+          "hdfsproxy.ldap.ip.schema.string.prefix", "cn=");
+      hdfsUidSchemaStr = conf.get("hdfsproxy.ldap.uid.schema.string", "uid");
+      hdfsGroupSchemaStr = conf.get("hdfsproxy.ldap.group.schema.string",
+          "userClass");
+      hdfsPathSchemaStr = conf.get("hdfsproxy.ldap.hdfs.path.schema.string",
+          "documentLocation");
+      paths = new ArrayList<String>();
+    }
+    LOG.info("LdapIpDirFilter initialization success: " + nn);
+  }
+
+  /** {@inheritDoc} */
+  public void destroy() {
+  }
+
+  /** {@inheritDoc} */
+  public void doFilter(ServletRequest request, ServletResponse response,
+      FilterChain chain) throws IOException, ServletException {
+
+    HttpServletRequest rqst = (HttpServletRequest) request;
+    HttpServletResponse rsp = (HttpServletResponse) response;
+
+    if (LOG.isDebugEnabled()) {
+      StringBuilder b = new StringBuilder("Request from ").append(
+          rqst.getRemoteHost()).append("/").append(rqst.getRemoteAddr())
+          .append(":").append(rqst.getRemotePort());
+      b.append("\n The Scheme is " + rqst.getScheme());
+      b.append("\n The Path Info is " + rqst.getPathInfo());
+      b.append("\n The Translated Path Info is " + rqst.getPathTranslated());
+      b.append("\n The Context Path is " + rqst.getContextPath());
+      b.append("\n The Query String is " + rqst.getQueryString());
+      b.append("\n The Request URI is " + rqst.getRequestURI());
+      b.append("\n The Request URL is " + rqst.getRequestURL());
+      b.append("\n The Servlet Path is " + rqst.getServletPath());
+      LOG.debug(b.toString());
+    }
+    // check ip address
+    String userIp = rqst.getRemoteAddr();
+    boolean isAuthorized = false;
+    try {
+      isAuthorized = checkUserIp(userIp);
+      if (!isAuthorized) {
+        rsp.sendError(HttpServletResponse.SC_FORBIDDEN,
+            "IP not authorized to access");
+        return;
+      }
+    } catch (NamingException ne) {
+      throw new IOException("NameingException in searching ldap"
+          + ne.toString());
+    }
+    // check request path
+    String servletPath = rqst.getServletPath();
+    if (HFTP_PATTERN.matcher(servletPath).matches()) {
+      // request is an HSFTP request
+      if (FILEPATH_PATTERN.matcher(servletPath).matches()) {
+        // file path as part of the URL
+        isAuthorized = checkHdfsPath(rqst.getPathInfo() != null ? rqst
+            .getPathInfo() : "/");
+      } else {
+        // file path is stored in "filename" parameter
+        isAuthorized = checkHdfsPath(rqst.getParameter("filename"));
+      }
+    }
+    if (!isAuthorized) {
+      rsp.sendError(HttpServletResponse.SC_FORBIDDEN,
+          "User not authorized to access path");
+      return;
+    }
+    UnixUserGroupInformation ugi = new UnixUserGroupInformation(userId,
+        groupName.split(","));
+    rqst.setAttribute("authorized.ugi", ugi);
+    // since we cannot pass ugi object cross context as they are from different
+    // classloaders in different war file, we have to use String attribute.
+    rqst.setAttribute("org.apache.hadoop.hdfsproxy.authorized.userID", userId);
+    rqst.setAttribute("org.apache.hadoop.hdfsproxy.authorized.role", groupName);
+    LOG.info("User: " + userId + " (" + groupName + ") Request: "
+        + rqst.getPathInfo() + " From: " + rqst.getRemoteAddr());
+    chain.doFilter(request, response);
+  }
+
+  /** check that client's ip is listed in the Ldap Roles */
+  @SuppressWarnings("unchecked")
+  private boolean checkUserIp(String userIp) throws NamingException {
+    String ipMember = hdfsIpSchemaStrPrefix + userIp;
+    Attributes matchAttrs = new BasicAttributes(true);
+    matchAttrs.put(new BasicAttribute(hdfsIpSchemaStr, ipMember));
+    matchAttrs.put(new BasicAttribute(hdfsUidSchemaStr));
+    matchAttrs.put(new BasicAttribute(hdfsPathSchemaStr));
+
+    String[] attrIDs = { hdfsUidSchemaStr, hdfsGroupSchemaStr,
+        hdfsPathSchemaStr };
+
+    NamingEnumeration<SearchResult> results = lctx.search(baseName, matchAttrs,
+        attrIDs);
+    if (results.hasMore()) {
+      SearchResult sr = results.next();
+      Attributes attrs = sr.getAttributes();
+      for (NamingEnumeration ne = attrs.getAll(); ne.hasMore();) {
+        Attribute attr = (Attribute) ne.next();
+        if (hdfsUidSchemaStr.equalsIgnoreCase(attr.getID())) {
+          userId = (String) attr.get();
+        } else if (hdfsGroupSchemaStr.equalsIgnoreCase(attr.getID())) {
+          groupName = (String) attr.get();
+        } else if (hdfsPathSchemaStr.equalsIgnoreCase(attr.getID())) {
+          for (NamingEnumeration e = attr.getAll(); e.hasMore();) {
+            paths.add((String) e.next());
+          }
+        }
+      }
+      return true;
+    }
+    LOG.info("Ip address " + userIp
+        + " is not authorized to access the proxy server");
+    return false;
+  }
+
+  /** check that the requested path is listed in the ldap entry */
+  private boolean checkHdfsPath(String pathInfo) {
+    if (pathInfo == null || pathInfo.length() == 0) {
+      LOG.info("Can't get file path from the request");
+      return false;
+    }
+    Path userPath = new Path(pathInfo);
+    while (userPath != null) {
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("\n Checking file path " + userPath);
+      }
+      if (paths.contains(userPath.toString()))
+        return true;
+      userPath = userPath.getParent();
+    }
+    LOG.info("User " + userId + " is not authorized to access " + pathInfo);
+    return false;
+  }
+}

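With the default schema strings, the filter authorizes a client when its role entry carries a uniqueMember value of "cn=<client ip>" plus uid, userClass and documentLocation attributes. A sketch of such an entry, built with the same javax.naming types the filter reads back (the values match the commit's own DummyLdapContext test fixture and are illustrative):

import javax.naming.directory.BasicAttribute;
import javax.naming.directory.BasicAttributes;

public class LdapEntrySketch {
  public static BasicAttributes proxyRoleEntry() {
    // Entry under ou=proxyroles,dc=mycompany,dc=com, keyed by client IP.
    BasicAttributes attrs = new BasicAttributes(true);
    attrs.put(new BasicAttribute("uniqueMember", "cn=127.0.0.1"));
    attrs.put(new BasicAttribute("uid", "testuser"));              // becomes userId
    attrs.put(new BasicAttribute("userClass", "testgroup"));       // becomes groupName
    attrs.put(new BasicAttribute("documentLocation", "/testdir")); // permitted HDFS path
    return attrs;
  }
}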
+ 14 - 6
src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileDataServlet.java

@@ -24,7 +24,6 @@ import java.net.URISyntaxException;
 import javax.servlet.ServletContext;
 import javax.servlet.ServletException;
 import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
@@ -36,14 +35,14 @@ import org.apache.hadoop.security.UnixUserGroupInformation;
 public class ProxyFileDataServlet extends FileDataServlet {
   /** For java.io.Serializable */
   private static final long serialVersionUID = 1L;
-  
+
   /** {@inheritDoc} */
   @Override
   public void init() throws ServletException {
     ServletContext context = getServletContext();
-    if (context.getAttribute("name.conf") == null) { 
+    if (context.getAttribute("name.conf") == null) {
       context.setAttribute("name.conf", new Configuration());
-    }    
+    }
   }
 
   /** {@inheritDoc} */
@@ -59,8 +58,17 @@ public class ProxyFileDataServlet extends FileDataServlet {
   /** {@inheritDoc} */
   @Override
   protected UnixUserGroupInformation getUGI(HttpServletRequest request) {
-    String userID = (String) request.getAttribute("org.apache.hadoop.hdfsproxy.authorized.userID");
-    UnixUserGroupInformation ugi = ProxyUgiManager.getUgiForUser(userID);
+    String userID = (String) request
+        .getAttribute("org.apache.hadoop.hdfsproxy.authorized.userID");
+    String groupName = (String) request
+        .getAttribute("org.apache.hadoop.hdfsproxy.authorized.role");
+    UnixUserGroupInformation ugi;
+    if (groupName != null) {
+      // get group info from ldap
+      ugi = new UnixUserGroupInformation(userID, groupName.split(","));
+    } else {// stronger ugi management
+      ugi = ProxyUgiManager.getUgiForUser(userID);
+    }
     return ugi;
   }
 }

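The same getUGI() pattern recurs in ProxyListPathsServlet and ProxyStreamFile below: the filter stores plain String attributes on the request (UGI objects cannot cross servlet contexts, since each war has its own classloader), and each servlet rebuilds the UGI from them. A condensed sketch of the two halves of that handoff (the sketch class and method names are illustrative):

import javax.servlet.http.HttpServletRequest;
import org.apache.hadoop.security.UnixUserGroupInformation;

public class UgiHandoffSketch {
  // Filter side: publish the LDAP lookup results as String attributes.
  static void publish(HttpServletRequest rqst, String userId, String groups) {
    rqst.setAttribute("org.apache.hadoop.hdfsproxy.authorized.userID", userId);
    rqst.setAttribute("org.apache.hadoop.hdfsproxy.authorized.role", groups);
  }

  // Servlet side: rebuild the UGI from the comma-separated group list.
  static UnixUserGroupInformation rebuild(HttpServletRequest rqst) {
    String userID = (String) rqst
        .getAttribute("org.apache.hadoop.hdfsproxy.authorized.userID");
    String groupName = (String) rqst
        .getAttribute("org.apache.hadoop.hdfsproxy.authorized.role");
    return new UnixUserGroupInformation(userID, groupName.split(","));
  }
}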
+ 36 - 31
src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyForwardServlet.java

@@ -17,17 +17,18 @@
  */
 package org.apache.hadoop.hdfsproxy;
 
+import java.io.IOException;
+
+import javax.servlet.RequestDispatcher;
+import javax.servlet.ServletContext;
+import javax.servlet.ServletException;
 import javax.servlet.http.HttpServlet;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
-import java.io.IOException;
-import javax.servlet.ServletException;
-import javax.servlet.ServletContext;
-import javax.servlet.RequestDispatcher;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.security.UnixUserGroupInformation;
 
 /**
  * 
@@ -40,51 +41,55 @@ public class ProxyForwardServlet extends HttpServlet {
   private static final long serialVersionUID = 1L;
   private static Configuration configuration = null;
   public static final Log LOG = LogFactory.getLog(ProxyForwardServlet.class);
-  
+
   /** {@inheritDoc} */
   @Override
   public void init() throws ServletException {
     ServletContext context = getServletContext();
-    configuration = (Configuration) context.getAttribute("org.apache.hadoop.hdfsproxy.conf");
+    configuration = (Configuration) context
+        .getAttribute("org.apache.hadoop.hdfsproxy.conf");
   }
-  
+
   /** {@inheritDoc} */
   @Override
   public void doGet(HttpServletRequest request, HttpServletResponse response)
-    throws IOException, ServletException {  
-    String hostname = request.getServerName(); 
-    
+      throws IOException, ServletException {
+    String hostname = request.getServerName();
+
     String version = configuration.get(hostname);
-    if (version != null) {
-      ServletContext curContext = getServletContext();
-      ServletContext dstContext = curContext.getContext(version);
-      
-      if (dstContext == null) {
-        LOG.info("Context non-exist or restricted from access: " + version);
-        response.sendError(HttpServletResponse.SC_NOT_FOUND);
-        return;
-      }
-      LOG.debug("Request to " + hostname + " is forwarded to version " + version);
-      forwardRequest(request, response, dstContext, request.getServletPath());
+    if (version == null) {
+      // extract from hostname directly
+      String[] strs = hostname.split("[-\\.]");
+      version = "/" + strs[0];
+    }
+
+    ServletContext curContext = getServletContext();
+    ServletContext dstContext = curContext.getContext(version);
 
-    } else {
-      LOG.info("not a valid context path");
-      response.sendError(HttpServletResponse.SC_NOT_IMPLEMENTED); 
+    if (dstContext == null) {
+      LOG.info("Context non-exist or restricted from access: " + version);
+      response.sendError(HttpServletResponse.SC_NOT_FOUND);
+      return;
     }
-  } 
+    LOG.debug("Request to " + hostname + " is forwarded to version " + version);
+    forwardRequest(request, response, dstContext, request.getServletPath());
+
+  }
+
   /** {@inheritDoc} */
-  public void forwardRequest(HttpServletRequest request, HttpServletResponse response, ServletContext context, String pathInfo) 
-    throws IOException, ServletException{
-    String path = buildForwardPath(request, pathInfo);    
+  public void forwardRequest(HttpServletRequest request,
+      HttpServletResponse response, ServletContext context, String pathInfo)
+      throws IOException, ServletException {
+    String path = buildForwardPath(request, pathInfo);
     RequestDispatcher dispatcher = context.getRequestDispatcher(path);
     if (dispatcher == null) {
-      LOG.info("There was no such dispatcher");
+      LOG.info("There was no such dispatcher: " + path);
       response.sendError(HttpServletResponse.SC_NO_CONTENT);
       return;
     }
     dispatcher.forward(request, response);
   }
-  
+
   /** {@inheritDoc} */
   protected String buildForwardPath(HttpServletRequest request, String pathInfo) {
     String path = pathInfo;

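The fallback added to doGet() derives the target context from the first hostname label, so a request to a host named "v3-proxy.example.com" (a hypothetical name) would forward to servlet context "/v3" when no explicit hostname-to-version mapping is configured. A sketch of just that derivation:

public class VersionFromHostnameSketch {
  public static void main(String[] args) {
    String hostname = "v3-proxy.example.com"; // illustrative hostname
    // Split on '-' or '.' and take the first label, as the servlet does.
    String[] strs = hostname.split("[-\\.]");
    String version = "/" + strs[0];
    System.out.println(version); // prints "/v3"
  }
}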
+ 14 - 5
src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyListPathsServlet.java

@@ -29,21 +29,30 @@ import org.apache.hadoop.security.UnixUserGroupInformation;
 public class ProxyListPathsServlet extends ListPathsServlet {
   /** For java.io.Serializable */
   private static final long serialVersionUID = 1L;
-  
+
   /** {@inheritDoc} */
   @Override
   public void init() throws ServletException {
     ServletContext context = getServletContext();
-    if (context.getAttribute("name.conf") == null) { 
+    if (context.getAttribute("name.conf") == null) {
       context.setAttribute("name.conf", new Configuration());
-    }    
+    }
   }
 
   /** {@inheritDoc} */
   @Override
   protected UnixUserGroupInformation getUGI(HttpServletRequest request) {
-    String userID = (String) request.getAttribute("org.apache.hadoop.hdfsproxy.authorized.userID");
-    UnixUserGroupInformation ugi = ProxyUgiManager.getUgiForUser(userID);
+    String userID = (String) request
+        .getAttribute("org.apache.hadoop.hdfsproxy.authorized.userID");
+    String groupName = (String) request
+        .getAttribute("org.apache.hadoop.hdfsproxy.authorized.role");
+    UnixUserGroupInformation ugi;
+    if (groupName != null) {
+      // group info stored in ldap
+      ugi = new UnixUserGroupInformation(userID, groupName.split(","));
+    } else {// stronger ugi management
+      ugi = ProxyUgiManager.getUgiForUser(userID);
+    }
     return ugi;
   }
 }

+ 16 - 5
src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyStreamFile.java

@@ -24,22 +24,23 @@ import javax.servlet.ServletContext;
 import javax.servlet.ServletException;
 import javax.servlet.http.HttpServletRequest;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSClient;
 import org.apache.hadoop.hdfs.server.namenode.StreamFile;
 import org.apache.hadoop.security.UnixUserGroupInformation;
-import org.apache.hadoop.conf.Configuration;
 
 /** {@inheritDoc} */
 public class ProxyStreamFile extends StreamFile {
   /** For java.io.Serializable */
   private static final long serialVersionUID = 1L;
+
   /** {@inheritDoc} */
   @Override
   public void init() throws ServletException {
     ServletContext context = getServletContext();
-    if (context.getAttribute("name.conf") == null) { 
+    if (context.getAttribute("name.conf") == null) {
       context.setAttribute("name.conf", new Configuration());
-    }    
+    }
   }
 
   /** {@inheritDoc} */
@@ -59,8 +60,18 @@ public class ProxyStreamFile extends StreamFile {
   /** {@inheritDoc} */
   @Override
   protected UnixUserGroupInformation getUGI(HttpServletRequest request) {
-    String userID = (String) request.getAttribute("org.apache.hadoop.hdfsproxy.authorized.userID");
-    UnixUserGroupInformation ugi = ProxyUgiManager.getUgiForUser(userID);
+    String userID = (String) request
+        .getAttribute("org.apache.hadoop.hdfsproxy.authorized.userID");
+    String groupName = (String) request
+        .getAttribute("org.apache.hadoop.hdfsproxy.authorized.role");
+    UnixUserGroupInformation ugi;
+    if (groupName != null) {
+      // get group info from ldap
+      ugi = new UnixUserGroupInformation(userID, groupName.split(","));
+    } else {// stronger ugi management
+      ugi = ProxyUgiManager.getUgiForUser(userID);
+    }
     return ugi;
   }
+
 }

+ 142 - 75
src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyUtil.java

@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hdfsproxy;
 
+import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.net.HttpURLConnection;
@@ -25,13 +26,20 @@ import java.net.InetSocketAddress;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.URL;
+import java.security.KeyStore;
 import java.security.cert.X509Certificate;
 import java.util.Date;
 import java.util.Set;
 
-import javax.net.ssl.HttpsURLConnection;
 import javax.net.ssl.HostnameVerifier;
+import javax.net.ssl.HttpsURLConnection;
+import javax.net.ssl.KeyManager;
+import javax.net.ssl.KeyManagerFactory;
+import javax.net.ssl.SSLContext;
 import javax.net.ssl.SSLSession;
+import javax.net.ssl.TrustManager;
+import javax.net.ssl.TrustManagerFactory;
+import javax.net.ssl.X509TrustManager;
 import javax.servlet.http.HttpServletResponse;
 
 import org.apache.commons.logging.Log;
@@ -43,17 +51,19 @@ import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.util.HostsFileReader;
 
-
 /**
  * Proxy Utility .
  */
 public class ProxyUtil {
   public static final Log LOG = LogFactory.getLog(ProxyUtil.class);
   private static final long MM_SECONDS_PER_DAY = 1000 * 60 * 60 * 24;
-  private static final int CERT_EXPIRATION_WARNING_THRESHOLD = 30; // 30 days warning
-  
+  private static final int CERT_EXPIRATION_WARNING_THRESHOLD = 30; // 30 days
+
+  // warning
+
   private static enum UtilityOption {
-    RELOAD("-reloadPermFiles"), CLEAR("-clearUgiCache"), GET("-get"), CHECKCERTS("-checkcerts");
+    RELOAD("-reloadPermFiles"), CLEAR("-clearUgiCache"), GET("-get"), CHECKCERTS(
+        "-checkcerts");
 
     private String name = null;
 
@@ -65,7 +75,7 @@ public class ProxyUtil {
       return name;
     }
   }
-  
+
   /**
    * Dummy hostname verifier that is used to bypass hostname checking
    */
@@ -75,6 +85,21 @@ public class ProxyUtil {
     }
   }
 
+  /**
+   * Dummy trustmanager that is used to bypass server certificate checking
+   */
+  private static class DummyTrustManager implements X509TrustManager {
+    public void checkClientTrusted(X509Certificate[] chain, String authType) {
+    }
+
+    public void checkServerTrusted(X509Certificate[] chain, String authType) {
+    }
+
+    public X509Certificate[] getAcceptedIssuers() {
+      return null;
+    }
+  }
+
   private static HttpsURLConnection openConnection(String hostname, int port,
       String path) throws IOException {
     try {
@@ -90,21 +115,53 @@ public class ProxyUtil {
     }
   }
 
-  private static void setupSslProps(Configuration conf) {
-    System.setProperty("javax.net.ssl.trustStore", conf
-        .get("ssl.client.truststore.location"));
-    System.setProperty("javax.net.ssl.trustStorePassword", conf.get(
-        "ssl.client.truststore.password", ""));
-    System.setProperty("javax.net.ssl.trustStoreType", conf.get(
-        "ssl.client.truststore.type", "jks"));
-    System.setProperty("javax.net.ssl.keyStore", conf
-        .get("ssl.client.keystore.location"));
-    System.setProperty("javax.net.ssl.keyStorePassword", conf.get(
-        "ssl.client.keystore.password", ""));
-    System.setProperty("javax.net.ssl.keyPassword", conf.get(
-        "ssl.client.keystore.keypassword", ""));
-    System.setProperty("javax.net.ssl.keyStoreType", conf.get(
-        "ssl.client.keystore.type", "jks"));
+  private static void setupSslProps(Configuration conf) throws IOException {
+    FileInputStream fis = null;
+    try {
+      SSLContext sc = SSLContext.getInstance("SSL");
+      KeyManager[] kms = null;
+      TrustManager[] tms = null;
+      if (conf.get("ssl.client.keystore.location") != null) {
+        // initialize default key manager with keystore file and pass
+        KeyManagerFactory kmf = KeyManagerFactory.getInstance("SunX509");
+        KeyStore ks = KeyStore.getInstance(conf.get("ssl.client.keystore.type",
+            "JKS"));
+        char[] ksPass = conf.get("ssl.client.keystore.password", "changeit")
+            .toCharArray();
+        fis = new FileInputStream(conf.get("ssl.client.keystore.location",
+            "keystore.jks"));
+        ks.load(fis, ksPass);
+        kmf.init(ks, conf.get("ssl.client.keystore.keypassword", "changeit")
+            .toCharArray());
+        kms = kmf.getKeyManagers();
+        fis.close();
+        fis = null;
+      }
+      // initialize default trust manager with keystore file and pass
+      if (conf.getBoolean("ssl.client.do.not.authenticate.server", false)) {
+        // by pass trustmanager validation
+        tms = new DummyTrustManager[] { new DummyTrustManager() };
+      } else {
+        TrustManagerFactory tmf = TrustManagerFactory.getInstance("PKIX");
+        KeyStore ts = KeyStore.getInstance(conf.get(
+            "ssl.client.truststore.type", "JKS"));
+        char[] tsPass = conf.get("ssl.client.truststore.password", "changeit")
+            .toCharArray();
+        fis = new FileInputStream(conf.get("ssl.client.truststore.location",
+            "truststore.jks"));
+        ts.load(fis, tsPass);
+        tmf.init(ts);
+        tms = tmf.getTrustManagers();
+      }
+      sc.init(kms, tms, new java.security.SecureRandom());
+      HttpsURLConnection.setDefaultSSLSocketFactory(sc.getSocketFactory());
+    } catch (Exception e) {
+      throw new IOException("Could not initialize SSLContext", e);
+    } finally {
+      if (fis != null) {
+        fis.close();
+      }
+    }
   }
 
   static InetSocketAddress getSslAddr(Configuration conf) throws IOException {
@@ -121,31 +178,33 @@ public class ProxyUtil {
     int err = 0;
     StringBuilder b = new StringBuilder();
 
-    HostsFileReader hostsReader = new HostsFileReader(conf.get("hdfsproxy.hosts",
-        "hdfsproxy-hosts"), "");
+    HostsFileReader hostsReader = new HostsFileReader(conf.get(
+        "hdfsproxy.hosts", "hdfsproxy-hosts"), "");
     Set<String> hostsList = hostsReader.getHosts();
     for (String hostname : hostsList) {
       HttpsURLConnection connection = null;
       try {
-        connection = openConnection(hostname, sslPort, path);  
-        connection.connect(); 
+        connection = openConnection(hostname, sslPort, path);
+        connection.connect();
         if (LOG.isDebugEnabled()) {
           StringBuffer sb = new StringBuffer();
-          X509Certificate[] clientCerts = (X509Certificate[]) connection.getLocalCertificates();
+          X509Certificate[] clientCerts = (X509Certificate[]) connection
+              .getLocalCertificates();
           if (clientCerts != null) {
             for (X509Certificate cert : clientCerts)
               sb.append("\n Client certificate Subject Name is "
                   + cert.getSubjectX500Principal().getName());
           } else {
-            sb.append("\n No client certificates were found");  
+            sb.append("\n No client certificates were found");
           }
-          X509Certificate[] serverCerts = (X509Certificate[]) connection.getServerCertificates();
+          X509Certificate[] serverCerts = (X509Certificate[]) connection
+              .getServerCertificates();
           if (serverCerts != null) {
             for (X509Certificate cert : serverCerts)
               sb.append("\n Server certificate Subject Name is "
                   + cert.getSubjectX500Principal().getName());
           } else {
-            sb.append("\n No server certificates were found");  
+            sb.append("\n No server certificates were found");
           }
           LOG.debug(sb.toString());
         }
@@ -156,7 +215,8 @@ public class ProxyUtil {
         }
       } catch (IOException e) {
         b.append("\n\t" + hostname + ": " + e.getLocalizedMessage());
-        if (LOG.isDebugEnabled()) e.printStackTrace();
+        if (LOG.isDebugEnabled())
+          LOG.debug("Exception happend for host " + hostname, e);
         err++;
       } finally {
         if (connection != null)
@@ -164,65 +224,73 @@ public class ProxyUtil {
       }
     }
     if (err > 0) {
-      System.err.print("Command failed on the following "
-          + err + " host" + (err==1?":":"s:") + b.toString() + "\n");
+      System.err.print("Command failed on the following " + err + " host"
+          + (err == 1 ? ":" : "s:") + b.toString() + "\n");
       return false;
     }
     return true;
   }
-  
-  
-  static FSDataInputStream open(Configuration conf, String hostname, int port, String path) throws IOException {
+
+  static FSDataInputStream open(Configuration conf, String hostname, int port,
+      String path) throws IOException {
     setupSslProps(conf);
     HttpURLConnection connection = null;
     connection = openConnection(hostname, port, path);
     connection.connect();
     final InputStream in = connection.getInputStream();
     return new FSDataInputStream(new FSInputStream() {
-        public int read() throws IOException {
-          return in.read();
-        }
-        public int read(byte[] b, int off, int len) throws IOException {
-          return in.read(b, off, len);
-        }
+      public int read() throws IOException {
+        return in.read();
+      }
 
-        public void close() throws IOException {
-          in.close();
-        }
+      public int read(byte[] b, int off, int len) throws IOException {
+        return in.read(b, off, len);
+      }
 
-        public void seek(long pos) throws IOException {
-          throw new IOException("Can't seek!");
-        }
-        public long getPos() throws IOException {
-          throw new IOException("Position unknown!");
-        }
-        public boolean seekToNewSource(long targetPos) throws IOException {
-          return false;
-        }
-      });
+      public void close() throws IOException {
+        in.close();
+      }
+
+      public void seek(long pos) throws IOException {
+        throw new IOException("Can't seek!");
+      }
+
+      public long getPos() throws IOException {
+        throw new IOException("Position unknown!");
+      }
+
+      public boolean seekToNewSource(long targetPos) throws IOException {
+        return false;
+      }
+    });
   }
-  
-  static void checkServerCertsExpirationDays(Configuration conf, String hostname, int port) throws IOException {
+
+  static void checkServerCertsExpirationDays(Configuration conf,
+      String hostname, int port) throws IOException {
     setupSslProps(conf);
     HttpsURLConnection connection = null;
     connection = openConnection(hostname, port, null);
     connection.connect();
-    X509Certificate[] serverCerts = (X509Certificate[]) connection.getServerCertificates();
+    X509Certificate[] serverCerts = (X509Certificate[]) connection
+        .getServerCertificates();
     Date curDate = new Date();
     long curTime = curDate.getTime();
     if (serverCerts != null) {
       for (X509Certificate cert : serverCerts) {
         StringBuffer sb = new StringBuffer();
-        sb.append("\n Server certificate Subject Name: " + cert.getSubjectX500Principal().getName());
+        sb.append("\n Server certificate Subject Name: "
+            + cert.getSubjectX500Principal().getName());
         Date expDate = cert.getNotAfter();
         long expTime = expDate.getTime();
-        int dayOffSet = (int) ((expTime - curTime)/MM_SECONDS_PER_DAY);
+        int dayOffSet = (int) ((expTime - curTime) / MM_SECONDS_PER_DAY);
         sb.append(" have " + dayOffSet + " days to expire");
-        if (dayOffSet < CERT_EXPIRATION_WARNING_THRESHOLD) LOG.warn(sb.toString());
-        else LOG.info(sb.toString());
+        if (dayOffSet < CERT_EXPIRATION_WARNING_THRESHOLD)
+          LOG.warn(sb.toString());
+        else
+          LOG.info(sb.toString());
       }
     } else {
-      LOG.info("\n No Server certs was found");  
+      LOG.info("\n No Server certs was found");
     }
 
     if (connection != null) {
@@ -231,24 +299,23 @@ public class ProxyUtil {
   }
 
   public static void main(String[] args) throws Exception {
-    if(args.length < 1 || 
-        (!UtilityOption.RELOAD.getName().equalsIgnoreCase(args[0]) 
+    if (args.length < 1
+        || (!UtilityOption.RELOAD.getName().equalsIgnoreCase(args[0])
             && !UtilityOption.CLEAR.getName().equalsIgnoreCase(args[0])
-            && !UtilityOption.GET.getName().equalsIgnoreCase(args[0])
-            && !UtilityOption.CHECKCERTS.getName().equalsIgnoreCase(args[0])) ||
-            (UtilityOption.GET.getName().equalsIgnoreCase(args[0]) && args.length != 4) ||
-            (UtilityOption.CHECKCERTS.getName().equalsIgnoreCase(args[0]) && args.length != 3)) {
-      System.err.println("Usage: ProxyUtil ["
-          + UtilityOption.RELOAD.getName() + "] | ["
-          + UtilityOption.CLEAR.getName() + "] | ["
+            && !UtilityOption.GET.getName().equalsIgnoreCase(args[0]) && !UtilityOption.CHECKCERTS
+            .getName().equalsIgnoreCase(args[0]))
+        || (UtilityOption.GET.getName().equalsIgnoreCase(args[0]) && args.length != 4)
+        || (UtilityOption.CHECKCERTS.getName().equalsIgnoreCase(args[0]) && args.length != 3)) {
+      System.err.println("Usage: ProxyUtil [" + UtilityOption.RELOAD.getName()
+          + "] | [" + UtilityOption.CLEAR.getName() + "] | ["
           + UtilityOption.GET.getName() + " <hostname> <#port> <path> ] | ["
           + UtilityOption.CHECKCERTS.getName() + " <hostname> <#port> ]");
-      System.exit(0);      
+      System.exit(0);
     }
-    Configuration conf = new Configuration(false);   
+    Configuration conf = new Configuration(false);
     conf.addResource("ssl-client.xml");
     conf.addResource("hdfsproxy-default.xml");
-     
+
     if (UtilityOption.RELOAD.getName().equalsIgnoreCase(args[0])) {
       // reload user-certs.xml and user-permissions.xml files
       sendCommand(conf, "/reloadPermFiles");
@@ -266,5 +333,5 @@ public class ProxyUtil {
       in.close();
     }
   }
-        
+
 }

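With setupSslProps() now building an SSLContext from ssl-client.xml instead of mutating global javax.net.ssl system properties, the utility entry points can be driven directly. A hedged usage sketch (the hostname, port, and path are placeholders; the option strings come from the usage message in the diff above):

import org.apache.hadoop.hdfsproxy.ProxyUtil;

public class ProxyUtilUsageSketch {
  public static void main(String[] args) throws Exception {
    // Check how many days the proxy's server certificates have left.
    ProxyUtil.main(new String[] { "-checkcerts", "proxy.example.com", "8443" });
    // Fetch a file over HSFTP through the proxy.
    ProxyUtil.main(new String[] { "-get", "proxy.example.com", "8443", "/testdir/file" });
  }
}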
+ 128 - 0
src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/DummyLdapContext.java

@@ -0,0 +1,128 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfsproxy;
+
+import java.util.ArrayList;
+import java.util.Hashtable;
+
+import javax.naming.NamingEnumeration;
+import javax.naming.NamingException;
+import javax.naming.directory.Attribute;
+import javax.naming.directory.Attributes;
+import javax.naming.directory.BasicAttribute;
+import javax.naming.directory.BasicAttributes;
+import javax.naming.directory.SearchResult;
+import javax.naming.ldap.Control;
+import javax.naming.ldap.InitialLdapContext;
+
+class DummyLdapContext extends InitialLdapContext {
+  class ResultEnum<T> implements NamingEnumeration<T> {
+    private ArrayList<T> rl;
+
+    public ResultEnum() {
+      rl = new ArrayList<T>();
+    }
+
+    public ResultEnum(ArrayList<T> al) {
+      rl = al;
+    }
+
+    public boolean hasMoreElements() {
+      return !rl.isEmpty();
+    }
+
+    public T nextElement() {
+      T t = rl.get(0);
+      rl.remove(0);
+      return t;
+    }
+
+    public boolean hasMore() throws NamingException {
+      return !rl.isEmpty();
+    }
+
+    public T next() throws NamingException {
+      T t = rl.get(0);
+      rl.remove(0);
+      return t;
+    }
+
+    public void close() throws NamingException {
+    }
+  }
+
+  public DummyLdapContext() throws NamingException {
+  }
+
+  public DummyLdapContext(Hashtable<?, ?> environment, Control[] connCtls)
+      throws NamingException {
+  }
+
+  public NamingEnumeration<SearchResult> search(String name,
+      Attributes matchingAttributes, String[] attributesToReturn)
+      throws NamingException {
+    System.out.println("Searching Dummy LDAP Server Results:");
+    if (!"ou=proxyroles,dc=mycompany,dc=com".equalsIgnoreCase(name)) {
+      System.out.println("baseName mismatch");
+      return new ResultEnum<SearchResult>();
+    }
+    if (!"cn=127.0.0.1".equals((String) matchingAttributes.get("uniqueMember")
+        .get())) {
+      System.out.println("Ip address mismatch");
+      return new ResultEnum<SearchResult>();
+    }
+    BasicAttributes attrs = new BasicAttributes();
+    BasicAttribute uidAttr = new BasicAttribute("uid", "testuser");
+    attrs.put(uidAttr);
+    BasicAttribute groupAttr = new BasicAttribute("userClass", "testgroup");
+    attrs.put(groupAttr);
+    BasicAttribute locAttr = new BasicAttribute("documentLocation", "/testdir");
+    attrs.put(locAttr);
+    SearchResult sr = new SearchResult(null, null, attrs);
+    ArrayList<SearchResult> al = new ArrayList<SearchResult>();
+    al.add(sr);
+    NamingEnumeration<SearchResult> ne = new ResultEnum<SearchResult>(al);
+    return ne;
+  }
+
+  @SuppressWarnings("unchecked")
+  public static void main(String[] args) throws Exception {
+    DummyLdapContext dlc = new DummyLdapContext();
+    String baseName = "ou=proxyroles,dc=mycompany,dc=com";
+    Attributes matchAttrs = new BasicAttributes(true);
+    String[] attrIDs = { "uid", "documentLocation" };
+    NamingEnumeration<SearchResult> results = dlc.search(baseName, matchAttrs,
+        attrIDs);
+    if (results.hasMore()) {
+      SearchResult sr = results.next();
+      Attributes attrs = sr.getAttributes();
+      for (NamingEnumeration ne = attrs.getAll(); ne.hasMore();) {
+        Attribute attr = (Attribute) ne.next();
+        if ("uid".equalsIgnoreCase(attr.getID())) {
+          System.out.println("User ID = " + attr.get());
+        } else if ("documentLocation".equalsIgnoreCase(attr.getID())) {
+          System.out.println("Document Location = ");
+          for (NamingEnumeration e = attr.getAll(); e.hasMore();) {
+            System.out.println(e.next());
+          }
+        }
+      }
+    }
+  }
+}
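
DummyLdapContext stubs exactly one query: a search under ou=proxyroles,dc=mycompany,dc=com whose uniqueMember attribute matches cn=&lt;client IP&gt;, answering with a uid, a userClass group, and a documentLocation path. For orientation, a minimal sketch of the real JNDI lookup it stands in for; how LdapIpDirFilter actually wires this up is not shown in this change, so the glue code below is an assumption:

    import java.util.Hashtable;

    import javax.naming.Context;
    import javax.naming.NamingEnumeration;
    import javax.naming.directory.BasicAttributes;
    import javax.naming.directory.SearchResult;
    import javax.naming.ldap.InitialLdapContext;

    public class LdapRoleLookupSketch {
      public static void main(String[] args) throws Exception {
        Hashtable<String, String> env = new Hashtable<String, String>();
        env.put(Context.INITIAL_CONTEXT_FACTORY,
            "com.sun.jndi.ldap.LdapCtxFactory");
        env.put(Context.PROVIDER_URL, "ldap://ldapserver:389");
        InitialLdapContext ctx = new InitialLdapContext(env, null);

        // Match the client IP the same way DummyLdapContext expects it.
        BasicAttributes match = new BasicAttributes(true);
        match.put("uniqueMember", "cn=127.0.0.1");
        String[] attrIDs = { "uid", "userClass", "documentLocation" };

        NamingEnumeration<SearchResult> results = ctx.search(
            "ou=proxyroles,dc=mycompany,dc=com", match, attrIDs);
        while (results.hasMore()) {
          System.out.println(results.next().getAttributes());
        }
      }
    }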

+ 139 - 0
src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestLdapIpDirFilter.java

@@ -0,0 +1,139 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfsproxy;
+
+import java.io.IOException;
+import java.io.PrintWriter;
+
+import javax.naming.NamingException;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletContext;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+
+import org.apache.cactus.FilterTestCase;
+import org.apache.cactus.WebRequest;
+import org.apache.cactus.WebResponse;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+public class TestLdapIpDirFilter extends FilterTestCase {
+
+  public static final Log LOG = LogFactory.getLog(TestLdapIpDirFilter.class);
+
+  private class DummyFilterChain implements FilterChain {
+    public void doFilter(ServletRequest theRequest, ServletResponse theResponse)
+        throws IOException, ServletException {
+      PrintWriter writer = theResponse.getWriter();
+
+      writer.print("<p>some content</p>");
+      writer.close();
+    }
+
+    public void init(FilterConfig theConfig) {
+    }
+
+    public void destroy() {
+    }
+  }
+
+  public void testIpRestriction() throws ServletException, IOException,
+      NamingException {
+    LdapIpDirFilter filter = new LdapIpDirFilter();
+    String baseName = "ou=proxyroles,dc=mycompany,dc=com";
+    DummyLdapContext dlc = new DummyLdapContext();
+    filter.initialize(baseName, dlc);
+    request.setRemoteIPAddress("127.0.0.2");
+    request.removeAttribute("org.apache.hadoop.hdfsproxy.authorized.userID");
+    FilterChain mockFilterChain = new DummyFilterChain();
+    filter.doFilter(request, response, mockFilterChain);
+    assertNull(request
+        .getAttribute("org.apache.hadoop.hdfsproxy.authorized.userID"));
+  }
+
+  public void endIpRestriction(WebResponse theResponse) {
+    assertEquals(theResponse.getStatusCode(), 403);
+    assertTrue("Text missing 'IP not authorized to access' : : ["
+        + theResponse.getText() + "]", theResponse.getText().indexOf(
+        "IP not authorized to access") > 0);
+  }
+
+  public void beginPathRestriction(WebRequest theRequest) {
+    theRequest.setURL("proxy-test:0", null, "/streamFile", null,
+        "filename=/nontestdir");
+  }
+
+  public void testPathRestriction() throws ServletException, IOException,
+      NamingException {
+    LdapIpDirFilter filter = new LdapIpDirFilter();
+    String baseName = "ou=proxyroles,dc=mycompany,dc=com";
+    DummyLdapContext dlc = new DummyLdapContext();
+    filter.initialize(baseName, dlc);
+    request.setRemoteIPAddress("127.0.0.1");
+    request.removeAttribute("org.apache.hadoop.hdfsproxy.authorized.userID");
+    FilterChain mockFilterChain = new DummyFilterChain();
+    filter.doFilter(request, response, mockFilterChain);
+    assertNull(request
+        .getAttribute("org.apache.hadoop.hdfsproxy.authorized.userID"));
+  }
+
+  public void endPathRestriction(WebResponse theResponse) {
+    assertEquals(theResponse.getStatusCode(), 403);
+    assertTrue("Text missing 'User not authorized to access path' : : ["
+        + theResponse.getText() + "]", theResponse.getText().indexOf(
+        "User not authorized to access path") > 0);
+  }
+
+  public void beginDoFilter(WebRequest theRequest) {
+    theRequest.setURL("proxy-test:0", null, "/streamFile", null,
+        "filename=/testdir");
+  }
+
+  public void testDoFilter() throws ServletException, IOException,
+      NamingException {
+    LdapIpDirFilter filter = new LdapIpDirFilter();
+    String baseName = "ou=proxyroles,dc=mycompany,dc=com";
+    DummyLdapContext dlc = new DummyLdapContext();
+    filter.initialize(baseName, dlc);
+    request.setRemoteIPAddress("127.0.0.1");
+
+    ServletContext context = config.getServletContext();
+    context.removeAttribute("name.node.address");
+    context.removeAttribute("name.conf");
+    assertNull(context.getAttribute("name.node.address"));
+    assertNull(context.getAttribute("name.conf"));
+    filter.init(config);
+    assertNotNull(context.getAttribute("name.node.address"));
+    assertNotNull(context.getAttribute("name.conf"));
+
+    request.removeAttribute("org.apache.hadoop.hdfsproxy.authorized.userID");
+    FilterChain mockFilterChain = new DummyFilterChain();
+    filter.doFilter(request, response, mockFilterChain);
+    assertEquals(request
+        .getAttribute("org.apache.hadoop.hdfsproxy.authorized.userID"),
+        "testuser");
+
+  }
+
+  public void endDoFilter(WebResponse theResponse) {
+    assertEquals("<p>some content</p>", theResponse.getText());
+  }
+
+}
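
A note on the Cactus lifecycle the tests above rely on: for each testXxx() method, Cactus first runs the matching beginXxx(WebRequest) in the client JVM to shape the outgoing HTTP request, then executes testXxx() inside the servlet container, and finally runs endXxx(WebResponse) back on the client against the actual response. That split is why testPathRestriction() inspects the request attribute server-side while endPathRestriction() asserts on the 403 status and body text client-side.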

+ 30 - 0
src/contrib/hdfsproxy/src/test/resources/proxy-config/hdfsproxy-default.xml

@@ -70,5 +70,35 @@
   </description>
   </description>
 </property>
 </property>
 
 
+<property>
+  <name>hdfsproxy.ldap.initial.context.factory</name>
+  <value>com.sun.jndi.ldap.LdapCtxFactory</value>
+  <description>The LDAP initial context factory class used for JNDI lookups.
+  </description>
+</property>
+
+<property>
+  <name>hdfsproxy.ldap.provider.url</name>
+  <value>ldap://ldapserver:389</value>
+  <description>The URL of the LDAP server to query.
+  </description>
+</property>
+
+<property>
+  <name>hdfsproxy.ldap.role.base</name>
+  <value>ou=proxyroles,dc=mycompany,dc=com</value>
+  <description>The base DN under which proxy role entries are searched.
+  </description>
+</property>
+
+<property>
+  <name>fs.default.name</name>
+  <!-- cluster variant -->
+  <value>hdfs://localhost:8020</value>
+  <description>The name of the default file system.  Either the
+  literal string "local" or a host:port for NDFS.</description>
+  <final>true</final>
+</property>
+
 </configuration>
 </configuration>
 
 

+ 1 - 1
src/contrib/hdfsproxy/src/test/resources/proxy-config/user-certs.xml

@@ -75,7 +75,7 @@ Any leading or trailing whitespaces are stripped/ignored.
 
 
 <property>
 <property>
   <name> Admin </name>
   <name> Admin </name>
-  <value>, 5,  ,,  3 , 9a2cf0be9ddf8280
+  <value>, 6,  ,,  3 , 9a2cf0be9ddf8280
 
 
 
 
 
 

+ 154 - 0
src/contrib/hdfsproxy/src/test/resources/tomcat-web.xml

@@ -0,0 +1,154 @@
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+
+<!DOCTYPE web-app 
+    PUBLIC "-//Sun Microsystems, Inc.//DTD Web Application 2.3//EN" 
+    "http://java.sun.com/dtd/web-app_2_3.dtd">
+
+<web-app>
+
+
+    <!-- General description of your web application -->
+
+    <display-name>HDFS Proxy</display-name>
+    <description>
+      Serves read-only access to data stored in HDFS.
+    </description>
+
+
+    <!-- Context initialization parameters that define shared
+         String constants used within your application, which
+         can be customized by the system administrator who is
+         installing your application.  The values actually
+         assigned to these parameters can be retrieved in a
+         servlet or JSP page by calling:
+
+             String value =
+               getServletContext().getInitParameter("name");
+
+         where "name" matches the <param-name> element of
+         one of these initialization parameters.
+
+         You can define any number of context initialization
+         parameters, including zero.
+    -->
+
+    <context-param>
+      <param-name>webmaster</param-name>
+      <param-value>zhiyong1@yahoo-inc.com</param-value>
+      <description>
+        The EMAIL address of the administrator to whom questions
+        and comments about this application should be addressed.
+      </description>
+    </context-param>
+
+
+    <!-- Servlet definitions for the servlets that make up
+         your web application, including initialization
+         parameters.  With Tomcat, you can also send requests
+         to servlets not listed here with a request like this:
+
+           http://localhost:8080/{context-path}/servlet/{classname}
+
+         but this usage is not guaranteed to be portable.  It also
+         makes relative references to images and other resources
+         required by your servlet more complicated, so defining
+         all of your servlets (and defining a mapping to them with
+         a servlet-mapping element) is recommended.
+
+         Servlet initialization parameters can be retrieved in a
+         servlet or JSP page by calling:
+
+             String value =
+               getServletConfig().getInitParameter("name");
+
+         where "name" matches the <param-name> element of
+         one of these initialization parameters.
+
+         You can define any number of servlets, including zero.
+    -->
+
+    <filter>
+        <filter-name>proxyFilter</filter-name>
+        <filter-class>org.apache.hadoop.hdfsproxy.ProxyFilter</filter-class>
+    </filter>
+
+    <filter-mapping>
+        <filter-name>proxyFilter</filter-name>
+        <url-pattern>/*</url-pattern>
+    </filter-mapping>
+
+    <servlet>
+        <servlet-name>listPaths</servlet-name>
+        <description>list paths data access</description>
+        <servlet-class>org.apache.hadoop.hdfsproxy.ProxyListPathsServlet</servlet-class>
+    </servlet>
+
+    <servlet-mapping>
+        <servlet-name>listPaths</servlet-name>
+        <url-pattern>/listPaths/*</url-pattern>
+    </servlet-mapping>
+
+    <servlet>
+        <servlet-name>data</servlet-name>
+        <description>data access</description>
+        <servlet-class>org.apache.hadoop.hdfsproxy.ProxyFileDataServlet</servlet-class>
+    </servlet>
+
+    <servlet-mapping>
+        <servlet-name>data</servlet-name>
+        <url-pattern>/data/*</url-pattern>
+    </servlet-mapping>
+
+    <servlet>
+        <servlet-name>streamFile</servlet-name>
+        <description>stream file access</description>
+        <servlet-class>org.apache.hadoop.hdfsproxy.ProxyStreamFile</servlet-class>
+    </servlet>
+
+    <servlet-mapping>
+        <servlet-name>streamFile</servlet-name>
+        <url-pattern>/streamFile/*</url-pattern>
+    </servlet-mapping>
+
+    <welcome-file-list>
+        <welcome-file>index.html</welcome-file>
+    </welcome-file-list>
+
+    <!-- Define the default session timeout for your application,
+         in minutes.  From a servlet or JSP page, you can modify
+         the timeout for a particular session dynamically by using
+         HttpSession.getMaxInactiveInterval(). -->
+
+    <session-config>
+      <session-timeout>30</session-timeout>    <!-- 30 minutes -->
+    </session-config>    
+
+
+</web-app>
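
Taken together, the descriptor above routes every request through proxyFilter first and only then dispatches by path, so a read such as /streamFile?filename=/testdir (the URL shape the Cactus tests construct) reaches ProxyStreamFile only after the filter has vetted the caller.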

+ 94 - 36
src/java/org/apache/hadoop/hdfs/HsftpFileSystem.java

@@ -18,33 +18,40 @@
 
 
 package org.apache.hadoop.hdfs;
 package org.apache.hadoop.hdfs;
 
 
+import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.IOException;
 import java.net.HttpURLConnection;
 import java.net.HttpURLConnection;
 import java.net.URI;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.URISyntaxException;
 import java.net.URL;
 import java.net.URL;
+import java.security.KeyStore;
 import java.security.cert.X509Certificate;
 import java.security.cert.X509Certificate;
 
 
 import javax.net.ssl.HostnameVerifier;
 import javax.net.ssl.HostnameVerifier;
 import javax.net.ssl.HttpsURLConnection;
 import javax.net.ssl.HttpsURLConnection;
+import javax.net.ssl.KeyManager;
+import javax.net.ssl.KeyManagerFactory;
+import javax.net.ssl.SSLContext;
 import javax.net.ssl.SSLSession;
 import javax.net.ssl.SSLSession;
+import javax.net.ssl.TrustManager;
+import javax.net.ssl.TrustManagerFactory;
+import javax.net.ssl.X509TrustManager;
 
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configuration;
 
 
-
-
-/** An implementation of a protocol for accessing filesystems over HTTPS.
- * The following implementation provides a limited, read-only interface
- * to a filesystem over HTTPS.
+/**
+ * An implementation of a protocol for accessing filesystems over HTTPS. The
+ * following implementation provides a limited, read-only interface to a
+ * filesystem over HTTPS.
+ * 
  * @see org.apache.hadoop.hdfs.server.namenode.ListPathsServlet
  * @see org.apache.hadoop.hdfs.server.namenode.ListPathsServlet
  * @see org.apache.hadoop.hdfs.server.namenode.FileDataServlet
  * @see org.apache.hadoop.hdfs.server.namenode.FileDataServlet
  */
  */
 public class HsftpFileSystem extends HftpFileSystem {
 public class HsftpFileSystem extends HftpFileSystem {
 
 
   private static final long MM_SECONDS_PER_DAY = 1000 * 60 * 60 * 24;
   private static final long MM_SECONDS_PER_DAY = 1000 * 60 * 60 * 24;
-  private volatile int ExpWarnDays = 0;  
-  
-  
+  private volatile int ExpWarnDays = 0;
+
   @Override
   @Override
   public void initialize(URI name, Configuration conf) throws IOException {
   public void initialize(URI name, Configuration conf) throws IOException {
     super.initialize(name, conf);
     super.initialize(name, conf);
@@ -52,40 +59,76 @@ public class HsftpFileSystem extends HftpFileSystem {
     ExpWarnDays = conf.getInt("ssl.expiration.warn.days", 30);
     ExpWarnDays = conf.getInt("ssl.expiration.warn.days", 30);
   }
   }
 
 
-  /** Set up SSL resources */
-  private static void setupSsl(Configuration conf) {
+  /**
+   * Set up SSL resources
+   * 
+   * @throws IOException
+   */
+  private static void setupSsl(Configuration conf) throws IOException {
     Configuration sslConf = new Configuration(false);
     Configuration sslConf = new Configuration(false);
     sslConf.addResource(conf.get("dfs.https.client.keystore.resource",
     sslConf.addResource(conf.get("dfs.https.client.keystore.resource",
         "ssl-client.xml"));
         "ssl-client.xml"));
-    System.setProperty("javax.net.ssl.trustStore", sslConf.get(
-        "ssl.client.truststore.location", ""));
-    System.setProperty("javax.net.ssl.trustStorePassword", sslConf.get(
-        "ssl.client.truststore.password", ""));
-    System.setProperty("javax.net.ssl.trustStoreType", sslConf.get(
-        "ssl.client.truststore.type", "jks"));
-    System.setProperty("javax.net.ssl.keyStore", sslConf.get(
-        "ssl.client.keystore.location", ""));
-    System.setProperty("javax.net.ssl.keyStorePassword", sslConf.get(
-        "ssl.client.keystore.password", ""));
-    System.setProperty("javax.net.ssl.keyPassword", sslConf.get(
-        "ssl.client.keystore.keypassword", ""));
-    System.setProperty("javax.net.ssl.keyStoreType", sslConf.get(
-        "ssl.client.keystore.type", "jks"));
+    FileInputStream fis = null;
+    try {
+      SSLContext sc = SSLContext.getInstance("SSL");
+      KeyManager[] kms = null;
+      TrustManager[] tms = null;
+      if (sslConf.get("ssl.client.keystore.location") != null) {
+        // initialize default key manager with keystore file and pass
+        KeyManagerFactory kmf = KeyManagerFactory.getInstance("SunX509");
+        KeyStore ks = KeyStore.getInstance(sslConf.get(
+            "ssl.client.keystore.type", "JKS"));
+        char[] ksPass = sslConf.get("ssl.client.keystore.password", "changeit")
+            .toCharArray();
+        fis = new FileInputStream(sslConf.get("ssl.client.keystore.location",
+            "keystore.jks"));
+        ks.load(fis, ksPass);
+        kmf.init(ks, sslConf.get("ssl.client.keystore.keypassword", "changeit")
+            .toCharArray());
+        kms = kmf.getKeyManagers();
+        fis.close();
+        fis = null;
+      }
+      // initialize default trust manager with truststore file and pass
+      if (conf.getBoolean("ssl.client.do.not.authenticate.server", false)) {
+        // by pass trustmanager validation
+        tms = new DummyTrustManager[] { new DummyTrustManager() };
+      } else {
+        TrustManagerFactory tmf = TrustManagerFactory.getInstance("PKIX");
+        KeyStore ts = KeyStore.getInstance(sslConf.get(
+            "ssl.client.truststore.type", "JKS"));
+        char[] tsPass = sslConf.get("ssl.client.truststore.password",
+            "changeit").toCharArray();
+        fis = new FileInputStream(sslConf.get("ssl.client.truststore.location",
+            "truststore.jks"));
+        ts.load(fis, tsPass);
+        tmf.init(ts);
+        tms = tmf.getTrustManagers();
+      }
+      sc.init(kms, tms, new java.security.SecureRandom());
+      HttpsURLConnection.setDefaultSSLSocketFactory(sc.getSocketFactory());
+    } catch (Exception e) {
+      throw new IOException("Could not initialize SSLContext", e);
+    } finally {
+      if (fis != null) {
+        fis.close();
+      }
+    }
   }
   }
-  
+
   @Override
   @Override
   protected HttpURLConnection openConnection(String path, String query)
   protected HttpURLConnection openConnection(String path, String query)
       throws IOException {
       throws IOException {
     try {
     try {
-      final URL url = new URI("https", null, nnAddr.getHostName(),
-          nnAddr.getPort(), path, query, null).toURL();
-      HttpsURLConnection conn = (HttpsURLConnection)url.openConnection();
+      final URL url = new URI("https", null, nnAddr.getHostName(),
+          nnAddr.getPort(), path, query, null).toURL();
+      HttpsURLConnection conn = (HttpsURLConnection) url.openConnection();
       // bypass hostname verification
       // bypass hostname verification
       conn.setHostnameVerifier(new DummyHostnameVerifier());
       conn.setHostnameVerifier(new DummyHostnameVerifier());
       conn.setRequestMethod("GET");
       conn.setRequestMethod("GET");
       conn.connect();
       conn.connect();
-     
-   // check cert expiration date
+
+      // check cert expiration date
       final int warnDays = ExpWarnDays;
       final int warnDays = ExpWarnDays;
       if (warnDays > 0) { // make sure only check once
       if (warnDays > 0) { // make sure only check once
         ExpWarnDays = 0;
         ExpWarnDays = 0;
@@ -100,16 +143,16 @@ public class HsftpFileSystem extends HftpFileSystem {
               StringBuffer sb = new StringBuffer();
               StringBuffer sb = new StringBuffer();
               sb.append("\n Client certificate "
               sb.append("\n Client certificate "
                   + cert.getSubjectX500Principal().getName());
                   + cert.getSubjectX500Principal().getName());
-              int dayOffSet = (int) ((expTime - System.currentTimeMillis())/MM_SECONDS_PER_DAY);
+              int dayOffSet = (int) ((expTime - System.currentTimeMillis()) / MM_SECONDS_PER_DAY);
               sb.append(" have " + dayOffSet + " days to expire");
               sb.append(" have " + dayOffSet + " days to expire");
               LOG.warn(sb.toString());
               LOG.warn(sb.toString());
             }
             }
           }
           }
-        }        
+        }
       }
       }
-      return (HttpURLConnection)conn;
+      return (HttpURLConnection) conn;
     } catch (URISyntaxException e) {
     } catch (URISyntaxException e) {
-      throw (IOException)new IOException().initCause(e);
+      throw (IOException) new IOException().initCause(e);
     }
     }
   }
   }
 
 
@@ -117,10 +160,10 @@ public class HsftpFileSystem extends HftpFileSystem {
   public URI getUri() {
   public URI getUri() {
     try {
     try {
       return new URI("hsftp", null, nnAddr.getHostName(), nnAddr.getPort(),
       return new URI("hsftp", null, nnAddr.getHostName(), nnAddr.getPort(),
-                     null, null, null);
+          null, null, null);
     } catch (URISyntaxException e) {
     } catch (URISyntaxException e) {
       return null;
       return null;
-    } 
+    }
   }
   }
 
 
   /**
   /**
@@ -132,4 +175,19 @@ public class HsftpFileSystem extends HftpFileSystem {
     }
     }
   }
   }
 
 
+  /**
+   * Dummy trust manager that trusts all server certificates.
+   */
+  protected static class DummyTrustManager implements X509TrustManager {
+    public void checkClientTrusted(X509Certificate[] chain, String authType) {
+    }
+
+    public void checkServerTrusted(X509Certificate[] chain, String authType) {
+    }
+
+    public X509Certificate[] getAcceptedIssuers() {
+      return null;
+    }
+  }
+
 }
 }
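
End to end, a client needs nothing more than an hsftp URI to exercise the new SSL setup. A minimal sketch, assuming a proxy at proxy.example.com:8443 and an ssl-client.xml on the classpath (both hypothetical):

    import java.net.URI;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataInputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class HsftpReadSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Read in initialize(): warn when the client certificate is
        // within 15 days of expiry (the default threshold is 30).
        conf.setInt("ssl.expiration.warn.days", 15);

        FileSystem fs = FileSystem.get(
            URI.create("hsftp://proxy.example.com:8443/"), conf);
        // "/testdir/part-00000" is a placeholder path.
        FSDataInputStream in = fs.open(new Path("/testdir/part-00000"));
        byte[] buf = new byte[4096];
        int n = in.read(buf);
        System.out.println("read " + n + " bytes");
        in.close();
      }
    }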