
HDFS-2707. HttpFS should read the hadoop-auth secret from a file instead of inline from the configuration. (tucu)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1224794 13f79535-47bb-0310-9956-ffa450edef68
Author: Alejandro Abdelnur
Commit: ae0d48854d

+ 1 - 1
hadoop-common-project/hadoop-auth-examples/src/main/webapp/WEB-INF/web.xml

@@ -86,7 +86,7 @@
     </init-param>
     <init-param>
       <param-name>kerberos.keytab</param-name>
-      <param-value>/tmp/alfredo.keytab</param-value>
+      <param-value>/tmp/my.keytab</param-value>
     </init-param>
     <init-param>
       <param-name>token.validity</param-name>

+ 3 - 3
hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestAuthenticationFilter.java

@@ -459,7 +459,7 @@ public class TestAuthenticationFilter extends TestCase {
 
       AuthenticationToken token = new AuthenticationToken("u", "p", "t");
       token.setExpires(System.currentTimeMillis() + 1000);
-      Signer signer = new Signer("alfredo".getBytes());
+      Signer signer = new Signer("secret".getBytes());
       String tokenSigned = signer.sign(token.toString());
 
       Cookie cookie = new Cookie(AuthenticatedURL.AUTH_COOKIE, tokenSigned);
@@ -504,7 +504,7 @@ public class TestAuthenticationFilter extends TestCase {
 
       AuthenticationToken token = new AuthenticationToken("u", "p", DummyAuthenticationHandler.TYPE);
       token.setExpires(System.currentTimeMillis() - 1000);
-      Signer signer = new Signer("alfredo".getBytes());
+      Signer signer = new Signer("secret".getBytes());
       String tokenSigned = signer.sign(token.toString());
 
       Cookie cookie = new Cookie(AuthenticatedURL.AUTH_COOKIE, tokenSigned);
@@ -564,7 +564,7 @@ public class TestAuthenticationFilter extends TestCase {
 
       AuthenticationToken token = new AuthenticationToken("u", "p", "invalidtype");
       token.setExpires(System.currentTimeMillis() + 1000);
-      Signer signer = new Signer("alfredo".getBytes());
+      Signer signer = new Signer("secret".getBytes());
       String tokenSigned = signer.sign(token.toString());
 
       Cookie cookie = new Cookie(AuthenticatedURL.AUTH_COOKIE, tokenSigned);
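
The tests above only exercise the signing half of Signer. For context, a minimal sketch of the full sign/verify round-trip, assuming the hadoop-auth Signer API of this era (a constructor taking the raw secret bytes, sign(), and verifyAndExtract(), which throws SignerException on a bad signature); the payload string is illustrative:

    import org.apache.hadoop.security.authentication.util.Signer;
    import org.apache.hadoop.security.authentication.util.SignerException;

    public class SignerRoundTrip {
      public static void main(String[] args) throws SignerException {
        // Same pattern as the tests: build a Signer from the shared secret.
        Signer signer = new Signer("secret".getBytes());

        // sign() appends a signature to the payload; the result is what
        // ends up in the hadoop.auth cookie.
        String signed = signer.sign("u&p&t");

        // verifyAndExtract() checks the signature and returns the original
        // payload, throwing SignerException if the value was tampered with.
        String original = signer.verifyAndExtract(signed);
        System.out.println(original); // prints "u&p&t"
      }
    }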

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/HttpAuthentication.xml

@@ -52,7 +52,7 @@
       <p>
         If a custom authentication mechanism is required for the HTTP web-consoles, it is possible 
         to implement a plugin to support the alternate authentication mechanism (refer to 
-        Hadoop Alfredo for details on writing an <code>AuthenticatorHandler</code>).
+        Hadoop hadoop-auth for details on writing an <code>AuthenticatorHandler</code>).
       </p>
       <p>       
         The next section describes how to configure Hadoop HTTP web-consoles to require user 

+ 3 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/AuthenticationFilterInitializer.java

@@ -29,7 +29,7 @@ import java.util.HashMap;
 import java.util.Map;
 
 /**
- * Initializes Alfredo AuthenticationFilter which provides support for
+ * Initializes hadoop-auth AuthenticationFilter which provides support for
  * Kerberos HTTP SPNEGO authentication.
  * <p/>
  * It enables anonymous access, simple/pseudo and Kerberos HTTP SPNEGO
@@ -48,9 +48,9 @@ public class AuthenticationFilterInitializer extends FilterInitializer {
   static final String SIGNATURE_SECRET_FILE = AuthenticationFilter.SIGNATURE_SECRET + ".file";
   
   /**
-   * Initializes Alfredo AuthenticationFilter.
+   * Initializes hadoop-auth AuthenticationFilter.
    * <p/>
-   * Propagates to Alfredo AuthenticationFilter configuration all Hadoop
+   * Propagates to hadoop-auth AuthenticationFilter configuration all Hadoop
    * configuration properties prefixed with "hadoop.http.authentication."
    *
    * @param container The filter container

+ 1 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/conf/httpfs-signature.secret

@@ -0,0 +1 @@
+hadoop httpfs secret

+ 27 - 3
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/AuthFilter.java

@@ -21,18 +21,23 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
 
 import javax.servlet.FilterConfig;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.Reader;
 import java.util.Map;
 import java.util.Properties;
 
 /**
- * Subclass of Alfredo's <code>AuthenticationFilter</code> that obtains its configuration
+ * Subclass of hadoop-auth <code>AuthenticationFilter</code> that obtains its configuration
  * from HttpFSServer's server configuration.
  */
 public class AuthFilter extends AuthenticationFilter {
   private static final String CONF_PREFIX = "httpfs.authentication.";
 
+  private static final String SIGNATURE_SECRET_FILE = SIGNATURE_SECRET + ".file";
+
   /**
-   * Returns the Alfredo configuration from HttpFSServer's configuration.
+   * Returns the hadoop-auth configuration from HttpFSServer's configuration.
    * <p/>
    * It returns all HttpFSServer's configuration properties prefixed with
    * <code>httpfs.authentication</code>. The <code>httpfs.authentication</code>
@@ -41,7 +46,7 @@ public class AuthFilter extends AuthenticationFilter {
    * @param configPrefix parameter not used.
    * @param filterConfig parameter not used.
    *
-   * @return Alfredo configuration read from HttpFSServer's configuration.
+   * @return hadoop-auth configuration read from HttpFSServer's configuration.
    */
   @Override
   protected Properties getConfiguration(String configPrefix, FilterConfig filterConfig) {
@@ -57,6 +62,25 @@ public class AuthFilter extends AuthenticationFilter {
         props.setProperty(name, value);
       }
     }
+
+    String signatureSecretFile = props.getProperty(SIGNATURE_SECRET_FILE, null);
+    if (signatureSecretFile == null) {
+      throw new RuntimeException("Undefined property: " + SIGNATURE_SECRET_FILE);
+    }
+
+    try {
+      StringBuilder secret = new StringBuilder();
+      Reader reader = new FileReader(signatureSecretFile);
+      int c = reader.read();
+      while (c > -1) {
+        secret.append((char)c);
+        c = reader.read();
+      }
+      reader.close();
+      props.setProperty(AuthenticationFilter.SIGNATURE_SECRET, secret.toString());
+    } catch (IOException ex) {
+      throw new RuntimeException("Could not read HttpFS signature secret file: " + signatureSecretFile);
+    }
     return props;
   }
 
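
The file-reading loop added above works, but it leaves the reader open if the read fails, drops the underlying IOException, and keeps any trailing newline an editor appends to the secret file (which would silently change the signature). A hardened sketch of the same read, as an illustration rather than the committed implementation (Java 6 era, so no try-with-resources; the helper name is made up):

    import java.io.FileReader;
    import java.io.IOException;
    import java.io.Reader;

    // Illustrative helper, not part of the commit.
    static String readSecret(String file) {
      Reader reader = null;
      try {
        reader = new FileReader(file);
        StringBuilder secret = new StringBuilder();
        int c;
        while ((c = reader.read()) > -1) {
          secret.append((char) c);
        }
        // trim() guards against a trailing newline in the secret file.
        return secret.toString().trim();
      } catch (IOException ex) {
        // Chain the cause so the original error is not lost.
        throw new RuntimeException(
            "Could not read HttpFS signature secret file: " + file, ex);
      } finally {
        if (reader != null) {
          try {
            reader.close();
          } catch (IOException ex) {
            // ignore close failure
          }
        }
      }
    }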

+ 13 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/resources/httpfs-default.xml

@@ -69,6 +69,19 @@
     </description>
   </property>
 
+  <property>
+    <name>httpfs.authentication.signature.secret.file</name>
+    <value>${httpfs.config.dir}/httpfs-signature.secret</value>
+    <description>
+      File containing the secret to sign HttpFS hadoop-auth cookies.
+
+      This file should be readable only by the system user running HttpFS service.
+
+      If multiple HttpFS servers are used in a load-balancer/round-robin fashion,
+      they should share the secret file.
+    </description>
+  </property>
+
   <property>
     <name>httpfs.authentication.type</name>
     <value>simple</value>
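
Since the default points at ${httpfs.config.dir}/httpfs-signature.secret, operators need to populate that file, and every HttpFS instance behind a load balancer must share the same contents. A small illustrative sketch of generating one, assuming nothing beyond the JDK; the output path and secret length are arbitrary choices, not part of the commit:

    import java.io.FileWriter;
    import java.io.IOException;
    import java.io.Writer;
    import java.math.BigInteger;
    import java.security.SecureRandom;

    public class GenerateHttpFSSecret {
      public static void main(String[] args) throws IOException {
        // Random 256-bit value rendered as hex, used as the signing secret.
        String secret = new BigInteger(256, new SecureRandom()).toString(16);
        Writer w = new FileWriter("httpfs-signature.secret");
        w.write(secret);
        w.close();
        // The file should then be made readable only by the HttpFS user,
        // e.g. chmod 600 httpfs-signature.secret.
      }
    }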

+ 8 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFileSystem.java

@@ -45,9 +45,11 @@ import org.mortbay.jetty.webapp.WebAppContext;
 
 import java.io.File;
 import java.io.FileOutputStream;
+import java.io.FileWriter;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
+import java.io.Writer;
 import java.net.URL;
 import java.security.PrivilegedExceptionAction;
 import java.util.Arrays;
@@ -63,6 +65,11 @@ public class TestHttpFSFileSystem extends HFSTestCase {
     Assert.assertTrue(new File(homeDir, "temp").mkdir());
     HttpFSServerWebApp.setHomeDirForCurrentThread(homeDir.getAbsolutePath());
 
+    File secretFile = new File(new File(homeDir, "conf"), "secret");
+    Writer w = new FileWriter(secretFile);
+    w.write("secret");
+    w.close();
+
     String fsDefaultName = TestHdfsHelper.getHdfsConf().get("fs.default.name");
     Configuration conf = new Configuration(false);
     conf.set("httpfs.hadoop.conf:fs.default.name", fsDefaultName);
@@ -70,6 +77,7 @@ public class TestHttpFSFileSystem extends HFSTestCase {
       .getHadoopProxyUserGroups());
     conf.set("httpfs.proxyuser." + HadoopUsersConfTestHelper.getHadoopProxyUser() + ".hosts", HadoopUsersConfTestHelper
       .getHadoopProxyUserHosts());
+    conf.set("httpfs.authentication.signature.secret.file", secretFile.getAbsolutePath());
     File hoopSite = new File(new File(homeDir, "conf"), "httpfs-site.xml");
     OutputStream os = new FileOutputStream(hoopSite);
     conf.writeXml(os);

+ 8 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java

@@ -39,8 +39,10 @@ import org.mortbay.jetty.webapp.WebAppContext;
 import java.io.BufferedReader;
 import java.io.File;
 import java.io.FileOutputStream;
+import java.io.FileWriter;
 import java.io.InputStreamReader;
 import java.io.OutputStream;
+import java.io.Writer;
 import java.net.HttpURLConnection;
 import java.net.URL;
 import java.text.MessageFormat;
@@ -65,10 +67,16 @@ public class TestHttpFSServer extends HFSTestCase {
     Assert.assertTrue(new File(homeDir, "temp").mkdir());
     HttpFSServerWebApp.setHomeDirForCurrentThread(homeDir.getAbsolutePath());
 
+    File secretFile = new File(new File(homeDir, "conf"), "secret");
+    Writer w = new FileWriter(secretFile);
+    w.write("secret");
+    w.close();
+
     String fsDefaultName = TestHdfsHelper.getHdfsConf().get("fs.default.name");
     Configuration conf = new Configuration(false);
     conf.set("httpfs.hadoop.conf:fs.default.name", fsDefaultName);
     conf.set("httpfs.groups." + CommonConfigurationKeys.HADOOP_SECURITY_GROUP_MAPPING, DummyGroupMapping.class.getName());
+    conf.set("httpfs.authentication.signature.secret.file", secretFile.getAbsolutePath());
     File hoopSite = new File(new File(homeDir, "conf"), "httpfs-site.xml");
     OutputStream os = new FileOutputStream(hoopSite);
     conf.writeXml(os);

+ 3 - 0
hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt

@@ -188,6 +188,9 @@ Trunk (unreleased changes)
     HttpFS server should check that upload requests have correct 
     content-type. (tucu)
 
+    HDFS-2707. HttpFS should read the hadoop-auth secret from a file 
+    instead of inline from the configuration. (tucu)
+
 Release 0.23.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES