瀏覽代碼

HDFS-13654. Use a random secret when a secret file doesn't exist in HttpFS. This should be the default.

Takanobu Asanuma 6 年之前
父節點
當前提交
35f1014b3e

+ 0 - 1
hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml

@@ -304,7 +304,6 @@
         <configuration>
           <excludes>
             <exclude>src/test/resources/classutils.txt</exclude>
-            <exclude>src/main/conf/httpfs-signature.secret</exclude>
           </excludes>
         </configuration>
       </plugin>

+ 0 - 1
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/conf/httpfs-signature.secret

@@ -1 +0,0 @@
-hadoop httpfs secret

+ 30 - 16
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSAuthenticationFilter.java

@@ -21,6 +21,8 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.web.WebHdfsConstants;
 import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
+import org.apache.hadoop.security.authentication.util.RandomSignerSecretProvider;
+import org.apache.hadoop.security.authentication.util.SignerSecretProvider;
 import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticationFilter;
 import org.apache.hadoop.security.token.delegation.web.KerberosDelegationTokenAuthenticationHandler;
 
@@ -37,8 +39,8 @@ import java.util.Map;
 import java.util.Properties;
 
 /**
- * Subclass of hadoop-auth <code>AuthenticationFilter</code> that obtains its configuration
- * from HttpFSServer's server configuration.
+ * Subclass of hadoop-auth <code>AuthenticationFilter</code> that obtains its
+ * configuration from HttpFSServer's server configuration.
  */
 @InterfaceAudience.Private
 public class HttpFSAuthenticationFilter
@@ -46,7 +48,8 @@ public class HttpFSAuthenticationFilter
 
   static final String CONF_PREFIX = "httpfs.authentication.";
 
-  private static final String SIGNATURE_SECRET_FILE = SIGNATURE_SECRET + ".file";
+  private static final String SIGNATURE_SECRET_FILE = SIGNATURE_SECRET
+      + ".file";
 
   /**
    * Returns the hadoop-auth configuration from HttpFSServer's configuration.
@@ -78,22 +81,25 @@ public class HttpFSAuthenticationFilter
 
     String signatureSecretFile = props.getProperty(SIGNATURE_SECRET_FILE, null);
     if (signatureSecretFile == null) {
-      throw new RuntimeException("Undefined property: " + SIGNATURE_SECRET_FILE);
+      throw new RuntimeException("Undefined property: "
+          + SIGNATURE_SECRET_FILE);
     }
 
-    try {
-      StringBuilder secret = new StringBuilder();
-      Reader reader = new InputStreamReader(Files.newInputStream(Paths.get(
-          signatureSecretFile)), StandardCharsets.UTF_8);
-      int c = reader.read();
-      while (c > -1) {
-        secret.append((char)c);
-        c = reader.read();
+    if (!isRandomSecret(filterConfig)) {
+      try (Reader reader = new InputStreamReader(Files.newInputStream(
+          Paths.get(signatureSecretFile)), StandardCharsets.UTF_8)) {
+        StringBuilder secret = new StringBuilder();
+        int c = reader.read();
+        while (c > -1) {
+          secret.append((char) c);
+          c = reader.read();
+        }
+        props.setProperty(AuthenticationFilter.SIGNATURE_SECRET,
+            secret.toString());
+      } catch (IOException ex) {
+        throw new RuntimeException("Could not read HttpFS signature "
+            + "secret file: " + signatureSecretFile);
       }
-      reader.close();
-      props.setProperty(AuthenticationFilter.SIGNATURE_SECRET, secret.toString());
-    } catch (IOException ex) {
-      throw new RuntimeException("Could not read HttpFS signature secret file: " + signatureSecretFile);
     }
     setAuthHandlerClass(props);
     String dtkind = WebHdfsConstants.WEBHDFS_TOKEN_KIND.toString();
@@ -115,4 +121,12 @@ public class HttpFSAuthenticationFilter
     return conf;
   }
 
+  private boolean isRandomSecret(FilterConfig filterConfig) { // true iff a random-secret provider is already registered in the servlet context
+    SignerSecretProvider secretProvider = (SignerSecretProvider) filterConfig
+        .getServletContext().getAttribute(SIGNER_SECRET_PROVIDER_ATTRIBUTE);
+    if (secretProvider == null) {
+      return false; // no provider registered yet; caller will read the secret file instead
+    }
+    return secretProvider.getClass() == RandomSignerSecretProvider.class; // exact class match, not instanceof, by design
+  }
 }

+ 3 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/resources/httpfs-default.xml

@@ -157,6 +157,9 @@
 
       If multiple HttpFS servers are used in a load-balancer/round-robin fashion,
       they should share the secret file.
+
+      If the secret file specified here does not exist, a random secret is
+      generated at startup time.
     </description>
   </property>
 

+ 58 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServerWebServerWithRandomSecret.java

@@ -0,0 +1,58 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs.http.server;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.apache.hadoop.util.Shell;
+import org.junit.BeforeClass;
+
+import java.io.File;
+
+/**
+ * Unlike {@link TestHttpFSServerWebServer}, no httpfs-signature.secret file
+ * is created here, so the server should fall back to a random secret.
+ */
+public class TestHttpFSServerWebServerWithRandomSecret extends
+    TestHttpFSServerWebServer {
+  @BeforeClass
+  public static void beforeClass() throws Exception {
+    File homeDir = GenericTestUtils.getTestDir();
+    File confDir = new File(homeDir, "etc/hadoop"); // config dir deliberately left without a secret file
+    File logsDir = new File(homeDir, "logs");
+    File tempDir = new File(homeDir, "temp");
+    confDir.mkdirs();
+    logsDir.mkdirs();
+    tempDir.mkdirs();
+
+    if (Shell.WINDOWS) { // winutils is required for Hadoop shell operations on Windows
+      File binDir = new File(homeDir, "bin");
+      binDir.mkdirs();
+      File winutils = Shell.getWinUtilsFile();
+      if (winutils.exists()) { // best-effort copy; absent winutils is tolerated here
+        FileUtils.copyFileToDirectory(winutils, binDir);
+      }
+    }
+
+    System.setProperty("hadoop.home.dir", homeDir.getAbsolutePath());
+    System.setProperty("hadoop.log.dir", logsDir.getAbsolutePath());
+    System.setProperty("httpfs.home.dir", homeDir.getAbsolutePath());
+    System.setProperty("httpfs.log.dir", logsDir.getAbsolutePath());
+    System.setProperty("httpfs.config.dir", confDir.getAbsolutePath());
+  }
+}