
HDFS-14845. Ignore AuthenticationFilterInitializer for HttpFSServerWebServer and honor hadoop.http.authentication configs.

Signed-off-by: Akira Ajisaka <aajisaka@apache.org>
Prabhu Joseph 5 years ago
parent
commit
3f89084ac7

+ 4 - 0
hadoop-common-project/hadoop-common/src/site/markdown/DeprecatedProperties.md

@@ -74,6 +74,10 @@ The following table lists the configuration property names that are deprecated i
 | hadoop.pipes.java.reducer | mapreduce.pipes.isjavareducer |
 | hadoop.pipes.partitioner | mapreduce.pipes.partitioner |
 | heartbeat.recheck.interval | dfs.namenode.heartbeat.recheck-interval |
+| httpfs.authentication.kerberos.keytab | hadoop.http.authentication.kerberos.keytab |
+| httpfs.authentication.kerberos.principal | hadoop.http.authentication.kerberos.principal |
+| httpfs.authentication.signature.secret.file | hadoop.http.authentication.signature.secret.file |
+| httpfs.authentication.type | hadoop.http.authentication.type |
 | io.bytes.per.checksum | dfs.bytes-per-checksum |
 | io.sort.factor | mapreduce.task.io.sort.factor |
 | io.sort.mb | mapreduce.task.io.sort.mb |

+ 14 - 2
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSAuthenticationFilter.java

@@ -48,6 +48,8 @@ public class HttpFSAuthenticationFilter

   static final String CONF_PREFIX = "httpfs.authentication.";

+  static final String HADOOP_HTTP_CONF_PREFIX = "hadoop.http.authentication.";
+
   private static final String SIGNATURE_SECRET_FILE = SIGNATURE_SECRET
       + ".file";

@@ -55,8 +57,9 @@ public class HttpFSAuthenticationFilter
   * Returns the hadoop-auth configuration from HttpFSServer's configuration.
   * <p>
   * It returns all HttpFSServer's configuration properties prefixed with
-   * <code>httpfs.authentication</code>. The <code>httpfs.authentication</code>
-   * prefix is removed from the returned property names.
+   * <code>hadoop.http.authentication</code>. The
+   * <code>hadoop.http.authentication</code> prefix is removed from the
+   * returned property names.
   *
   * @param configPrefix parameter not used.
   * @param filterConfig parameter not used.
@@ -70,6 +73,15 @@ public class HttpFSAuthenticationFilter
    Configuration conf = HttpFSServerWebApp.get().getConfig();

    props.setProperty(AuthenticationFilter.COOKIE_PATH, "/");
+    for (Map.Entry<String, String> entry : conf) {
+      String name = entry.getKey();
+      if (name.startsWith(HADOOP_HTTP_CONF_PREFIX)) {
+        name = name.substring(HADOOP_HTTP_CONF_PREFIX.length());
+        props.setProperty(name, entry.getValue());
+      }
+    }
+
+    // Replace Hadoop Http Authentication Configs with HttpFS specific Configs
    for (Map.Entry<String, String> entry : conf) {
      String name = entry.getKey();
      if (name.startsWith(CONF_PREFIX)) {
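
The two loops above give the new hadoop.http.authentication.* keys effect while keeping the deprecated httpfs.authentication.* keys authoritative: the first pass copies the new-style values, the second pass overwrites them with any HttpFS-specific ones. A minimal standalone sketch of that precedence (the class below is invented for illustration; it assumes the pre-existing second loop strips CONF_PREFIX the same way the new loop strips HADOOP_HTTP_CONF_PREFIX, as the code following this hunk does):

import java.util.Map;
import java.util.Properties;
import org.apache.hadoop.conf.Configuration;

public class AuthConfigPrecedenceSketch {
  static final String HADOOP_HTTP_CONF_PREFIX = "hadoop.http.authentication.";
  static final String CONF_PREFIX = "httpfs.authentication.";

  // Mirrors the two-pass copy in HttpFSAuthenticationFilter#getConfiguration:
  // new-style keys first, deprecated keys second, so deprecated keys win.
  static Properties resolveAuthProps(Configuration conf) {
    Properties props = new Properties();
    for (Map.Entry<String, String> entry : conf) {
      String name = entry.getKey();
      if (name.startsWith(HADOOP_HTTP_CONF_PREFIX)) {
        props.setProperty(name.substring(HADOOP_HTTP_CONF_PREFIX.length()),
            entry.getValue());
      }
    }
    for (Map.Entry<String, String> entry : conf) {
      String name = entry.getKey();
      if (name.startsWith(CONF_PREFIX)) {
        props.setProperty(name.substring(CONF_PREFIX.length()),
            entry.getValue());
      }
    }
    return props;
  }

  public static void main(String[] args) {
    Configuration conf = new Configuration(false);
    conf.set("hadoop.http.authentication.type", "kerberos");
    conf.set("httpfs.authentication.type", "simple"); // deprecated key still wins
    System.out.println(resolveAuthProps(conf).getProperty("type")); // simple
  }
}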

+ 22 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServerWebServer.java

@@ -24,11 +24,15 @@ import java.net.InetSocketAddress;
 import java.net.MalformedURLException;
 import java.net.URI;
 import java.net.URL;
+import java.util.LinkedHashSet;
+import java.util.Set;

 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.ConfigurationWithLogging;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.http.HttpServer2;
+import org.apache.hadoop.security.AuthenticationFilterInitializer;
+import org.apache.hadoop.security.authentication.server.ProxyUserAuthenticationFilterInitializer;
 import org.apache.hadoop.security.authorize.AccessControlList;
 import org.apache.hadoop.security.ssl.SSLFactory;
 import org.slf4j.Logger;
@@ -98,6 +102,24 @@ public class HttpFSServerWebServer {
    int port = conf.getInt(HTTP_PORT_KEY, HTTP_PORT_DEFAULT);
    URI endpoint = new URI(scheme, null, host, port, null, null, null);

+    // Allow the default authFilter HttpFSAuthenticationFilter
+    String configuredInitializers = conf.get(HttpServer2.
+        FILTER_INITIALIZER_PROPERTY);
+    if (configuredInitializers != null) {
+      Set<String> target = new LinkedHashSet<String>();
+      String[] parts = configuredInitializers.split(",");
+      for (String filterInitializer : parts) {
+        if (!filterInitializer.equals(AuthenticationFilterInitializer.class.
+            getName()) && !filterInitializer.equals(
+            ProxyUserAuthenticationFilterInitializer.class.getName())) {
+          target.add(filterInitializer);
+        }
+      }
+      String actualInitializers =
+          org.apache.commons.lang3.StringUtils.join(target, ",");
+      conf.set(HttpServer2.FILTER_INITIALIZER_PROPERTY, actualInitializers);
+    }
+
    httpServer = new HttpServer2.Builder()
        .setName(NAME)
        .setConf(conf)
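
The block above strips any explicitly configured AuthenticationFilterInitializer or ProxyUserAuthenticationFilterInitializer from hadoop.http.filter.initializers before the server is built, so they cannot clash with the HttpFSAuthenticationFilter that HttpFS installs itself; all other initializers keep their original order (hence the LinkedHashSet). The same logic as a self-contained sketch (class and method names invented; String.join stands in for the commons-lang3 StringUtils.join the commit uses):

import java.util.LinkedHashSet;
import java.util.Set;

public class FilterInitializerScrubSketch {
  // Drops the two auth filter initializers from a comma-separated
  // hadoop.http.filter.initializers value, preserving the order of the rest.
  static String scrub(String configuredInitializers) {
    Set<String> kept = new LinkedHashSet<>();
    for (String initializer : configuredInitializers.split(",")) {
      if (!initializer.equals(
              "org.apache.hadoop.security.AuthenticationFilterInitializer")
          && !initializer.equals("org.apache.hadoop.security.authentication."
              + "server.ProxyUserAuthenticationFilterInitializer")) {
        kept.add(initializer);
      }
    }
    return String.join(",", kept);
  }

  public static void main(String[] args) {
    System.out.println(scrub(
        "org.apache.hadoop.security.AuthenticationFilterInitializer,"
            + "org.example.MyFilterInitializer"));
    // prints: org.example.MyFilterInitializer
  }
}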

+ 16 - 4
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/resources/httpfs-default.xml

@@ -148,7 +148,7 @@
  </property>

  <property>
-    <name>httpfs.authentication.signature.secret.file</name>
+    <name>hadoop.http.authentication.signature.secret.file</name>
    <value>${httpfs.config.dir}/httpfs-signature.secret</value>
    <description>
      File containing the secret to sign HttpFS hadoop-auth cookies.
@@ -160,11 +160,14 @@

      If the secret file specified here does not exist, random secret is
      generated at startup time.
+
+      httpfs.authentication.signature.secret.file is deprecated. Instead use
+      hadoop.http.authentication.signature.secret.file.
    </description>
  </property>

  <property>
-    <name>httpfs.authentication.type</name>
+    <name>hadoop.http.authentication.type</name>
    <value>simple</value>
    <description>
      Defines the authentication mechanism used by httpfs for its HTTP clients.
@@ -175,26 +178,35 @@
      'user.name' query string parameter.

      If using 'kerberos' HTTP clients must use HTTP SPNEGO or delegation tokens.
+
+      httpfs.authentication.type is deprecated. Instead use
+      hadoop.http.authentication.type.
    </description>
  </property>

  <property>
-    <name>httpfs.authentication.kerberos.principal</name>
+    <name>hadoop.http.authentication.kerberos.principal</name>
    <value>HTTP/${httpfs.hostname}@${kerberos.realm}</value>
    <description>
      The HTTP Kerberos principal used by HttpFS in the HTTP endpoint.

      The HTTP Kerberos principal MUST start with 'HTTP/' per Kerberos
      HTTP SPNEGO specification.
+
+      httpfs.authentication.kerberos.principal is deprecated. Instead use
+      hadoop.http.authentication.kerberos.principal.
    </description>
  </property>

  <property>
-    <name>httpfs.authentication.kerberos.keytab</name>
+    <name>hadoop.http.authentication.kerberos.keytab</name>
    <value>${user.home}/httpfs.keytab</value>
    <description>
      The Kerberos keytab file with the credentials for the
      HTTP Kerberos principal used by httpfs in the HTTP endpoint.
+
+      httpfs.authentication.kerberos.keytab is deprecated. Instead use
+      hadoop.http.authentication.kerberos.keytab.
    </description>
  </property>
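
To confirm the renamed defaults resolve as documented, a quick check with the Hadoop Configuration API (illustration only; assumes httpfs-default.xml is on the classpath, and the config-dir path below is made up):

import org.apache.hadoop.conf.Configuration;

public class PrintHttpFSAuthDefaults {
  public static void main(String[] args) {
    Configuration conf = new Configuration(false);
    conf.addResource("httpfs-default.xml");
    // ${httpfs.config.dir} is expanded from this property,
    // ${user.home} from the JVM system property of the same name.
    conf.set("httpfs.config.dir", "/etc/hadoop/httpfs"); // assumed location
    System.out.println(conf.get("hadoop.http.authentication.type"));
    // -> simple
    System.out.println(
        conf.get("hadoop.http.authentication.signature.secret.file"));
    // -> /etc/hadoop/httpfs/httpfs-signature.secret
    System.out.println(conf.get("hadoop.http.authentication.kerberos.keytab"));
    // -> <user home>/httpfs.keytab (expanded from ${user.home})
  }
}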