
HADOOP-16354. Enable AuthFilter as default for WebHDFS.
Contributed by Prabhu Joseph

Eric Yang 6 years ago
parent
commit
4ea6c2f457

+ 86 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authentication/server/ProxyUserAuthenticationFilter.java

@@ -18,12 +18,18 @@ import org.apache.hadoop.security.authorize.AuthorizationException;
 import org.apache.hadoop.security.authorize.ProxyUsers;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.HttpExceptionUtils;
+import org.apache.hadoop.util.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.security.Principal;
+import java.util.ArrayList;
 import java.util.Enumeration;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
 import javax.servlet.FilterChain;
 import javax.servlet.FilterConfig;
 import javax.servlet.ServletException;
@@ -41,7 +47,7 @@ public class ProxyUserAuthenticationFilter extends AuthenticationFilter {
   private static final Logger LOG = LoggerFactory.getLogger(
       ProxyUserAuthenticationFilter.class);
 
-  private static final String DO_AS = "doAs";
+  private static final String DO_AS = "doas";
   public static final String PROXYUSER_PREFIX = "proxyuser";
 
   @Override
@@ -54,8 +60,9 @@ public class ProxyUserAuthenticationFilter extends AuthenticationFilter {
   @Override
   protected void doFilter(FilterChain filterChain, HttpServletRequest request,
       HttpServletResponse response) throws IOException, ServletException {
+    final HttpServletRequest lowerCaseRequest = toLowerCase(request);
+    String doAsUser = lowerCaseRequest.getParameter(DO_AS);
 
-    String doAsUser = request.getParameter(DO_AS);
     if (doAsUser != null && !doAsUser.equals(request.getRemoteUser())) {
       LOG.debug("doAsUser = {}, RemoteUser = {} , RemoteAddress = {} ",
           doAsUser, request.getRemoteUser(), request.getRemoteAddr());
@@ -111,5 +118,82 @@ public class ProxyUserAuthenticationFilter extends AuthenticationFilter {
     return conf;
   }
 
+  static boolean containsUpperCase(final Iterable<String> strings) {
+    for(String s : strings) {
+      for(int i = 0; i < s.length(); i++) {
+        if (Character.isUpperCase(s.charAt(i))) {
+          return true;
+        }
+      }
+    }
+    return false;
+  }
+
+  public static HttpServletRequest toLowerCase(
+      final HttpServletRequest request) {
+    @SuppressWarnings("unchecked")
+    final Map<String, String[]> original = (Map<String, String[]>)
+        request.getParameterMap();
+    if (!containsUpperCase(original.keySet())) {
+      return request;
+    }
+
+    final Map<String, List<String>> m = new HashMap<String, List<String>>();
+    for (Map.Entry<String, String[]> entry : original.entrySet()) {
+      final String key = StringUtils.toLowerCase(entry.getKey());
+      List<String> strings = m.get(key);
+      if (strings == null) {
+        strings = new ArrayList<String>();
+        m.put(key, strings);
+      }
+      for (String v : entry.getValue()) {
+        strings.add(v);
+      }
+    }
+
+    return new HttpServletRequestWrapper(request) {
+      private Map<String, String[]> parameters = null;
+
+      @Override
+      public Map<String, String[]> getParameterMap() {
+        if (parameters == null) {
+          parameters = new HashMap<String, String[]>();
+          for (Map.Entry<String, List<String>> entry : m.entrySet()) {
+            final List<String> a = entry.getValue();
+            parameters.put(entry.getKey(), a.toArray(new String[a.size()]));
+          }
+        }
+        return parameters;
+      }
+
+      @Override
+      public String getParameter(String name) {
+        final List<String> a = m.get(name);
+        return a == null ? null : a.get(0);
+      }
+
+      @Override
+      public String[] getParameterValues(String name) {
+        return getParameterMap().get(name);
+      }
+
+      @Override
+      public Enumeration<String> getParameterNames() {
+        final Iterator<String> i = m.keySet().iterator();
+        return new Enumeration<String>() {
+          @Override
+          public boolean hasMoreElements() {
+            return i.hasNext();
+          }
+
+          @Override
+          public String nextElement() {
+            return i.next();
+          }
+        };
+      }
+    };
+  }
+
 }
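
A minimal usage sketch of the new toLowerCase() helper (hypothetical, written in the style of the accompanying TestProxyUserAuthenticationFilter and assuming its Mockito/servlet imports; not part of this patch). The wrapper it returns exposes every query parameter under a lower-cased name, which is why the filter can now look up the constant "doas" no matter how the client cased "doAs":

    Map<String, String[]> params = new HashMap<>();
    params.put("doAs", new String[] {"testuser"});
    HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
    Mockito.when(request.getParameterMap()).thenReturn(params);

    // "doAs" contains an upper-case letter, so a lower-casing wrapper is returned
    HttpServletRequest wrapped = ProxyUserAuthenticationFilter.toLowerCase(request);
    // wrapped.getParameter("doas") returns "testuser"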
 

+ 1 - 1
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authentication/server/TestProxyUserAuthenticationFilter.java

@@ -105,7 +105,7 @@ public class TestProxyUserAuthenticationFilter {
 
     HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
     Mockito.when(request.getRemoteUser()).thenReturn("knox");
-    Mockito.when(request.getParameter("doAs")).thenReturn("testuser");
+    Mockito.when(request.getParameter("doas")).thenReturn("testuser");
     Mockito.when(request.getRemoteAddr()).thenReturn("127.0.0.1");
     Mockito.when(request.getUserPrincipal()).thenReturn(new Principal() {
       @Override

+ 27 - 0
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java

@@ -53,6 +53,7 @@ import java.util.Collection;
 import java.util.Comparator;
 import java.util.Date;
 import java.util.HashSet;
+import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -63,6 +64,7 @@ import org.apache.commons.cli.Option;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
 import org.apache.commons.cli.PosixParser;
+import org.apache.commons.lang3.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.HadoopIllegalArgumentException;
@@ -78,13 +80,16 @@ import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifie
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants;
 import org.apache.hadoop.hdfs.server.common.Util;
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
+import org.apache.hadoop.hdfs.web.AuthFilterInitializer;
 import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.http.HttpServer2;
 import org.apache.hadoop.ipc.ProtobufRpcEngine;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.AuthenticationFilterInitializer;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.server.ProxyUserAuthenticationFilterInitializer;
 import org.apache.hadoop.security.authorize.AccessControlList;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.util.ToolRunner;
@@ -1609,6 +1614,28 @@ public class DFSUtil {
       String spnegoKeytabFileKey) throws IOException {
     HttpConfig.Policy policy = getHttpPolicy(conf);
 
+    String filterInitializerConfKey = "hadoop.http.filter.initializers";
+    String initializers = conf.get(filterInitializerConfKey, "");
+
+    String[] parts = initializers.split(",");
+    Set<String> target = new LinkedHashSet<String>();
+    for (String filterInitializer : parts) {
+      filterInitializer = filterInitializer.trim();
+      if (filterInitializer.equals(
+          AuthenticationFilterInitializer.class.getName()) ||
+          filterInitializer.equals(
+          ProxyUserAuthenticationFilterInitializer.class.getName()) ||
+          filterInitializer.isEmpty()) {
+        continue;
+      }
+      target.add(filterInitializer);
+    }
+    target.add(AuthFilterInitializer.class.getName());
+    initializers = StringUtils.join(target, ",");
+    conf.set(filterInitializerConfKey, initializers);
+
+    LOG.info("Filter initializers set : " + initializers);
+
     HttpServer2.Builder builder = new HttpServer2.Builder().setName(name)
         .setConf(conf).setACL(new AccessControlList(conf.get(DFS_ADMIN, " ")))
         .setSecurityEnabled(UserGroupInformation.isSecurityEnabled())
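
A rough illustration of what the block added above does to hadoop.http.filter.initializers (com.example.MyFilterInitializer is a made-up placeholder): the generic AuthenticationFilterInitializer and ProxyUserAuthenticationFilterInitializer entries are dropped, any other configured initializers are kept in order, and AuthFilterInitializer is appended so the NameNode HTTP server always installs AuthFilter for WebHDFS:

    Configuration conf = new Configuration();
    // hypothetical operator-supplied value
    conf.set("hadoop.http.filter.initializers",
        "org.apache.hadoop.security.AuthenticationFilterInitializer,"
            + "com.example.MyFilterInitializer");

    // After the helper above rewrites the key, it would read roughly:
    //   com.example.MyFilterInitializer,org.apache.hadoop.hdfs.web.AuthFilterInitializer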

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java

@@ -139,7 +139,7 @@ public class JspHelper {
         // filter
         ugi.setAuthenticationMethod(secureAuthMethod);
       }
-      if (doAsUserFromQuery != null) {
+      if (doAsUserFromQuery != null && !doAsUserFromQuery.equals(remoteUser)) {
         // create and attempt to authorize a proxy user
         ugi = UserGroupInformation.createProxyUser(doAsUserFromQuery, ugi);
         ProxyUsers.authorize(ugi, getRemoteAddr(request));

+ 0 - 50
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java

@@ -22,10 +22,7 @@ import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_WEBHDFS_RES
 
 import java.io.IOException;
 import java.net.InetSocketAddress;
-import java.util.HashMap;
-import java.util.Iterator;
 import java.util.Map;
-import java.util.Map.Entry;
 
 import javax.servlet.ServletContext;
 
@@ -41,7 +38,6 @@ import org.apache.hadoop.hdfs.server.common.JspHelper;
 import org.apache.hadoop.hdfs.server.common.TokenVerifier;
 import org.apache.hadoop.hdfs.server.namenode.startupprogress.StartupProgress;
 import org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods;
-import org.apache.hadoop.hdfs.web.AuthFilter;
 import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
 import org.apache.hadoop.hdfs.web.resources.AclPermissionParam;
 import org.apache.hadoop.hdfs.web.resources.Param;
@@ -49,8 +45,6 @@ import org.apache.hadoop.hdfs.web.resources.UserParam;
 import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.http.HttpServer2;
 import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.security.SecurityUtil;
-import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.http.RestCsrfPreventionFilter;
 
 /**
@@ -183,50 +177,6 @@ public class NameNodeHttpServer {
           NetUtils.getHostPortString(httpsAddress));
     }
   }
-  
-  private static Map<String, String> getAuthFilterParams(Configuration conf,
-      String hostname, String httpKeytab) throws IOException {
-    Map<String, String> params = new HashMap<String, String>();
-    // Select configs beginning with 'dfs.web.authentication.'
-    Iterator<Map.Entry<String, String>> iterator = conf.iterator();
-    while (iterator.hasNext()) {
-      Entry<String, String> kvPair = iterator.next();
-      if (kvPair.getKey().startsWith(AuthFilter.CONF_PREFIX)) {
-        params.put(kvPair.getKey(), kvPair.getValue());
-      }
-    }
-    String principalInConf = conf
-        .get(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY);
-    if (principalInConf != null && !principalInConf.isEmpty()) {
-      params
-          .put(
-              DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY,
-              SecurityUtil.getServerPrincipal(principalInConf, hostname));
-    } else if (UserGroupInformation.isSecurityEnabled()) {
-      HttpServer2.LOG.error(
-          "WebHDFS and security are enabled, but configuration property '" +
-          DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY +
-          "' is not set.");
-    }
-    if (httpKeytab != null && !httpKeytab.isEmpty()) {
-      params.put(
-          DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_KEYTAB_KEY,
-          httpKeytab);
-    } else if (UserGroupInformation.isSecurityEnabled()) {
-      HttpServer2.LOG.error(
-          "WebHDFS and security are enabled, but configuration property '" +
-          DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_KEYTAB_KEY +
-          "' is not set.");
-    }
-    String anonymousAllowed = conf
-      .get(DFSConfigKeys.DFS_WEB_AUTHENTICATION_SIMPLE_ANONYMOUS_ALLOWED);
-    if (anonymousAllowed != null && !anonymousAllowed.isEmpty()) {
-    params.put(
-        DFSConfigKeys.DFS_WEB_AUTHENTICATION_SIMPLE_ANONYMOUS_ALLOWED,
-        anonymousAllowed);
-    }
-    return params;
-  }
 
   /**
    * Joins the httpserver.

+ 7 - 108
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/AuthFilter.java

@@ -18,138 +18,37 @@
 package org.apache.hadoop.hdfs.web;
 
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Enumeration;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
 
 import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
 import javax.servlet.ServletException;
 import javax.servlet.ServletRequest;
 import javax.servlet.ServletResponse;
 import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletRequestWrapper;
 
 import org.apache.hadoop.hdfs.web.resources.DelegationParam;
-import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
-import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler;
-import org.apache.hadoop.security.authentication.server.PseudoAuthenticationHandler;
-import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.security.authentication.server.ProxyUserAuthenticationFilter;
 
 /**
  * Subclass of {@link AuthenticationFilter} that
  * obtains Hadoop-Auth configuration for webhdfs.
  */
-public class AuthFilter extends AuthenticationFilter {
-  public static final String CONF_PREFIX = "dfs.web.authentication.";
-
-  /**
-   * Returns the filter configuration properties,
-   * including the ones prefixed with {@link #CONF_PREFIX}.
-   * The prefix is removed from the returned property names.
-   *
-   * @param prefix parameter not used.
-   * @param config parameter contains the initialization values.
-   * @return Hadoop-Auth configuration properties.
-   * @throws ServletException 
-   */
-  @Override
-  protected Properties getConfiguration(String prefix, FilterConfig config)
-      throws ServletException {
-    final Properties p = super.getConfiguration(CONF_PREFIX, config);
-    // if not set, configure based on security enabled
-    if (p.getProperty(AUTH_TYPE) == null) {
-      p.setProperty(AUTH_TYPE, UserGroupInformation.isSecurityEnabled()?
-          KerberosAuthenticationHandler.TYPE: PseudoAuthenticationHandler.TYPE);
-    }
-    // if not set, enable anonymous for pseudo authentication
-    if (p.getProperty(PseudoAuthenticationHandler.ANONYMOUS_ALLOWED) == null) {
-      p.setProperty(PseudoAuthenticationHandler.ANONYMOUS_ALLOWED, "true");
-    }
-    //set cookie path
-    p.setProperty(COOKIE_PATH, "/");
-    return p;
-  }
+public class AuthFilter extends ProxyUserAuthenticationFilter {
 
   @Override
   public void doFilter(ServletRequest request, ServletResponse response,
       FilterChain filterChain) throws IOException, ServletException {
-    final HttpServletRequest httpRequest = toLowerCase((HttpServletRequest)request);
+    final HttpServletRequest httpRequest = ProxyUserAuthenticationFilter.
+        toLowerCase((HttpServletRequest)request);
     final String tokenString = httpRequest.getParameter(DelegationParam.NAME);
-    if (tokenString != null) {
+    if (tokenString != null && httpRequest.getServletPath().startsWith(
+        WebHdfsFileSystem.PATH_PREFIX)) {
       //Token is present in the url, therefore token will be used for
       //authentication, bypass kerberos authentication.
       filterChain.doFilter(httpRequest, response);
       return;
     }
-    super.doFilter(httpRequest, response, filterChain);
+    super.doFilter(request, response, filterChain);
   }
 
-  private static HttpServletRequest toLowerCase(final HttpServletRequest request) {
-    @SuppressWarnings("unchecked")
-    final Map<String, String[]> original = (Map<String, String[]>)request.getParameterMap();
-    if (!ParamFilter.containsUpperCase(original.keySet())) {
-      return request;
-    }
-
-    final Map<String, List<String>> m = new HashMap<String, List<String>>();
-    for(Map.Entry<String, String[]> entry : original.entrySet()) {
-      final String key = StringUtils.toLowerCase(entry.getKey());
-      List<String> strings = m.get(key);
-      if (strings == null) {
-        strings = new ArrayList<String>();
-        m.put(key, strings);
-      }
-      for(String v : entry.getValue()) {
-        strings.add(v);
-      }
-    }
-
-    return new HttpServletRequestWrapper(request) {
-      private Map<String, String[]> parameters = null;
-
-      @Override
-      public Map<String, String[]> getParameterMap() {
-        if (parameters == null) {
-          parameters = new HashMap<String, String[]>();
-          for(Map.Entry<String, List<String>> entry : m.entrySet()) {
-            final List<String> a = entry.getValue();
-            parameters.put(entry.getKey(), a.toArray(new String[a.size()]));
-          }
-        }
-       return parameters;
-      }
-
-      @Override
-      public String getParameter(String name) {
-        final List<String> a = m.get(name);
-        return a == null? null: a.get(0);
-      }
-      
-      @Override
-      public String[] getParameterValues(String name) {
-        return getParameterMap().get(name);
-      }
-
-      @Override
-      public Enumeration<String> getParameterNames() {
-        final Iterator<String> i = m.keySet().iterator();
-        return new Enumeration<String>() {
-          @Override
-          public boolean hasMoreElements() {
-            return i.hasNext();
-          }
-          @Override
-          public String nextElement() {
-            return i.next();
-          }
-        };
-      }
-    };
-  }
 }
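
For illustration only (the mock setup is hypothetical, mirroring TestProxyUserAuthenticationFilter): with the extra servlet-path check, a request carrying a delegation token bypasses authentication only when it targets the WebHDFS REST path; the same token on any other path now falls through to ProxyUserAuthenticationFilter:

    HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
    Mockito.when(request.getParameterMap()).thenReturn(new HashMap<String, String[]>());
    Mockito.when(request.getParameter(DelegationParam.NAME)).thenReturn("<token>");
    Mockito.when(request.getServletPath()).thenReturn(WebHdfsFileSystem.PATH_PREFIX);

    FilterChain chain = Mockito.mock(FilterChain.class);
    new AuthFilter().doFilter(request, Mockito.mock(HttpServletResponse.class), chain);
    // chain.doFilter(...) runs directly; no SPNEGO exchange is attempted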

+ 69 - 0
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/AuthFilterInitializer.java

@@ -0,0 +1,69 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfs.web;
+
+import java.util.Map;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.http.FilterContainer;
+import org.apache.hadoop.http.FilterInitializer;
+import org.apache.hadoop.security.AuthenticationFilterInitializer;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authorize.ProxyUsers;
+import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler;
+import org.apache.hadoop.security.authentication.server.PseudoAuthenticationHandler;
+
+/**
+ * Filter initializer to initialize {@link AuthFilter}.
+ */
+public class AuthFilterInitializer extends FilterInitializer {
+
+  private String configPrefix;
+
+  public AuthFilterInitializer() {
+    this.configPrefix = "hadoop.http.authentication.";
+  }
+
+  protected Map<String, String> createFilterConfig(Configuration conf) {
+    Map<String, String> filterConfig = AuthenticationFilterInitializer
+        .getFilterConfigMap(conf, configPrefix);
+
+    for (Map.Entry<String, String> entry : conf.getPropsWithPrefix(
+        ProxyUsers.CONF_HADOOP_PROXYUSER).entrySet()) {
+      filterConfig.put("proxyuser" + entry.getKey(), entry.getValue());
+    }
+
+    if (filterConfig.get("type") == null) {
+      filterConfig.put("type", UserGroupInformation.isSecurityEnabled() ?
+          KerberosAuthenticationHandler.TYPE :
+          PseudoAuthenticationHandler.TYPE);
+    }
+
+    //set cookie path
+    filterConfig.put("cookie.path", "/");
+    return filterConfig;
+  }
+
+  @Override
+  public void initFilter(FilterContainer container, Configuration conf) {
+    Map<String, String> filterConfig = createFilterConfig(conf);
+    container.addFilter("AuthFilter", AuthFilter.class.getName(),
+        filterConfig);
+  }
+
+}
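
A small sketch of how the initializer assembles the filter configuration (property values are placeholders; createFilterConfig() is protected, so a direct call like this assumes a caller in the same package, similar to how TestAuthFilter below exercises it via initFilter()): hadoop.http.authentication.* keys are copied with the prefix stripped, hadoop.proxyuser.* keys are re-prefixed with "proxyuser", and cookie.path is forced to "/":

    Configuration conf = new Configuration();
    conf.set("hadoop.http.authentication.type", "kerberos");
    conf.set("hadoop.proxyuser.knox.hosts", "*");

    Map<String, String> filterConfig = new AuthFilterInitializer().createFilterConfig(conf);
    // filterConfig.get("type")                 -> "kerberos"
    // filterConfig.get("proxyuser.knox.hosts") -> "*"
    // filterConfig.get("cookie.path")          -> "/"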

+ 40 - 85
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestAuthFilter.java

@@ -17,100 +17,55 @@
  */
 package org.apache.hadoop.hdfs.web;
 
-import java.util.Collections;
-import java.util.Enumeration;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Properties;
-
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletContext;
-import javax.servlet.ServletException;
 
-import org.apache.hadoop.hdfs.DFSConfigKeys;
+import java.util.Map;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.http.FilterContainer;
 import org.apache.hadoop.security.authentication.server.PseudoAuthenticationHandler;
-import org.junit.Assert;
 import org.junit.Test;
 
+import org.mockito.Mockito;
+import org.mockito.invocation.InvocationOnMock;
+import org.mockito.stubbing.Answer;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+
 public class TestAuthFilter {
-  
-  private static class DummyFilterConfig implements FilterConfig {
-    final Map<String, String> map;
-    
-    DummyFilterConfig(Map<String,String> map) {
-      this.map = map;
-    }
-    
-    @Override
-    public String getFilterName() {
-      return "dummy";
-    }
-    @Override
-    public String getInitParameter(String arg0) {
-      return map.get(arg0);
-    }
-    @Override
-    public Enumeration<String> getInitParameterNames() {
-      return Collections.enumeration(map.keySet());
-    }
-    @Override
-    public ServletContext getServletContext() {
-      return null;
-    }
-  }
-  
-  @Test
-  public void testGetConfiguration() throws ServletException {
-    AuthFilter filter = new AuthFilter();
-    Map<String, String> m = new HashMap<String,String>();
-    m.put(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY,
-        "xyz/thehost@REALM");
-    m.put(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_KEYTAB_KEY,
-        "thekeytab");
-    FilterConfig config = new DummyFilterConfig(m);
-    Properties p = filter.getConfiguration("random", config);
-    Assert.assertEquals("xyz/thehost@REALM",
-        p.getProperty("kerberos.principal"));
-    Assert.assertEquals("thekeytab", p.getProperty("kerberos.keytab"));
-    Assert.assertEquals("true",
-        p.getProperty(PseudoAuthenticationHandler.ANONYMOUS_ALLOWED));
-  }
-  
-  @Test
-  public void testGetSimpleAuthDisabledConfiguration() throws ServletException {
-    AuthFilter filter = new AuthFilter();
-    Map<String, String> m = new HashMap<String,String>();
-    m.put(DFSConfigKeys.DFS_WEB_AUTHENTICATION_SIMPLE_ANONYMOUS_ALLOWED,
-        "false");
-    FilterConfig config = new DummyFilterConfig(m);
-    Properties p = filter.getConfiguration("random", config);
-    Assert.assertEquals("false",
-        p.getProperty(PseudoAuthenticationHandler.ANONYMOUS_ALLOWED));
-  }
-  
-  @Test
-  public void testGetSimpleAuthDefaultConfiguration() throws ServletException {
-    AuthFilter filter = new AuthFilter();
-    Map<String, String> m = new HashMap<String,String>();
-    
-    FilterConfig config = new DummyFilterConfig(m);
-    Properties p = filter.getConfiguration("random", config);
-    Assert.assertEquals("true",
-        p.getProperty(PseudoAuthenticationHandler.ANONYMOUS_ALLOWED));
-  }
+
+  private static final String PREFIX = "hadoop.http.authentication.";
 
   @Test
-  public void testGetCustomAuthConfiguration() throws ServletException {
-    AuthFilter filter = new AuthFilter();
-    Map<String, String> m = new HashMap<String,String>();
+  public void testGetConfiguration() {
+    Configuration conf = new Configuration();
+    conf.set(PREFIX + "type", "kerberos");
+    conf.set(PREFIX + "kerberos.keytab", "thekeytab");
+    conf.set(PREFIX + "kerberos.principal", "xyz/thehost@REALM");
 
-    m.put(AuthFilter.CONF_PREFIX + AuthFilter.AUTH_TYPE, "com.yourclass");
-    m.put(AuthFilter.CONF_PREFIX + "alt-kerberos.param", "value");
-    FilterConfig config = new DummyFilterConfig(m);
+    FilterContainer container = Mockito.mock(FilterContainer.class);
+    Mockito.doAnswer(new Answer() {
+      @Override
+      public Object answer(InvocationOnMock invocationOnMock) {
+        Object[] args = invocationOnMock.getArguments();
 
-    Properties p = filter.getConfiguration(AuthFilter.CONF_PREFIX, config);
-    Assert.assertEquals("com.yourclass", p.getProperty(AuthFilter.AUTH_TYPE));
-    Assert.assertEquals("value", p.getProperty("alt-kerberos.param"));
+        assertEquals("AuthFilter", args[0]);
+        assertEquals(AuthFilter.class.getName(), args[1]);
+
+        Map<String, String> conf = (Map<String, String>) args[2];
+        assertEquals("/", conf.get("cookie.path"));
+        assertEquals("kerberos", conf.get("type"));
+        assertNull(conf.get("cookie.domain"));
+        assertEquals("xyz/thehost@REALM", conf.get("kerberos.principal"));
+        assertEquals("thekeytab", conf.get("kerberos.keytab"));
+        assertEquals("true",
+            conf.get(PseudoAuthenticationHandler.ANONYMOUS_ALLOWED));
+
+        return null;
+      }
+    }).when(container).addFilter(Mockito.any(), Mockito.any(), Mockito.any());
+
+    new AuthFilterInitializer().initFilter(container, conf);
   }
 
+
 }