
HDFS-2617. svn merge -c 1334216 from trunk

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1334225 13f79535-47bb-0310-9956-ffa450edef68
Eli Collins 13 years ago
parent
commit
2c4faa4b9c
18 changed files with 215 additions and 604 deletions
  1. + 8 - 32    hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
  2. + 0 - 232   hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Krb5AndCertsSslSocketConnector.java
  3. + 27 - 83   hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
  4. + 0 - 7     hadoop-common-project/hadoop-common/src/main/packages/templates/conf/hdfs-site.xml
  5. + 3 - 0     hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
  6. + 2 - 2     hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
  7. + 6 - 6     hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HftpFileSystem.java
  8. + 8 - 36    hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/GetImageServlet.java
  9. + 3 - 4     hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java
  10. + 88 - 112 hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
  11. + 41 - 55  hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java
  12. + 7 - 9    hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/TransferFsImage.java
  13. + 1 - 2    hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/BootstrapStandby.java
  14. + 1 - 1    hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/StandbyCheckpointer.java
  15. + 1 - 1    hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java
  16. + 4 - 11   hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSck.java
  17. + 4 - 11   hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DelegationTokenFetcher.java
  18. + 11 - 0   hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml

+ 8 - 32
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java

@@ -51,8 +51,6 @@ import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.jmx.JMXJsonServlet;
 import org.apache.hadoop.log.LogLevel;
 import org.apache.hadoop.metrics.MetricsServlet;
-import org.apache.hadoop.security.Krb5AndCertsSslSocketConnector;
-import org.apache.hadoop.security.Krb5AndCertsSslSocketConnector.MODE;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.AccessControlList;
 import org.apache.hadoop.util.ReflectionUtils;
@@ -98,6 +96,7 @@ public class HttpServer implements FilterContainer {
   // gets stored.
   public static final String CONF_CONTEXT_ATTRIBUTE = "hadoop.conf";
   static final String ADMINS_ACL = "admins.acl";
+  public static final String SPNEGO_FILTER = "SpnegoFilter";
 
   public static final String BIND_ADDRESS = "bind.address";
 
@@ -236,11 +235,7 @@ public class HttpServer implements FilterContainer {
     webServer.addHandler(webAppContext);
 
     addDefaultApps(contexts, appDir, conf);
-    
-    defineFilter(webAppContext, "krb5Filter", 
-        Krb5AndCertsSslSocketConnector.Krb5SslFilter.class.getName(), 
-        null, null);
-    
+        
     addGlobalFilter("safety", QuotingInputFilter.class.getName(), null);
     final FilterInitializer[] initializers = getFilterInitializers(conf); 
     if (initializers != null) {
@@ -423,12 +418,13 @@ public class HttpServer implements FilterContainer {
    * protect with Kerberos authentication. 
    * Note: This method is to be used for adding servlets that facilitate
    * internal communication and not for user facing functionality. For
-   * servlets added using this method, filters (except internal Kerberized
+   * servlets added using this method, filters (except internal Kerberos
    * filters) are not enabled. 
    * 
    * @param name The name of the servlet (can be passed as null)
    * @param pathSpec The path spec for the servlet
    * @param clazz The servlet class
+   * @param requireAuth Require Kerberos authentication to access servlet
    */
   public void addInternalServlet(String name, String pathSpec, 
       Class<? extends HttpServlet> clazz, boolean requireAuth) {
@@ -439,11 +435,11 @@ public class HttpServer implements FilterContainer {
     webAppContext.addServlet(holder, pathSpec);
     
     if(requireAuth && UserGroupInformation.isSecurityEnabled()) {
-       LOG.info("Adding Kerberos filter to " + name);
+       LOG.info("Adding Kerberos (SPNEGO) filter to " + name);
        ServletHandler handler = webAppContext.getServletHandler();
        FilterMapping fmap = new FilterMapping();
        fmap.setPathSpec(pathSpec);
-       fmap.setFilterName("krb5Filter");
+       fmap.setFilterName(SPNEGO_FILTER);
        fmap.setDispatches(Handler.ALL);
        handler.addFilterMapping(fmap);
     }
@@ -579,26 +575,14 @@ public class HttpServer implements FilterContainer {
     webServer.addConnector(sslListener);
   }
 
-  /**
-   * Configure an ssl listener on the server.
-   * @param addr address to listen on
-   * @param sslConf conf to retrieve ssl options
-   * @param needClientAuth whether client authentication is required
-   */
-  public void addSslListener(InetSocketAddress addr, Configuration sslConf,
-      boolean needClientAuth) throws IOException {
-    addSslListener(addr, sslConf, needClientAuth, false);
-  }
-
   /**
    * Configure an ssl listener on the server.
    * @param addr address to listen on
    * @param sslConf conf to retrieve ssl options
    * @param needCertsAuth whether x509 certificate authentication is required
-   * @param needKrbAuth whether to allow kerberos auth
    */
   public void addSslListener(InetSocketAddress addr, Configuration sslConf,
-      boolean needCertsAuth, boolean needKrbAuth) throws IOException {
+      boolean needCertsAuth) throws IOException {
     if (webServer.isStarted()) {
       throw new IOException("Failed to add ssl listener");
     }
@@ -611,15 +595,7 @@ public class HttpServer implements FilterContainer {
       System.setProperty("javax.net.ssl.trustStoreType", sslConf.get(
           "ssl.server.truststore.type", "jks"));
     }
-    Krb5AndCertsSslSocketConnector.MODE mode;
-    if(needCertsAuth && needKrbAuth)
-      mode = MODE.BOTH;
-    else if (!needCertsAuth && needKrbAuth)
-      mode = MODE.KRB;
-    else // Default to certificates
-      mode = MODE.CERTS;
-
-    SslSocketConnector sslListener = new Krb5AndCertsSslSocketConnector(mode);
+    SslSocketConnector sslListener = new SslSocketConnector();
     sslListener.setHost(addr.getHostName());
     sslListener.setPort(addr.getPort());
     sslListener.setKeystore(sslConf.get("ssl.server.keystore.location"));
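
With the krb5Filter gone, internal servlets are protected by the SPNEGO filter instead. A minimal caller sketch of the new requireAuth behavior (the servlet name and path mirror the getimage servlet from this commit; the wrapper class is hypothetical):

// Hypothetical sketch (not part of this commit): registering an internal
// servlet so the SPNEGO filter protects it when security is enabled.
import javax.servlet.http.HttpServlet;

import org.apache.hadoop.http.HttpServer;

public class InternalServletSetup {
  // 'server' is an already-constructed HttpServer; 'servlet' is any
  // HttpServlet subclass supplied by the caller.
  public static void register(HttpServer server,
      Class<? extends HttpServlet> servlet) {
    // requireAuth=true: when UserGroupInformation.isSecurityEnabled(),
    // a FilterMapping for HttpServer.SPNEGO_FILTER is added for this
    // path, replacing the old Kerberized-SSL krb5Filter.
    server.addInternalServlet("getimage", "/getimage", servlet, true);
  }
}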

+ 0 - 232
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Krb5AndCertsSslSocketConnector.java

@@ -1,232 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.security;
-
-import java.io.IOException;
-import java.net.InetAddress;
-import java.net.ServerSocket;
-import java.security.Principal;
-import java.util.Collections;
-import java.util.List;
-import java.util.Random;
-
-import javax.net.ssl.SSLContext;
-import javax.net.ssl.SSLServerSocket;
-import javax.net.ssl.SSLServerSocketFactory;
-import javax.net.ssl.SSLSocket;
-import javax.security.auth.kerberos.KerberosPrincipal;
-import javax.servlet.Filter;
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletRequestWrapper;
-import javax.servlet.http.HttpServletResponse;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.mortbay.io.EndPoint;
-import org.mortbay.jetty.HttpSchemes;
-import org.mortbay.jetty.Request;
-import org.mortbay.jetty.security.ServletSSL;
-import org.mortbay.jetty.security.SslSocketConnector;
-
-/**
- * Extend Jetty's {@link SslSocketConnector} to optionally also provide 
- * Kerberos5ized SSL sockets.  The only change in behavior from superclass
- * is that we no longer honor requests to turn off NeedAuthentication when
- * running with Kerberos support.
- */
-public class Krb5AndCertsSslSocketConnector extends SslSocketConnector {
-  public static final List<String> KRB5_CIPHER_SUITES = 
-    Collections.unmodifiableList(Collections.singletonList(
-          "TLS_KRB5_WITH_3DES_EDE_CBC_SHA"));
-  static {
-    SecurityUtil.initKrb5CipherSuites();
-  }
-  
-  private static final Log LOG = LogFactory
-      .getLog(Krb5AndCertsSslSocketConnector.class);
-
-  private static final String REMOTE_PRINCIPAL = "remote_principal";
-
-  public enum MODE {KRB, CERTS, BOTH} // Support Kerberos, certificates or both?
-
-  private final boolean useKrb;
-  private final boolean useCerts;
-
-  public Krb5AndCertsSslSocketConnector() {
-    super();
-    useKrb = true;
-    useCerts = false;
-    
-    setPasswords();
-  }
-  
-  public Krb5AndCertsSslSocketConnector(MODE mode) {
-    super();
-    useKrb = mode == MODE.KRB || mode == MODE.BOTH;
-    useCerts = mode == MODE.CERTS || mode == MODE.BOTH;
-    setPasswords();
-    logIfDebug("useKerb = " + useKrb + ", useCerts = " + useCerts);
-  }
-
-  // If not using Certs, set passwords to random gibberish or else
-  // Jetty will actually prompt the user for some.
-  private void setPasswords() {
-   if(!useCerts) {
-     Random r = new Random();
-     System.setProperty("jetty.ssl.password", String.valueOf(r.nextLong()));
-     System.setProperty("jetty.ssl.keypassword", String.valueOf(r.nextLong()));
-   }
-  }
-  
-  @Override
-  protected SSLServerSocketFactory createFactory() throws Exception {
-    if(useCerts)
-      return super.createFactory();
-    
-    SSLContext context = super.getProvider()==null
-       ? SSLContext.getInstance(super.getProtocol())
-        :SSLContext.getInstance(super.getProtocol(), super.getProvider());
-    context.init(null, null, null);
-    
-    return context.getServerSocketFactory();
-  }
-  
-  /* (non-Javadoc)
-   * @see org.mortbay.jetty.security.SslSocketConnector#newServerSocket(java.lang.String, int, int)
-   */
-  @Override
-  protected ServerSocket newServerSocket(String host, int port, int backlog)
-      throws IOException {
-    logIfDebug("Creating new KrbServerSocket for: " + host);
-    SSLServerSocket ss = null;
-    
-    if(useCerts) // Get the server socket from the SSL super impl
-      ss = (SSLServerSocket)super.newServerSocket(host, port, backlog);
-    else { // Create a default server socket
-      try {
-        ss = (SSLServerSocket)(host == null 
-         ? createFactory().createServerSocket(port, backlog) :
-           createFactory().createServerSocket(port, backlog, InetAddress.getByName(host)));
-      } catch (Exception e)
-      {
-        LOG.warn("Could not create KRB5 Listener", e);
-        throw new IOException("Could not create KRB5 Listener: " + e.toString());
-      }
-    }
-    
-    // Add Kerberos ciphers to this socket server if needed.
-    if(useKrb) {
-      ss.setNeedClientAuth(true);
-      String [] combined;
-      if(useCerts) { // combine the cipher suites
-        String[] certs = ss.getEnabledCipherSuites();
-        combined = new String[certs.length + KRB5_CIPHER_SUITES.size()];
-        System.arraycopy(certs, 0, combined, 0, certs.length);
-        System.arraycopy(KRB5_CIPHER_SUITES.toArray(new String[0]), 0, combined,
-              certs.length, KRB5_CIPHER_SUITES.size());
-      } else { // Just enable Kerberos auth
-        combined = KRB5_CIPHER_SUITES.toArray(new String[0]);
-      }
-      
-      ss.setEnabledCipherSuites(combined);
-    }
-    
-    return ss;
-  };
-
-  @Override
-  public void customize(EndPoint endpoint, Request request) throws IOException {
-    if(useKrb) { // Add Kerberos-specific info
-      SSLSocket sslSocket = (SSLSocket)endpoint.getTransport();
-      Principal remotePrincipal = sslSocket.getSession().getPeerPrincipal();
-      logIfDebug("Remote principal = " + remotePrincipal);
-      request.setScheme(HttpSchemes.HTTPS);
-      request.setAttribute(REMOTE_PRINCIPAL, remotePrincipal);
-      
-      if(!useCerts) { // Add extra info that would have been added by super
-        String cipherSuite = sslSocket.getSession().getCipherSuite();
-        Integer keySize = Integer.valueOf(ServletSSL.deduceKeyLength(cipherSuite));;
-        
-        request.setAttribute("javax.servlet.request.cipher_suite", cipherSuite);
-        request.setAttribute("javax.servlet.request.key_size", keySize);
-      } 
-    }
-    
-    if(useCerts) super.customize(endpoint, request);
-  }
-  
-  private void logIfDebug(String s) {
-    if(LOG.isDebugEnabled())
-      LOG.debug(s);
-  }
-  
-  /**
-   * Filter that takes the Kerberos principal identified in the 
-   * {@link Krb5AndCertsSslSocketConnector} and provides it the to the servlet
-   * at runtime, setting the principal and short name.
-   */
-  public static class Krb5SslFilter implements Filter {
-    @Override
-    public void doFilter(ServletRequest req, ServletResponse resp,
-        FilterChain chain) throws IOException, ServletException {
-      final Principal princ = 
-        (Principal)req.getAttribute(Krb5AndCertsSslSocketConnector.REMOTE_PRINCIPAL);
-      
-      if(princ == null || !(princ instanceof KerberosPrincipal)) {
-        // Should never actually get here, since should be rejected at socket
-        // level.
-        LOG.warn("User not authenticated via kerberos from " + req.getRemoteAddr());
-        ((HttpServletResponse)resp).sendError(HttpServletResponse.SC_FORBIDDEN, 
-            "User not authenticated via Kerberos");
-        return;
-      }
-      
-      // Provide principal information for servlet at runtime
-      ServletRequest wrapper = 
-            new HttpServletRequestWrapper((HttpServletRequest) req) {
-        @Override
-        public Principal getUserPrincipal() {
-          return princ;
-        }
-        
-        /* 
-         * Return the full name of this remote user.
-         * @see javax.servlet.http.HttpServletRequestWrapper#getRemoteUser()
-         */
-        @Override 
-        public String getRemoteUser() {
-          return princ.getName();
-        }
-      };
-      
-      chain.doFilter(wrapper, resp);
-    }
-
-    @Override
-    public void init(FilterConfig arg0) throws ServletException {
-      /* Nothing to do here */
-    }
-
-    @Override
-    public void destroy() { /* Nothing to do here */ }
-  }
-}

+ 27 - 83
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java

@@ -17,14 +17,11 @@
 package org.apache.hadoop.security;
 
 import java.io.IOException;
-import java.lang.reflect.Constructor;
-import java.lang.reflect.Field;
-import java.lang.reflect.InvocationTargetException;
-import java.lang.reflect.Method;
 import java.net.InetAddress;
 import java.net.InetSocketAddress;
 import java.net.URI;
 import java.net.URL;
+import java.net.URLConnection;
 import java.net.UnknownHostException;
 import java.security.AccessController;
 import java.security.PrivilegedAction;
@@ -45,6 +42,8 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
+import org.apache.hadoop.security.authentication.client.AuthenticationException;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenInfo;
 
@@ -134,79 +133,6 @@ public class SecurityUtil {
     return isTGSPrincipal(ticket.getServer());
   }
 
-  /**
-   * Explicitly pull the service ticket for the specified host.  This solves a
-   * problem with Java's Kerberos SSL problem where the client cannot 
-   * authenticate against a cross-realm service.  It is necessary for clients
-   * making kerberized https requests to call this method on the target URL
-   * to ensure that in a cross-realm environment the remote host will be 
-   * successfully authenticated.  
-   * 
-   * This method is internal to Hadoop and should not be used by other 
-   * applications.  This method should not be considered stable or open: 
-   * it will be removed when the Java behavior is changed.
-   * 
-   * @param remoteHost Target URL the krb-https client will access
-   * @throws IOException if the service ticket cannot be retrieved
-   */
-  public static void fetchServiceTicket(URL remoteHost) throws IOException {
-    if(!UserGroupInformation.isSecurityEnabled())
-      return;
-    
-    String serviceName = "host/" + remoteHost.getHost();
-    if (LOG.isDebugEnabled())
-      LOG.debug("Fetching service ticket for host at: " + serviceName);
-    Object serviceCred = null;
-    Method credsToTicketMeth;
-    Class<?> krb5utilClass;
-    try {
-      Class<?> principalClass;
-      Class<?> credentialsClass;
-      
-      if (System.getProperty("java.vendor").contains("IBM")) {
-        principalClass = Class.forName("com.ibm.security.krb5.PrincipalName");
-        
-        credentialsClass = Class.forName("com.ibm.security.krb5.Credentials");
-        krb5utilClass = Class.forName("com.ibm.security.jgss.mech.krb5");
-      } else {
-        principalClass = Class.forName("sun.security.krb5.PrincipalName");
-        credentialsClass = Class.forName("sun.security.krb5.Credentials");
-        krb5utilClass = Class.forName("sun.security.jgss.krb5.Krb5Util");
-      }
-      @SuppressWarnings("rawtypes")
-      Constructor principalConstructor = principalClass.getConstructor(String.class, 
-          int.class);
-      Field KRB_NT_SRV_HST = principalClass.getDeclaredField("KRB_NT_SRV_HST");
-      Method acquireServiceCredsMeth = 
-          credentialsClass.getDeclaredMethod("acquireServiceCreds", 
-              String.class, credentialsClass);
-      Method ticketToCredsMeth = krb5utilClass.getDeclaredMethod("ticketToCreds", 
-          KerberosTicket.class);
-      credsToTicketMeth = krb5utilClass.getDeclaredMethod("credsToTicket", 
-          credentialsClass);
-      
-      Object principal = principalConstructor.newInstance(serviceName,
-          KRB_NT_SRV_HST.get(principalClass));
-      
-      serviceCred = acquireServiceCredsMeth.invoke(credentialsClass, 
-          principal.toString(), 
-          ticketToCredsMeth.invoke(krb5utilClass, getTgtFromSubject()));
-    } catch (Exception e) {
-      throw new IOException("Can't get service ticket for: "
-          + serviceName, e);
-    }
-    if (serviceCred == null) {
-      throw new IOException("Can't get service ticket for " + serviceName);
-    }
-    try {
-      Subject.getSubject(AccessController.getContext()).getPrivateCredentials()
-          .add(credsToTicketMeth.invoke(krb5utilClass, serviceCred));
-    } catch (Exception e) {
-      throw new IOException("Can't get service ticket for: "
-          + serviceName, e);
-    }
-  }
-  
   /**
    * Convert Kerberos principal name pattern to valid Kerberos principal
    * names. It replaces hostname pattern with hostname, which should be
@@ -513,6 +439,30 @@ public class SecurityUtil {
     }
   }
 
+  /**
+   * Open a (if need be) secure connection to a URL in a secure environment
+   * that is using SPNEGO to authenticate its URLs. All Namenode and Secondary
+   * Namenode URLs that are protected via SPNEGO should be accessed via this
+   * method.
+   *
+   * @param url to authenticate via SPNEGO.
+   * @return A connection that has been authenticated via SPNEGO
+   * @throws IOException If unable to authenticate via SPNEGO
+   */
+  public static URLConnection openSecureHttpConnection(URL url) throws IOException {
+    if(!UserGroupInformation.isSecurityEnabled()) {
+      return url.openConnection();
+    }
+
+    AuthenticatedURL.Token token = new AuthenticatedURL.Token();
+    try {
+      return new AuthenticatedURL().openConnection(url, token);
+    } catch (AuthenticationException e) {
+      throw new IOException("Exception trying to open authenticated connection to "
+              + url, e);
+    }
+  }
+
   /**
    * Resolves a host subject to the security requirements determined by
    * hadoop.security.token.service.use_ip.
@@ -664,10 +614,4 @@ public class SecurityUtil {
     }
   }
 
-  public static void initKrb5CipherSuites() {
-    if (UserGroupInformation.isSecurityEnabled()) {
-      System.setProperty("https.cipherSuites",
-          Krb5AndCertsSslSocketConnector.KRB5_CIPHER_SUITES.get(0));
-    }
-  }
 }
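
A short usage sketch for the new helper, assuming a caller that just wants the response stream (the wrapper class and method below are hypothetical; the cast matches how TransferFsImage uses it):

// Hypothetical sketch (not from this commit): reading from a
// SPNEGO-protected NN/SNN URL via the new helper.
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;

import org.apache.hadoop.security.SecurityUtil;

public class SecureFetch {
  public static InputStream open(String address) throws IOException {
    URL url = new URL(address);
    // Falls back to url.openConnection() when security is disabled;
    // otherwise authenticates via AuthenticatedURL (SPNEGO).
    HttpURLConnection conn =
        (HttpURLConnection) SecurityUtil.openSecureHttpConnection(url);
    return conn.getInputStream();
  }
}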

+ 0 - 7
hadoop-common-project/hadoop-common/src/main/packages/templates/conf/hdfs-site.xml

@@ -120,13 +120,6 @@
     </description>
   </property>
 
-  <property>
-    <name>dfs.secondary.https.port</name>
-    <value>50490</value>
-    <description>The https port where secondary-namenode binds</description>
-
-  </property>
-
   <property>
     <name>dfs.datanode.kerberos.principal</name>
     <value>dn/_HOST@${local.realm}</value>

+ 3 - 0
hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt

@@ -280,6 +280,9 @@ Release 2.0.0 - UNRELEASED
     HDFS-3303. Remove Writable implementation from RemoteEditLogManifest.
     (Brandon Li via szetszwo)
 
+    HDFS-2617. Replaced Kerberized SSL for image transfer and fsck
+    with SPNEGO-based solution. (jghoman, tucu, and atm via eli)
+
   OPTIMIZATIONS
 
     HDFS-2477. Optimize computing the diff between a block report and the

+ 2 - 2
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java

@@ -317,10 +317,10 @@ public class DFSConfigKeys extends CommonConfigurationKeys {
   public static final String  DFS_DATANODE_USER_NAME_KEY = "dfs.datanode.kerberos.principal";
   public static final String  DFS_NAMENODE_KEYTAB_FILE_KEY = "dfs.namenode.keytab.file";
   public static final String  DFS_NAMENODE_USER_NAME_KEY = "dfs.namenode.kerberos.principal";
-  public static final String  DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY = "dfs.namenode.kerberos.https.principal";
+  public static final String  DFS_NAMENODE_INTERNAL_SPENGO_USER_NAME_KEY = "dfs.namenode.kerberos.internal.spnego.principal";
   public static final String  DFS_SECONDARY_NAMENODE_KEYTAB_FILE_KEY = "dfs.secondary.namenode.keytab.file";
   public static final String  DFS_SECONDARY_NAMENODE_USER_NAME_KEY = "dfs.secondary.namenode.kerberos.principal";
-  public static final String  DFS_SECONDARY_NAMENODE_KRB_HTTPS_USER_NAME_KEY = "dfs.secondary.namenode.kerberos.https.principal";
+  public static final String  DFS_SECONDARY_NAMENODE_INTERNAL_SPENGO_USER_NAME_KEY = "dfs.secondary.namenode.kerberos.internal.spnego.principal";
   public static final String  DFS_NAMENODE_NAME_CACHE_THRESHOLD_KEY = "dfs.namenode.name.cache.threshold";
   public static final int     DFS_NAMENODE_NAME_CACHE_THRESHOLD_DEFAULT = 10;
   

+ 6 - 6
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HftpFileSystem.java

@@ -144,7 +144,7 @@ public class HftpFileSystem extends FileSystem
   }
 
   protected URI getNamenodeSecureUri(URI uri) {
-    return DFSUtil.createUri("https", getNamenodeSecureAddr(uri));
+    return DFSUtil.createUri("http", getNamenodeSecureAddr(uri));
   }
 
   @Override
@@ -247,7 +247,7 @@ public class HftpFileSystem extends FileSystem
             c = DelegationTokenFetcher.getDTfromRemote(nnHttpUrl, renewer);
           } catch (Exception e) {
             LOG.info("Couldn't get a delegation token from " + nnHttpUrl + 
-            " using https.");
+            " using http.");
             if(LOG.isDebugEnabled()) {
               LOG.debug("error was ", e);
             }
@@ -686,11 +686,11 @@ public class HftpFileSystem extends FileSystem
                       Configuration conf) throws IOException {
       // update the kerberos credentials, if they are coming from a keytab
       UserGroupInformation.getLoginUser().reloginFromKeytab();
-      // use https to renew the token
+      // use http to renew the token
       InetSocketAddress serviceAddr = SecurityUtil.getTokenServiceAddr(token);
       return 
         DelegationTokenFetcher.renewDelegationToken
-        (DFSUtil.createUri("https", serviceAddr).toString(), 
+        (DFSUtil.createUri("http", serviceAddr).toString(),
          (Token<DelegationTokenIdentifier>) token);
     }
 
@@ -700,10 +700,10 @@ public class HftpFileSystem extends FileSystem
                        Configuration conf) throws IOException {
       // update the kerberos credentials, if they are coming from a keytab
       UserGroupInformation.getLoginUser().checkTGTAndReloginFromKeytab();
-      // use https to cancel the token
+      // use http to cancel the token
       InetSocketAddress serviceAddr = SecurityUtil.getTokenServiceAddr(token);
       DelegationTokenFetcher.cancelDelegationToken
-        (DFSUtil.createUri("https", serviceAddr).toString(), 
+        (DFSUtil.createUri("http", serviceAddr).toString(),
          (Token<DelegationTokenIdentifier>) token);
     }    
   }

+ 8 - 36
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/GetImageServlet.java

@@ -27,6 +27,8 @@ import javax.servlet.ServletException;
 import javax.servlet.http.HttpServlet;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
+
+import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.security.SecurityUtil;
 
 import org.apache.commons.logging.Log;
@@ -34,7 +36,6 @@ import org.apache.commons.logging.LogFactory;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.HAUtil;
 import org.apache.hadoop.hdfs.server.common.JspHelper;
@@ -83,11 +84,11 @@ public class GetImageServlet extends HttpServlet {
         (Configuration)getServletContext().getAttribute(JspHelper.CURRENT_CONF);
       
       if(UserGroupInformation.isSecurityEnabled() && 
-          !isValidRequestor(request.getRemoteUser(), conf)) {
+          !isValidRequestor(request.getUserPrincipal().getName(), conf)) {
         response.sendError(HttpServletResponse.SC_FORBIDDEN, 
             "Only Namenode and Secondary Namenode may access this servlet");
         LOG.warn("Received non-NN/SNN request for image or edits from " 
-            + request.getRemoteHost());
+            + request.getUserPrincipal().getName() + " at " + request.getRemoteHost());
         return;
       }
       
@@ -156,15 +157,10 @@ public class GetImageServlet extends HttpServlet {
               }
               
               // issue a HTTP get request to download the new fsimage 
-              MD5Hash downloadImageDigest = reloginIfNecessary().doAs(
-                  new PrivilegedExceptionAction<MD5Hash>() {
-                  @Override
-                  public MD5Hash run() throws Exception {
-                    return TransferFsImage.downloadImageToStorage(
+              MD5Hash downloadImageDigest =
+                TransferFsImage.downloadImageToStorage(
                         parsedParams.getInfoServer(), txid,
                         nnImage.getStorage(), true);
-                    }
-              });
               nnImage.saveDigestAndRenameCheckpointImage(txid, downloadImageDigest);
               
               // Now that we have a new checkpoint, we might be able to
@@ -176,18 +172,6 @@ public class GetImageServlet extends HttpServlet {
           }
           return null;
         }
-        
-        // We may have lost our ticket since the last time we tried to open
-        // an http connection, so log in just in case.
-        private UserGroupInformation reloginIfNecessary() throws IOException {
-          // This method is only called on the NN, therefore it is safe to
-          // use these key values.
-          return UserGroupInformation.loginUserFromKeytabAndReturnUGI(
-                  SecurityUtil.getServerPrincipal(conf
-                      .get(DFSConfigKeys.DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY),
-                      NameNode.getAddress(conf).getHostName()),
-              conf.get(DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY));
-        }       
       });
       
     } catch (Throwable t) {
@@ -232,18 +216,10 @@ public class GetImageServlet extends HttpServlet {
     
     Set<String> validRequestors = new HashSet<String>();
 
-    validRequestors.add(
-        SecurityUtil.getServerPrincipal(conf
-            .get(DFSConfigKeys.DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY), NameNode
-            .getAddress(conf).getHostName()));
     validRequestors.add(
         SecurityUtil.getServerPrincipal(conf
             .get(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY), NameNode
             .getAddress(conf).getHostName()));
-    validRequestors.add(
-        SecurityUtil.getServerPrincipal(conf
-            .get(DFSConfigKeys.DFS_SECONDARY_NAMENODE_KRB_HTTPS_USER_NAME_KEY),
-            SecondaryNameNode.getHttpAddress(conf).getHostName()));
     validRequestors.add(
         SecurityUtil.getServerPrincipal(conf
             .get(DFSConfigKeys.DFS_SECONDARY_NAMENODE_USER_NAME_KEY),
@@ -251,10 +227,6 @@ public class GetImageServlet extends HttpServlet {
 
     if (HAUtil.isHAEnabled(conf, DFSUtil.getNamenodeNameServiceId(conf))) {
       Configuration otherNnConf = HAUtil.getConfForOtherNode(conf);
-      validRequestors.add(
-          SecurityUtil.getServerPrincipal(otherNnConf
-              .get(DFSConfigKeys.DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY),
-              NameNode.getAddress(otherNnConf).getHostName()));
       validRequestors.add(
           SecurityUtil.getServerPrincipal(otherNnConf
               .get(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY),
@@ -263,11 +235,11 @@ public class GetImageServlet extends HttpServlet {
 
     for(String v : validRequestors) {
       if(v != null && v.equals(remoteUser)) {
-        if(LOG.isDebugEnabled()) LOG.debug("isValidRequestor is allowing: " + remoteUser);
+        if(LOG.isInfoEnabled()) LOG.info("GetImageServlet allowing: " + remoteUser);
         return true;
       }
     }
-    if(LOG.isDebugEnabled()) LOG.debug("isValidRequestor is rejecting: " + remoteUser);
+    if(LOG.isInfoEnabled()) LOG.info("GetImageServlet rejecting: " + remoteUser);
     return false;
   }
   

+ 3 - 4
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java

@@ -164,10 +164,8 @@ public class NameNode {
     DFS_NAMENODE_CHECKPOINT_EDITS_DIR_KEY,
     DFS_NAMENODE_SERVICE_RPC_ADDRESS_KEY,
     DFS_NAMENODE_HTTP_ADDRESS_KEY,
-    DFS_NAMENODE_HTTPS_ADDRESS_KEY,
     DFS_NAMENODE_KEYTAB_FILE_KEY,
     DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY,
-    DFS_NAMENODE_SECONDARY_HTTPS_PORT_KEY,
     DFS_SECONDARY_NAMENODE_KEYTAB_FILE_KEY,
     DFS_NAMENODE_BACKUP_ADDRESS_KEY,
     DFS_NAMENODE_BACKUP_HTTP_ADDRESS_KEY,
@@ -361,8 +359,9 @@ public class NameNode {
   }
   
   protected void setHttpServerAddress(Configuration conf) {
-    conf.set(DFS_NAMENODE_HTTP_ADDRESS_KEY,
-        NetUtils.getHostPortString(getHttpAddress()));
+    String hostPort = NetUtils.getHostPortString(getHttpAddress());
+    conf.set(DFS_NAMENODE_HTTP_ADDRESS_KEY, hostPort);
+    LOG.info("Web-server up at: " + hostPort);
   }
 
   protected void loadNamesystem(Configuration conf) throws IOException {

+ 88 - 112
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hdfs.server.namenode;
 
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_ADMIN;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_DEFAULT;
@@ -43,6 +44,7 @@ import org.apache.hadoop.http.HttpServer;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
 import org.apache.hadoop.security.authorize.AccessControlList;
 
 /**
@@ -78,127 +80,101 @@ public class NameNodeHttpServer {
         conf.get(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY),
         nn.getNameNodeAddress().getHostName());
   }
-  
+
   public void start() throws IOException {
     final String infoHost = bindAddress.getHostName();
-    
-    if(UserGroupInformation.isSecurityEnabled()) {
-      String httpsUser = SecurityUtil.getServerPrincipal(conf
-          .get(DFSConfigKeys.DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY), infoHost);
-      if (httpsUser == null) {
-        LOG.warn(DFSConfigKeys.DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY
-            + " not defined in config. Starting http server as "
-            + getDefaultServerPrincipal()
-            + ": Kerberized SSL may be not function correctly.");
-      } else {
-        // Kerberized SSL servers must be run from the host principal...
-        LOG.info("Logging in as " + httpsUser + " to start http server.");
-        SecurityUtil.login(conf, DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY,
-            DFSConfigKeys.DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY, infoHost);
-      }
-    }
-
-    UserGroupInformation ugi = UserGroupInformation.getLoginUser();
-    try {
-      this.httpServer = ugi.doAs(new PrivilegedExceptionAction<HttpServer>() {
-        @Override
-        public HttpServer run() throws IOException, InterruptedException {
-          int infoPort = bindAddress.getPort();
-          httpServer = new HttpServer("hdfs", infoHost, infoPort,
-              infoPort == 0, conf, 
-              new AccessControlList(conf.get(DFSConfigKeys.DFS_ADMIN, " "))) {
-            {
-              if (WebHdfsFileSystem.isEnabled(conf, LOG)) {
-                //add SPNEGO authentication filter for webhdfs
-                final String name = "SPNEGO";
-                final String classname =  AuthFilter.class.getName();
-                final String pathSpec = WebHdfsFileSystem.PATH_PREFIX + "/*";
-                Map<String, String> params = getAuthFilterParams(conf);
-                defineFilter(webAppContext, name, classname, params,
-                    new String[]{pathSpec});
-                LOG.info("Added filter '" + name + "' (class=" + classname + ")");
-
-                // add webhdfs packages
-                addJerseyResourcePackage(
-                    NamenodeWebHdfsMethods.class.getPackage().getName()
-                    + ";" + Param.class.getPackage().getName(), pathSpec);
-              }
+    int infoPort = bindAddress.getPort();
+
+    httpServer = new HttpServer("hdfs", infoHost, infoPort,
+                                infoPort == 0, conf,
+                                new AccessControlList(conf.get(DFS_ADMIN, " "))) {
+      {
+        // Add SPNEGO support to NameNode
+        if (UserGroupInformation.isSecurityEnabled()) {
+          Map<String, String> params = new HashMap<String, String>();
+          String principalInConf = conf.get(
+            DFSConfigKeys.DFS_NAMENODE_INTERNAL_SPENGO_USER_NAME_KEY);
+          if (principalInConf != null && !principalInConf.isEmpty()) {
+            params.put("kerberos.principal",
+                       SecurityUtil.getServerPrincipal(principalInConf, infoHost));
+            String httpKeytab = conf.get(DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY);
+            if (httpKeytab != null && !httpKeytab.isEmpty()) {
+              params.put("kerberos.keytab", httpKeytab);
             }
 
-            private Map<String, String> getAuthFilterParams(Configuration conf)
-                throws IOException {
-              Map<String, String> params = new HashMap<String, String>();
-              String principalInConf = conf
-                  .get(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY);
-              if (principalInConf != null && !principalInConf.isEmpty()) {
-                params
-                    .put(
-                        DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY,
-                        SecurityUtil.getServerPrincipal(principalInConf,
-                            infoHost));
-              }
-              String httpKeytab = conf
-                  .get(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_KEYTAB_KEY);
-              if (httpKeytab != null && !httpKeytab.isEmpty()) {
-                params.put(
-                    DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_KEYTAB_KEY,
-                    httpKeytab);
-              }
-              return params;
-            }
-          };
-
-          boolean certSSL = conf.getBoolean(DFSConfigKeys.DFS_HTTPS_ENABLE_KEY, false);
-          boolean useKrb = UserGroupInformation.isSecurityEnabled();
-          if (certSSL || useKrb) {
-            boolean needClientAuth = conf.getBoolean(
-                DFSConfigKeys.DFS_CLIENT_HTTPS_NEED_AUTH_KEY,
-                DFSConfigKeys.DFS_CLIENT_HTTPS_NEED_AUTH_DEFAULT);
-            InetSocketAddress secInfoSocAddr = NetUtils.createSocketAddr(conf
-                .get(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY,
-                    DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_DEFAULT));
-            Configuration sslConf = new HdfsConfiguration(false);
-            if (certSSL) {
-              sslConf.addResource(conf.get(DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY,
-                  DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_DEFAULT));
-            }
-            httpServer.addSslListener(secInfoSocAddr, sslConf, needClientAuth,
-                useKrb);
-            // assume same ssl port for all datanodes
-            InetSocketAddress datanodeSslPort = NetUtils.createSocketAddr(
-                conf.get(DFS_DATANODE_HTTPS_ADDRESS_KEY,
-                    infoHost + ":" + DFSConfigKeys.DFS_DATANODE_HTTPS_DEFAULT_PORT));
-            httpServer.setAttribute(DFSConfigKeys.DFS_DATANODE_HTTPS_PORT_KEY,
-                datanodeSslPort.getPort());
+            params.put(AuthenticationFilter.AUTH_TYPE, "kerberos");
+
+            defineFilter(webAppContext, SPNEGO_FILTER,
+                         AuthenticationFilter.class.getName(), params, null);
           }
-          httpServer.setAttribute(NAMENODE_ATTRIBUTE_KEY, nn);
-          httpServer.setAttribute(NAMENODE_ADDRESS_ATTRIBUTE_KEY,
-              nn.getNameNodeAddress());
-          httpServer.setAttribute(FSIMAGE_ATTRIBUTE_KEY, nn.getFSImage());
-          httpServer.setAttribute(JspHelper.CURRENT_CONF, conf);
-          setupServlets(httpServer, conf);
-          httpServer.start();
-
-          // The web-server port can be ephemeral... ensure we have the correct
-          // info
-          infoPort = httpServer.getPort();
-          httpAddress = new InetSocketAddress(infoHost, infoPort);
-          LOG.info(nn.getRole() + " Web-server up at: " + httpAddress);
-          return httpServer;
         }
-      });
-    } catch (InterruptedException e) {
-      throw new IOException(e);
-    } finally {
-      if(UserGroupInformation.isSecurityEnabled() && 
-          conf.get(DFSConfigKeys.DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY) != null) {
-        // Go back to being the correct Namenode principal
-        LOG.info("Logging back in as NameNode user following http server start");
-        nn.loginAsNameNodeUser(conf);
+        if (WebHdfsFileSystem.isEnabled(conf, LOG)) {
+          //add SPNEGO authentication filter for webhdfs
+          final String name = "SPNEGO";
+          final String classname = AuthFilter.class.getName();
+          final String pathSpec = WebHdfsFileSystem.PATH_PREFIX + "/*";
+          Map<String, String> params = getAuthFilterParams(conf);
+          defineFilter(webAppContext, name, classname, params,
+                       new String[]{pathSpec});
+          LOG.info("Added filter '" + name + "' (class=" + classname + ")");
+
+          // add webhdfs packages
+          addJerseyResourcePackage(
+            NamenodeWebHdfsMethods.class.getPackage().getName()
+            + ";" + Param.class.getPackage().getName(), pathSpec);
+        }
       }
+
+      private Map<String, String> getAuthFilterParams(Configuration conf)
+        throws IOException {
+        Map<String, String> params = new HashMap<String, String>();
+        String principalInConf = conf
+          .get(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY);
+        if (principalInConf != null && !principalInConf.isEmpty()) {
+          params
+            .put(
+              DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY,
+              SecurityUtil.getServerPrincipal(principalInConf,
+                                              bindAddress.getHostName()));
+        }
+        String httpKeytab = conf
+          .get(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_KEYTAB_KEY);
+        if (httpKeytab != null && !httpKeytab.isEmpty()) {
+          params.put(
+            DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_KEYTAB_KEY,
+            httpKeytab);
+        }
+        return params;
+      }
+    };
+
+    boolean certSSL = conf.getBoolean("dfs.https.enable", false);
+    if (certSSL) {
+      boolean needClientAuth = conf.getBoolean("dfs.https.need.client.auth", false);
+      InetSocketAddress secInfoSocAddr = NetUtils.createSocketAddr(infoHost + ":" + conf.get(
+        "dfs.https.port", infoHost + ":" + 0));
+      Configuration sslConf = new Configuration(false);
+      if (certSSL) {
+        sslConf.addResource(conf.get("dfs.https.server.keystore.resource",
+                                     "ssl-server.xml"));
+      }
+      httpServer.addSslListener(secInfoSocAddr, sslConf, needClientAuth);
+      // assume same ssl port for all datanodes
+      InetSocketAddress datanodeSslPort = NetUtils.createSocketAddr(conf.get(
+        "dfs.datanode.https.address", infoHost + ":" + 50475));
+      httpServer.setAttribute("datanode.https.port", datanodeSslPort
+        .getPort());
     }
+    httpServer.setAttribute("name.node", nn);
+    httpServer.setAttribute("name.node.address", bindAddress);
+    httpServer.setAttribute("name.system.image", nn.getFSImage());
+    httpServer.setAttribute(JspHelper.CURRENT_CONF, conf);
+    setupServlets(httpServer, conf);
+    httpServer.start();
+    httpAddress = new InetSocketAddress(bindAddress.getAddress(), httpServer.getPort());
   }
-  
+
+
   public void stop() throws Exception {
     if (httpServer != null) {
       httpServer.stop();
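
The replacement above builds an anonymous HttpServer subclass whose instance-initializer block defines the SPNEGO filter before the server starts. A stripped-down sketch of that idiom (the hard-coded principal is illustrative; the real code reads it from DFS_NAMENODE_INTERNAL_SPENGO_USER_NAME_KEY):

// Hypothetical sketch of the construction idiom used above.
import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.HttpServer;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
import org.apache.hadoop.security.authorize.AccessControlList;

public class SpnegoHttpServerSketch {
  static HttpServer build(final String host, final int port,
      final Configuration conf) throws Exception {
    return new HttpServer("hdfs", host, port, port == 0, conf,
        new AccessControlList(conf.get("dfs.cluster.administrators", " "))) {
      { // instance initializer: runs during construction, before start()
        Map<String, String> params = new HashMap<String, String>();
        // Illustrative principal; normally resolved via
        // SecurityUtil.getServerPrincipal() from configuration.
        params.put("kerberos.principal", "HTTP/" + host + "@EXAMPLE.COM");
        params.put(AuthenticationFilter.AUTH_TYPE, "kerberos");
        defineFilter(webAppContext, SPNEGO_FILTER,
            AuthenticationFilter.class.getName(), params, null);
      }
    };
  }
}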

+ 41 - 55
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java

@@ -25,8 +25,10 @@ import java.security.PrivilegedAction;
 import java.security.PrivilegedExceptionAction;
 import java.util.Collection;
 import java.util.Date;
+import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Map;
 
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
@@ -44,6 +46,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.*;
 
+import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.HAUtil;
 import org.apache.hadoop.hdfs.NameNodeProxies;
@@ -63,9 +66,9 @@ import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
 import org.apache.hadoop.metrics2.source.JvmMetrics;
 import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.security.Krb5AndCertsSslSocketConnector;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
 import org.apache.hadoop.security.authorize.AccessControlList;
 
 import org.apache.hadoop.util.Daemon;
@@ -108,7 +111,6 @@ public class SecondaryNameNode implements Runnable {
   private volatile boolean shouldRun;
   private HttpServer infoServer;
   private int infoPort;
-  private int imagePort;
   private String infoBindAddress;
 
   private Collection<URI> checkpointDirs;
@@ -229,63 +231,47 @@ public class SecondaryNameNode implements Runnable {
 
     // Initialize other scheduling parameters from the configuration
     checkpointConf = new CheckpointConf(conf);
-    
+
     // initialize the webserver for uploading files.
-    // Kerberized SSL servers must be run from the host principal...
-    UserGroupInformation httpUGI = 
-      UserGroupInformation.loginUserFromKeytabAndReturnUGI(
-          SecurityUtil.getServerPrincipal(conf
-              .get(DFS_SECONDARY_NAMENODE_KRB_HTTPS_USER_NAME_KEY),
-              infoBindAddress),
-          conf.get(DFS_SECONDARY_NAMENODE_KEYTAB_FILE_KEY));
-    try {
-      infoServer = httpUGI.doAs(new PrivilegedExceptionAction<HttpServer>() {
-        @Override
-        public HttpServer run() throws IOException, InterruptedException {
-          LOG.info("Starting web server as: " +
-              UserGroupInformation.getCurrentUser().getUserName());
-
-          int tmpInfoPort = infoSocAddr.getPort();
-          infoServer = new HttpServer("secondary", infoBindAddress, tmpInfoPort,
-              tmpInfoPort == 0, conf, 
-              new AccessControlList(conf.get(DFS_ADMIN, " ")));
-          
-          if(UserGroupInformation.isSecurityEnabled()) {
-            SecurityUtil.initKrb5CipherSuites();
-            InetSocketAddress secInfoSocAddr = 
-              NetUtils.createSocketAddr(infoBindAddress + ":"+ conf.getInt(
-                DFS_NAMENODE_SECONDARY_HTTPS_PORT_KEY,
-                DFS_NAMENODE_SECONDARY_HTTPS_PORT_DEFAULT));
-            imagePort = secInfoSocAddr.getPort();
-            infoServer.addSslListener(secInfoSocAddr, conf, false, true);
+    int tmpInfoPort = infoSocAddr.getPort();
+    infoServer = new HttpServer("secondary", infoBindAddress, tmpInfoPort,
+                                tmpInfoPort == 0, conf,
+                                new AccessControlList(conf.get(DFS_ADMIN, " "))) {
+      {
+        if (UserGroupInformation.isSecurityEnabled()) {
+          Map<String, String> params = new HashMap<String, String>();
+          String principalInConf = conf.get(DFSConfigKeys.DFS_SECONDARY_NAMENODE_INTERNAL_SPENGO_USER_NAME_KEY);
+          if (principalInConf != null && !principalInConf.isEmpty()) {
+            params.put("kerberos.principal",
+                       SecurityUtil.getServerPrincipal(principalInConf, infoSocAddr.getHostName()));
           }
-          
-          infoServer.setAttribute("secondary.name.node", SecondaryNameNode.this);
-          infoServer.setAttribute("name.system.image", checkpointImage);
-          infoServer.setAttribute(JspHelper.CURRENT_CONF, conf);
-          infoServer.addInternalServlet("getimage", "/getimage",
-              GetImageServlet.class, true);
-          infoServer.start();
-          return infoServer;
+          String httpKeytab = conf.get(DFSConfigKeys.DFS_SECONDARY_NAMENODE_KEYTAB_FILE_KEY);
+          if (httpKeytab != null && !httpKeytab.isEmpty()) {
+            params.put("kerberos.keytab", httpKeytab);
+          }
+          params.put(AuthenticationFilter.AUTH_TYPE, "kerberos");
+
+          defineFilter(webAppContext, SPNEGO_FILTER, AuthenticationFilter.class.getName(),
+                       params, null);
         }
-      });
-    } catch (InterruptedException e) {
-      throw new RuntimeException(e);
-    } 
-    
+      }
+    };
+    infoServer.setAttribute("secondary.name.node", this);
+    infoServer.setAttribute("name.system.image", checkpointImage);
+    infoServer.setAttribute(JspHelper.CURRENT_CONF, conf);
+    infoServer.addInternalServlet("getimage", "/getimage",
+                                  GetImageServlet.class, true);
+    infoServer.start();
+
     LOG.info("Web server init done");
 
     // The web-server port can be ephemeral... ensure we have the correct info
     infoPort = infoServer.getPort();
-    if (!UserGroupInformation.isSecurityEnabled()) {
-      imagePort = infoPort;
-    }
-    
-    conf.set(DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY, infoBindAddress + ":" +infoPort); 
-    LOG.info("Secondary Web-server up at: " + infoBindAddress + ":" +infoPort);
-    LOG.info("Secondary image servlet up at: " + infoBindAddress + ":" + imagePort);
+
+    conf.set(DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY, infoBindAddress + ":" + infoPort);
+    LOG.info("Secondary Web-server up at: " + infoBindAddress + ":" + infoPort);
     LOG.info("Checkpoint Period   :" + checkpointConf.getPeriod() + " secs " +
-             "(" + checkpointConf.getPeriod()/60 + " min)");
+             "(" + checkpointConf.getPeriod() / 60 + " min)");
     LOG.info("Log Size Trigger    :" + checkpointConf.getTxnCount() + " txns");
   }
 
@@ -434,7 +420,7 @@ public class SecondaryNameNode implements Runnable {
       throw new IOException("This is not a DFS");
     }
 
-    String configuredAddress = DFSUtil.getInfoServer(null, conf, true);
+    String configuredAddress = DFSUtil.getInfoServer(null, conf, false);
     String address = DFSUtil.substituteForWildcardAddress(configuredAddress,
         fsName.getHost());
     LOG.debug("Will connect to NameNode at HTTP address: " + address);
@@ -446,7 +432,7 @@ public class SecondaryNameNode implements Runnable {
    * for image transfers
    */
   private InetSocketAddress getImageListenAddress() {
-    return new InetSocketAddress(infoBindAddress, imagePort);
+    return new InetSocketAddress(infoBindAddress, infoPort);
   }
 
   /**
@@ -507,7 +493,7 @@ public class SecondaryNameNode implements Runnable {
   
   
   /**
-   * @param argv The parameters passed to this program.
+   * @param opts The parameters passed to this program.
    * @exception Exception if the filesystem does not exist.
    * @return 0 on success, non zero on error.
    */
@@ -709,7 +695,7 @@ public class SecondaryNameNode implements Runnable {
      * Construct a checkpoint image.
      * @param conf Node configuration.
      * @param imageDirs URIs of storage for image.
-     * @param editDirs URIs of storage for edit logs.
+     * @param editsDirs URIs of storage for edit logs.
      * @throws IOException If storage cannot be access.
      */
     CheckpointStorage(Configuration conf, 

+ 7 - 9
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/TransferFsImage.java

@@ -201,19 +201,17 @@ public class TransferFsImage {
       String queryString, List<File> localPaths,
       NNStorage dstStorage, boolean getChecksum) throws IOException {
     byte[] buf = new byte[HdfsConstants.IO_FILE_BUFFER_SIZE];
-    String proto = UserGroupInformation.isSecurityEnabled() ? "https://" : "http://";
-    StringBuilder str = new StringBuilder(proto+nnHostPort+"/getimage?");
-    str.append(queryString);
 
+    String str = "http://" + nnHostPort + "/getimage?" + queryString;
+    LOG.info("Opening connection to " + str);
     //
     // open connection to remote server
     //
-    URL url = new URL(str.toString());
-    
-    // Avoid Krb bug with cross-realm hosts
-    SecurityUtil.fetchServiceTicket(url);
-    HttpURLConnection connection = (HttpURLConnection) url.openConnection();
-    
+    URL url = new URL(str);
+
+    HttpURLConnection connection = (HttpURLConnection)
+      SecurityUtil.openSecureHttpConnection(url);
+
     if (connection.getResponseCode() != HttpURLConnection.HTTP_OK) {
       throw new HttpGetFailedException(
           "Image transfer servlet at " + url +

+ 1 - 2
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/BootstrapStandby.java

@@ -95,7 +95,6 @@ public class BootstrapStandby implements Tool, Configurable {
   static final int ERR_CODE_LOGS_UNAVAILABLE = 6; 
 
   public int run(String[] args) throws Exception {
-    SecurityUtil.initKrb5CipherSuites();
     parseArgs(args);
     parseConfAndFindOtherNN();
     NameNode.checkAllowFormat(conf);
@@ -322,7 +321,7 @@ public class BootstrapStandby implements Tool, Configurable {
         "Could not determine valid IPC address for other NameNode (%s)" +
         ", got: %s", otherNNId, otherIpcAddr);
 
-    otherHttpAddr = DFSUtil.getInfoServer(null, otherNode, true);
+    otherHttpAddr = DFSUtil.getInfoServer(null, otherNode, false);
     otherHttpAddr = DFSUtil.substituteForWildcardAddress(otherHttpAddr,
         otherIpcAddr.getHostName());
     

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/StandbyCheckpointer.java

@@ -92,7 +92,7 @@ public class StandbyCheckpointer {
   }
   
   private String getHttpAddress(Configuration conf) {
-    String configuredAddr = DFSUtil.getInfoServer(null, conf, true);
+    String configuredAddr = DFSUtil.getInfoServer(null, conf, false);
     
     // Use the hostname from the RPC address as a default, in case
     // the HTTP address is configured to 0.0.0.0.

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java

@@ -504,7 +504,7 @@ public class DFSAdmin extends FsShell {
    */
   public int fetchImage(String[] argv, int idx) throws IOException {
     String infoServer = DFSUtil.getInfoServer(
-        HAUtil.getAddressOfActive(getDFS()), getConf(), true);
+        HAUtil.getAddressOfActive(getDFS()), getConf(), false);
     TransferFsImage.downloadMostRecentImageToDirectory(infoServer,
         new File(argv[idx]));
     return 0;

+ 4 - 11
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSck.java

@@ -153,8 +153,7 @@ public class DFSck extends Configured implements Tool {
         url.append("&startblockafter=").append(String.valueOf(cookie));
       }
       URL path = new URL(url.toString());
-      SecurityUtil.fetchServiceTicket(path);
-      URLConnection connection = path.openConnection();
+      URLConnection connection = SecurityUtil.openSecureHttpConnection(path);
       InputStream stream = connection.getInputStream();
       BufferedReader input = new BufferedReader(new InputStreamReader(
           stream, "UTF-8"));
@@ -222,16 +221,11 @@ public class DFSck extends Configured implements Tool {
       return null;
     }
     
-    return DFSUtil.getInfoServer(HAUtil.getAddressOfActive(fs), conf, true);
+    return DFSUtil.getInfoServer(HAUtil.getAddressOfActive(fs), conf, false);
   }
 
   private int doWork(final String[] args) throws IOException {
-    String proto = "http://";
-    if (UserGroupInformation.isSecurityEnabled()) {
-      SecurityUtil.initKrb5CipherSuites();
-      proto = "https://";
-    }
-    final StringBuilder url = new StringBuilder(proto);
+    final StringBuilder url = new StringBuilder("http://");
     
     String namenodeAddress = getCurrentNamenodeAddress();
     if (namenodeAddress == null) {
@@ -279,8 +273,7 @@ public class DFSck extends Configured implements Tool {
       return listCorruptFileBlocks(dir, url.toString());
     }
     URL path = new URL(url.toString());
-    SecurityUtil.fetchServiceTicket(path);
-    URLConnection connection = path.openConnection();
+    URLConnection connection = SecurityUtil.openSecureHttpConnection(path);
     InputStream stream = connection.getInputStream();
     BufferedReader input = new BufferedReader(new InputStreamReader(
                                               stream, "UTF-8"));

+ 4 - 11
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DelegationTokenFetcher.java

@@ -72,11 +72,6 @@ public class DelegationTokenFetcher {
   private static final String RENEW = "renew";
   private static final String PRINT = "print";
 
-  static {
-    // Enable Kerberos sockets
-    System.setProperty("https.cipherSuites", "TLS_KRB5_WITH_3DES_EDE_CBC_SHA");
-  }
-
   private static void printUsage(PrintStream err) throws IOException {
     err.println("fetchdt retrieves delegation tokens from the NameNode");
     err.println();
@@ -106,7 +101,7 @@ public class DelegationTokenFetcher {
     final Configuration conf = new HdfsConfiguration();
     Options fetcherOptions = new Options();
     fetcherOptions.addOption(WEBSERVICE, true,
-        "HTTPS url to reach the NameNode at");
+        "HTTP url to reach the NameNode at");
     fetcherOptions.addOption(RENEWER, true,
         "Name of the delegation token renewer");
     fetcherOptions.addOption(CANCEL, false, "cancel the token");
@@ -216,8 +211,7 @@ public class DelegationTokenFetcher {
         url.append(nnAddr).append(GetDelegationTokenServlet.PATH_SPEC);
       }
       URL remoteURL = new URL(url.toString());
-      SecurityUtil.fetchServiceTicket(remoteURL);
-      URLConnection connection = URLUtils.openConnection(remoteURL);
+      URLConnection connection = SecurityUtil.openSecureHttpConnection(remoteURL);
       InputStream in = connection.getInputStream();
       Credentials ts = new Credentials();
       dis = new DataInputStream(in);
@@ -256,7 +250,7 @@ public class DelegationTokenFetcher {
     
     try {
       URL url = new URL(buf.toString());
-      SecurityUtil.fetchServiceTicket(url);
+      connection = (HttpURLConnection) SecurityUtil.openSecureHttpConnection(url);
       connection = (HttpURLConnection)URLUtils.openConnection(url);
       if (connection.getResponseCode() != HttpURLConnection.HTTP_OK) {
         throw new IOException("Error renewing token: " + 
@@ -350,8 +344,7 @@ public class DelegationTokenFetcher {
     HttpURLConnection connection=null;
     try {
       URL url = new URL(buf.toString());
-      SecurityUtil.fetchServiceTicket(url);
-      connection = (HttpURLConnection)URLUtils.openConnection(url);
+      connection = (HttpURLConnection) SecurityUtil.openSecureHttpConnection(url);
       if (connection.getResponseCode() != HttpURLConnection.HTTP_OK) {
         throw new IOException("Error cancelling token: " + 
             connection.getResponseMessage());

+ 11 - 0
hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml

@@ -858,4 +858,15 @@
   </description>
 </property>
 
+<property>
+  <name>dfs.namenode.kerberos.internal.spnego.principal</name>
+  <value>${dfs.web.authentication.kerberos.principal}</value>
+</property>
+
+<property>
+  <name>dfs.secondary.namenode.kerberos.internal.spnego.principal</name>
+  <value>${dfs.web.authentication.kerberos.principal}</value>
+</property>
+
+
 </configuration>
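
Both new properties default to ${dfs.web.authentication.kerberos.principal}, relying on Configuration's variable expansion, so setting the web authentication principal once covers the internal SPNEGO principals as well. A small sketch of that resolution (the realm is illustrative):

import org.apache.hadoop.conf.Configuration;

public class SpnegoPrincipalDefaults {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    conf.set("dfs.web.authentication.kerberos.principal",
        "HTTP/_HOST@EXAMPLE.COM"); // illustrative value
    // Mirrors the hdfs-default.xml entry above; Configuration.get()
    // expands ${...} references against other properties.
    conf.set("dfs.namenode.kerberos.internal.spnego.principal",
        "${dfs.web.authentication.kerberos.principal}");
    System.out.println(
        conf.get("dfs.namenode.kerberos.internal.spnego.principal"));
    // prints: HTTP/_HOST@EXAMPLE.COM
  }
}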