瀏覽代碼

Merging 995771d025f6fc9a0ceb20ff6f10dac643402403 from bimota-1.1.2
HADOOP-9296. Allow users from different realm to authenticate without a trust relationship. Contributed by Benoy Antony.

Conflicts:

HDP-CHANGES.txt

Arun C. Murthy 12 年之前
父節點
當前提交
f2c561727c

+ 4 - 0
HDP-CHANGES.txt

@@ -42,3 +42,7 @@ Condor release branched from Apache @ https://svn.apache.org/repos/asf/hadoop/co
 
     HDFS-4108. Fix dfsnodelist to work in secure mode. Contributed by Benoy
     Antony.
+ 
+    HADOOP-9296. Allow users from different realm to authenticate without a
+    trust relationship. (Benoy Antony via acmurthy)
+

+ 3 - 0
LONGWING-CHANGES.txt

@@ -7,6 +7,9 @@ LONGWING changes
     HADOOP-8923. Do not show intermediate web-ui page when authentication cookie
     (SPENGO/custom) expires. (Benoy Antony via acmurthy)
 
+    HADOOP-9296. Allow users from different realm to authenticate without a
+    trust relationship. (Benoy Antony via acmurthy)
+
   Changes now available from Apache
 
     HADOOP-8878. Uppercase namenode hostname causes issues with security turned

+ 15 - 4
src/core/org/apache/hadoop/ipc/Client.java

@@ -55,6 +55,7 @@ import org.apache.hadoop.io.retry.RetryPolicies;
 import org.apache.hadoop.io.retry.RetryPolicy;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.KerberosInfo;
+import org.apache.hadoop.security.MultiRealmUserAuthentication;
 import org.apache.hadoop.security.SaslRpcClient;
 import org.apache.hadoop.security.SaslRpcServer.AuthMethod;
 import org.apache.hadoop.security.SecurityUtil;
@@ -273,7 +274,14 @@ public class Client {
         authMethod = AuthMethod.SIMPLE;
       } else if (token != null) {
         authMethod = AuthMethod.DIGEST;
-      } else {
+      } else  if  ( MultiRealmUserAuthentication.isAUserInADifferentRealm (ticket, conf) ){
+        authMethod =   AuthMethod.KERBEROS_USER_REALM;
+        serverPrincipal  = MultiRealmUserAuthentication.replaceRealmWithUserRealm(serverPrincipal, conf);
+        if (LOG.isDebugEnabled()){
+          LOG.debug("AuthMehod is KERBEROS_USER_REALM and serverPrincipal is changed to " + serverPrincipal);
+        }
+      }
+      else {
         authMethod = AuthMethod.KERBEROS;
       }
       
@@ -290,7 +298,8 @@ public class Client {
       this.setDaemon(true);
     }
 
-    /** Update lastActivity with the current time. */
+
+	/** Update lastActivity with the current time. */
     private void touch() {
       lastActivity.set(System.currentTimeMillis());
     }
@@ -380,7 +389,8 @@ public class Client {
       UserGroupInformation currentUser = 
         UserGroupInformation.getCurrentUser();
       UserGroupInformation realUser = currentUser.getRealUser();
-      if (authMethod == AuthMethod.KERBEROS && 
+      if ((authMethod == AuthMethod.KERBEROS || 
+    		  authMethod == AuthMethod.KERBEROS_USER_REALM) && 
           loginUser != null &&
           //Make sure user logged in using Kerberos either keytab or TGT
           loginUser.hasKerberosCredentials() && 
@@ -584,7 +594,8 @@ public class Client {
             final InputStream in2 = inStream;
             final OutputStream out2 = outStream;
             UserGroupInformation ticket = remoteId.getTicket();
-            if (authMethod == AuthMethod.KERBEROS) {
+            if (authMethod == AuthMethod.KERBEROS || 
+                authMethod == AuthMethod.KERBEROS_USER_REALM) {
               if (ticket.getRealUser() != null) {
                 ticket = ticket.getRealUser();
               }

+ 2 - 1
src/core/org/apache/hadoop/ipc/ConnectionHeader.java

@@ -83,7 +83,8 @@ class ConnectionHeader implements Writable {
   public void write(DataOutput out) throws IOException {
     Text.writeString(out, (protocol == null) ? "" : protocol);
     if (ugi != null) {
-      if (authMethod == AuthMethod.KERBEROS) {
+      if (authMethod == AuthMethod.KERBEROS || 
+          authMethod == AuthMethod.KERBEROS_USER_REALM) {
         // Send effective user for Kerberos auth
         out.writeBoolean(true);
         out.writeUTF(ugi.getUserName());

+ 36 - 24
src/core/org/apache/hadoop/ipc/Server.java

@@ -18,6 +18,10 @@
 
 package org.apache.hadoop.ipc;
 
+import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION;
+import static org.apache.hadoop.fs.CommonConfigurationKeys.IPC_SERVER_RPC_READ_THREADS_DEFAULT;
+import static org.apache.hadoop.fs.CommonConfigurationKeys.IPC_SERVER_RPC_READ_THREADS_KEY;
+
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.DataInputStream;
@@ -50,10 +54,9 @@ import java.util.Map;
 import java.util.Random;
 import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
-import java.util.concurrent.TimeUnit;
+import java.util.concurrent.LinkedBlockingQueue;
 
 import javax.security.sasl.Sasl;
 import javax.security.sasl.SaslException;
@@ -65,17 +68,17 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.IntWritable;
-import static org.apache.hadoop.fs.CommonConfigurationKeys.*;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.ipc.metrics.RpcInstrumentation;
 import org.apache.hadoop.security.AccessControlException;
+import org.apache.hadoop.security.MultiRealmUserAuthentication;
 import org.apache.hadoop.security.SaslRpcServer;
-import org.apache.hadoop.security.SaslRpcServer.SaslStatus;
-import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.SaslRpcServer.AuthMethod;
 import org.apache.hadoop.security.SaslRpcServer.SaslDigestCallbackHandler;
 import org.apache.hadoop.security.SaslRpcServer.SaslGssCallbackHandler;
+import org.apache.hadoop.security.SaslRpcServer.SaslStatus;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.hadoop.security.authorize.AuthorizationException;
 import org.apache.hadoop.security.authorize.ProxyUsers;
@@ -972,27 +975,14 @@ public abstract class Server {
                   SaslRpcServer.SASL_PROPS, new SaslDigestCallbackHandler(
                       secretManager, this));
               break;
+            case KERBEROS_USER_REALM:
+              UserGroupInformation ugi = MultiRealmUserAuthentication.getServerUGIForUserRealm(conf);
+              createSaslServer(ugi);
+              break;
             default:
               UserGroupInformation current = UserGroupInformation
-                  .getCurrentUser();
-              String fullName = current.getUserName();
-              if (LOG.isDebugEnabled())
-                LOG.debug("Kerberos principal name is " + fullName);
-              final String names[] = SaslRpcServer.splitKerberosName(fullName);
-              if (names.length != 3) {
-                throw new AccessControlException(
-                    "Kerberos principal name does NOT have the expected "
-                        + "hostname part: " + fullName);
-              }
-              current.doAs(new PrivilegedExceptionAction<Object>() {
-                @Override
-                public Object run() throws SaslException {
-                  saslServer = Sasl.createSaslServer(AuthMethod.KERBEROS
-                      .getMechanismName(), names[0], names[1],
-                      SaslRpcServer.SASL_PROPS, new SaslGssCallbackHandler());
-                  return null;
-                }
-              });
+              .getCurrentUser();
+              createSaslServer(current);
             }
             if (saslServer == null)
               throw new AccessControlException(
@@ -1060,6 +1050,28 @@ public abstract class Server {
         }
       }
     }
+
+    private void createSaslServer(UserGroupInformation ugi)
+        throws AccessControlException, IOException, InterruptedException {
+      String fullName = ugi.getUserName();
+      if (LOG.isDebugEnabled())
+        LOG.debug("Kerberos principal name is " + fullName);
+      final String names[] = SaslRpcServer.splitKerberosName(fullName);
+      if (names.length != 3) {
+        throw new AccessControlException(
+            "Kerberos principal name does NOT have the expected "
+            + "hostname part: " + fullName);
+      }
+      ugi.doAs(new PrivilegedExceptionAction<Object>() {
+        @Override
+        public Object run() throws SaslException {
+          saslServer = Sasl.createSaslServer(AuthMethod.KERBEROS
+              .getMechanismName(), names[0], names[1],
+              SaslRpcServer.SASL_PROPS, new SaslGssCallbackHandler());
+          return null;
+        }
+      });
+    }
     
     private void doSaslReply(SaslStatus status, Writable rv,
         String errorClass, String error) throws IOException {

+ 110 - 0
src/core/org/apache/hadoop/security/MultiRealmUserAuthentication.java

@@ -0,0 +1,110 @@
+package org.apache.hadoop.security;
+
+import java.io.IOException;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+
+/**
+ * Utility class to support users and servers belonging to two different realms.
+ *
+ */
+
+public class MultiRealmUserAuthentication {
+  private static final Log LOG = LogFactory.getLog(MultiRealmUserAuthentication.class);
+  //configuration property name for the user realm
+  public static String  KERBEROS_USER_REALM ="hadoop.security.authentication.user.realm";
+
+  // class variable used to cache the server UGI for the user realm
+  private  static UserGroupInformation  ugi ;
+
+  /**
+   * Returns the UGI for the server principal in the user realm.
+   * This will be the same name as the server principal of the default realm with the
+   * realm name replaced with the user realm name.
+   * Once created, the UGI is cached.
+   * @param conf configuration containing the user realm property
+   * @return UserGroupInformation for the server principal in the user realm
+   */
+  public static UserGroupInformation getServerUGIForUserRealm (Configuration conf){
+    if (ugi == null) {
+      return getServerUGI (conf.get(KERBEROS_USER_REALM));
+    }
+    return ugi;
+  }
+
+  /**
+   * Returns true if this is a user in a different realm than the default
+   * realm of the Hadoop servers.
+   * Returns true only if all the following conditions are satisfied:
+   *  a) a separate user realm is configured
+   *  b) the principal is not a server principal
+   *  c) the principal belongs to the user realm
+   * @param ticket the user's UGI
+   * @param conf configuration containing the user realm property
+   * @return true if the user belongs to the separate user realm
+   */
+  public static  boolean isAUserInADifferentRealm(UserGroupInformation ticket,
+      Configuration conf) {
+    if (isEnabled (conf)){
+      String fullName = ticket.getUserName();
+      String names[] = SaslRpcServer.splitKerberosName(fullName);
+      //make sure that it is not a server
+      if (names.length < 3) {
+        //check if the principal belongs to user realm
+        if (fullName.toLowerCase().endsWith(conf.get(KERBEROS_USER_REALM).toLowerCase())){
+          return true;
+        }				
+      }	    	 			 
+    }
+    return false;
+  }
+  
+  /**
+   * Replaces the realm part of the principal name with the user realm.
+   * This method is invoked on the client side.
+   * @param principalName the server principal in the default realm
+   * @param conf configuration containing the user realm property
+   * @return the server principal name in the user realm
+   */
+  public static String replaceRealmWithUserRealm(
+      String principalName, Configuration conf ) {
+    return replaceRealm ( principalName, conf.get(KERBEROS_USER_REALM));
+  }
+
+  private static boolean isEnabled (Configuration conf){
+    return (conf.get(KERBEROS_USER_REALM) != null);
+  }
+
+  private static synchronized  UserGroupInformation getServerUGI(String userRealm){
+    UserGroupInformation current;
+    try {
+      current = UserGroupInformation.getCurrentUser();
+      String principalName = current.getUserName();
+
+      String principalInUserRealm = replaceRealm ( principalName, userRealm);
+      ugi = UserGroupInformation.loginServerFromCurrentKeytabAndReturnUGI
+                                               (principalInUserRealm);
+      return ugi;
+    } catch (IOException e) {
+      LOG.warn("Current user information cannot be obtained", e);
+      return null;
+    }
+  }
+
+  private static String replaceRealm(String principalName, String userRealm) {
+    String[] parts = principalName.split("[/@]");
+
+    if (parts.length >2){
+      String[] serverParts = parts[1].split("[.]");
+      String serverName  = serverParts[0] + "." + userRealm.toLowerCase();
+      return parts[0] + "/" +  serverName + "@" + userRealm;
+    }
+    else {
+      LOG.warn ("The serverPrincipal = " + principalName +
+      "doesn't confirm to the standards");
+      throw new IllegalArgumentException();
+    }
+  }
+}

+ 28 - 20
src/core/org/apache/hadoop/security/SaslRpcClient.java

@@ -75,26 +75,9 @@ public class SaslRpcClient {
           SaslRpcServer.SASL_PROPS, new SaslClientCallbackHandler(token));
       break;
     case KERBEROS:
-      if (LOG.isDebugEnabled()) {
-        LOG
-            .debug("Creating SASL " + AuthMethod.KERBEROS.getMechanismName()
-                + " client. Server's Kerberos principal name is "
-                + serverPrincipal);
-      }
-      if (serverPrincipal == null || serverPrincipal.length() == 0) {
-        throw new IOException(
-            "Failed to specify server's Kerberos principal name");
-      }
-      String names[] = SaslRpcServer.splitKerberosName(serverPrincipal);
-      if (names.length != 3) {
-        throw new IOException(
-          "Kerberos principal name does NOT have the expected hostname part: "
-                + serverPrincipal);
-      }
-      saslClient = Sasl.createSaslClient(new String[] { AuthMethod.KERBEROS
-          .getMechanismName() }, null, names[0], names[1],
-          SaslRpcServer.SASL_PROPS, null);
-      break;
+    case KERBEROS_USER_REALM:
+    	saslClient = createKerberosClient(serverPrincipal);
+        break;
     default:
       throw new IOException("Unknown authentication method " + method);
     }
@@ -102,6 +85,31 @@ public class SaslRpcClient {
       throw new IOException("Unable to find SASL client implementation");
   }
 
+
+  private SaslClient createKerberosClient(String serverPrincipal) throws IOException,
+  SaslException {
+    if (LOG.isDebugEnabled()) {
+      LOG
+      .debug("Creating SASL " + AuthMethod.KERBEROS.getMechanismName()
+          + " client. Server's Kerberos principal name is "
+          + serverPrincipal);
+    }
+    if (serverPrincipal == null || serverPrincipal.length() == 0) {
+      throw new IOException(
+      "Failed to specify server's Kerberos principal name");
+    }
+    String names[] = SaslRpcServer.splitKerberosName(serverPrincipal);
+    if (names.length != 3) {
+      throw new IOException(
+          "Kerberos principal name does NOT have the expected hostname part: "
+          + serverPrincipal);
+    }
+    return Sasl.createSaslClient(new String[] { AuthMethod.KERBEROS
+        .getMechanismName() }, null, names[0], names[1],
+        SaslRpcServer.SASL_PROPS, null);
+  }
+
+
   private static void readStatus(DataInputStream inStream) throws IOException {
     int status = inStream.readInt(); // read status
     if (status != SaslStatus.SUCCESS.state) {

+ 4 - 3
src/core/org/apache/hadoop/security/SaslRpcServer.java

@@ -40,10 +40,10 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.ipc.Server;
-import org.apache.hadoop.security.token.SecretManager;
-import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
+import org.apache.hadoop.security.token.SecretManager;
 import org.apache.hadoop.security.token.SecretManager.InvalidToken;
+import org.apache.hadoop.security.token.TokenIdentifier;
 
 /**
  * A utility class for dealing with SASL on RPC server
@@ -133,7 +133,8 @@ public class SaslRpcServer {
   public static enum AuthMethod {
     SIMPLE((byte) 80, "", AuthenticationMethod.SIMPLE),
     KERBEROS((byte) 81, "GSSAPI", AuthenticationMethod.KERBEROS),
-    DIGEST((byte) 82, "DIGEST-MD5", AuthenticationMethod.TOKEN);
+    DIGEST((byte) 82, "DIGEST-MD5", AuthenticationMethod.TOKEN),
+    KERBEROS_USER_REALM((byte) 83, "GSSAPI", AuthenticationMethod.KERBEROS);
 
     /** The code for this method. */
     public final byte code;

+ 48 - 6
src/core/org/apache/hadoop/security/UserGroupInformation.java

@@ -351,6 +351,8 @@ public class UserGroupInformation {
       "hadoop-user-kerberos";
     private static final String KEYTAB_KERBEROS_CONFIG_NAME = 
       "hadoop-keytab-kerberos";
+    private static final String SERVER_KEYTAB_KERBEROS_CONFIG_NAME = 
+      "server-hadoop-keytab-kerberos";
     
     private static final AppConfigurationEntry OS_SPECIFIC_LOGIN =
       new AppConfigurationEntry(OS_LOGIN_MODULE_NAME,
@@ -387,6 +389,19 @@ public class UserGroupInformation {
       new AppConfigurationEntry(KerberosUtil.getKrb5LoginModuleName(),
                                 LoginModuleControlFlag.REQUIRED,
                                 KEYTAB_KERBEROS_OPTIONS);
+    private static final Map<String,String> SERVER_KEYTAB_KERBEROS_OPTIONS = 
+      new HashMap<String,String>();
+    static {
+      SERVER_KEYTAB_KERBEROS_OPTIONS.put("doNotPrompt", "true");
+      SERVER_KEYTAB_KERBEROS_OPTIONS.put("useKeyTab", "true");
+      SERVER_KEYTAB_KERBEROS_OPTIONS.put("storeKey", "true");
+      SERVER_KEYTAB_KERBEROS_OPTIONS.put("isInitiator", "false");
+    }
+
+    private static final AppConfigurationEntry SERVER_KEYTAB_KERBEROS_LOGIN =
+      new AppConfigurationEntry(KerberosUtil.getKrb5LoginModuleName(),
+          LoginModuleControlFlag.REQUIRED,
+          SERVER_KEYTAB_KERBEROS_OPTIONS);
     
     private static final AppConfigurationEntry[] SIMPLE_CONF = 
       new AppConfigurationEntry[]{OS_SPECIFIC_LOGIN, HADOOP_LOGIN};
@@ -397,6 +412,10 @@ public class UserGroupInformation {
 
     private static final AppConfigurationEntry[] KEYTAB_KERBEROS_CONF =
       new AppConfigurationEntry[]{KEYTAB_KERBEROS_LOGIN, HADOOP_LOGIN};
+    
+    private static final AppConfigurationEntry[] SERVER_KEYTAB_KERBEROS_CONF =
+      new AppConfigurationEntry[]{SERVER_KEYTAB_KERBEROS_LOGIN, HADOOP_LOGIN};
+
 
     @Override
     public AppConfigurationEntry[] getAppConfigurationEntry(String appName) {
@@ -409,6 +428,11 @@ public class UserGroupInformation {
         KEYTAB_KERBEROS_OPTIONS.put("principal", keytabPrincipal);
         return KEYTAB_KERBEROS_CONF;
       }
+      else if (SERVER_KEYTAB_KERBEROS_CONFIG_NAME.equals(appName)) {
+        SERVER_KEYTAB_KERBEROS_OPTIONS.put("keyTab", keytabFile);
+        SERVER_KEYTAB_KERBEROS_OPTIONS.put("principal", keytabPrincipal);
+        return SERVER_KEYTAB_KERBEROS_CONF;
+      }      
       return null;
     }
   }
@@ -700,7 +724,6 @@ public class UserGroupInformation {
     } 
   }
 
-
   /**
    * Log a user in from a keytab file. Loads a user identity from a keytab
    * file and login them in. This new user does not affect the currently
@@ -711,8 +734,28 @@ public class UserGroupInformation {
    */
   public synchronized
   static UserGroupInformation loginUserFromKeytabAndReturnUGI(String user,
-                                  String path
-                                  ) throws IOException {
+                                  String path) throws IOException {
+    return loginUserFromKeytabAndReturnUGI(user, path, 
+        HadoopConfiguration.KEYTAB_KERBEROS_CONFIG_NAME);
+  }
+  
+  /**
+   * Log in a server using the current keytab.
+   * This will not make a call to the KDC.
+   * @param server the principal name to load from the keytab
+   * @throws IOException if the keytab file can't be read
+   */
+  public synchronized
+  static UserGroupInformation loginServerFromCurrentKeytabAndReturnUGI(String server) 
+    throws IOException {
+    return loginUserFromKeytabAndReturnUGI(server, keytabFile, 
+        HadoopConfiguration.SERVER_KEYTAB_KERBEROS_CONFIG_NAME);
+  }
+  
+  private synchronized
+  static UserGroupInformation loginUserFromKeytabAndReturnUGI(String user,
+                                  String path,
+                                  String hadoopConfiguration) throws IOException {
     if (!isSecurityEnabled())
       return UserGroupInformation.getCurrentUser();
     String oldKeytabFile = null;
@@ -726,8 +769,7 @@ public class UserGroupInformation {
       keytabPrincipal = user;
       Subject subject = new Subject();
       
-      LoginContext login = 
-        newLoginContext(HadoopConfiguration.KEYTAB_KERBEROS_CONFIG_NAME, subject); 
+      LoginContext login = newLoginContext(hadoopConfiguration, subject); 
        
       start = System.currentTimeMillis();
       login.login();
@@ -748,7 +790,7 @@ public class UserGroupInformation {
       if(oldKeytabPrincipal != null) keytabPrincipal = oldKeytabPrincipal;
     }
   }
-
+  
   /**
    * Re-login a user from keytab if TGT is expired or is close to expiry.
    * 

+ 39 - 0
src/test/org/apache/hadoop/security/TestMultiRealmUserAuthentication.java

@@ -0,0 +1,39 @@
+package org.apache.hadoop.security;
+
+import static org.junit.Assert.*;
+
+import org.apache.hadoop.conf.Configuration;
+import org.junit.Test;
+
+public class TestMultiRealmUserAuthentication {
+
+  @Test
+  public void testReplaceRealmWithUserRealm(){
+
+    Configuration conf = new Configuration();
+
+    conf.set(MultiRealmUserAuthentication.KERBEROS_USER_REALM, "CORP.COM");
+
+    String replaced = MultiRealmUserAuthentication.
+    replaceRealmWithUserRealm("hadoop/hostname@HADOOP.COM", conf);
+    assertEquals ("hadoop/hostname@CORP.COM",replaced  );
+  }
+
+  @Test
+  public void testReplaceRealmWithUserRealmWithAUserPrincipal(){
+
+    Configuration conf = new Configuration();
+
+    conf.set(MultiRealmUserAuthentication.KERBEROS_USER_REALM, "CORP.COM");
+
+    try {
+      String replaced = MultiRealmUserAuthentication.
+      replaceRealmWithUserRealm("hadoop/HADOOP.COM", conf);
+      fail ();
+    }
+    catch (IllegalArgumentException e){
+      //this is expected
+    }
+  }
+
+}