
HADOOP-6929. Backport changes to MR-279 (mahadev and owen)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/MR-279@1150294 13f79535-47bb-0310-9956-ffa450edef68
Mahadev Konar, 13 years ago
commit e56f773414
42 changed files with 219 additions and 113 deletions
  1. +5 -0    common/build.xml
  2. +0 -9    common/src/java/core-default.xml
  3. +0 -3    common/src/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
  4. +3 -10   common/src/java/org/apache/hadoop/ipc/Client.java
  5. +3 -2    common/src/java/org/apache/hadoop/security/AnnotatedSecurityInfo.java
  6. +9 -2    common/src/java/org/apache/hadoop/security/SecurityInfo.java
  7. +55 -17  common/src/java/org/apache/hadoop/security/SecurityUtil.java
  8. +1 -9    common/src/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java
  9. +11 -2   common/src/test/core/org/apache/hadoop/ipc/TestAvroRpc.java
 10. +19 -15  common/src/test/core/org/apache/hadoop/ipc/TestSaslRPC.java
 11. +2 -0    mapreduce/CHANGES.txt
 12. +1 -1    mapreduce/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/client/MRClientService.java
 13. +2 -1    mapreduce/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/launcher/ContainerLauncherImpl.java
 14. +1 -1    mapreduce/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java
 15. +3 -2    mapreduce/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/security/client/ClientHSSecurityInfo.java
 16. +2 -1    mapreduce/mr-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java
 17. +3 -2    mapreduce/mr-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java
 18. +1 -1    mapreduce/mr-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ResourceMgrDelegate.java
 19. +3 -0    mapreduce/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
 20. +0 -1    mapreduce/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/factory/providers/RpcFactoryProvider.java
 21. +3 -2    mapreduce/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/HadoopYarnProtoRPC.java
 22. +3 -3    mapreduce/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/HadoopYarnRPC.java
 23. +5 -5    mapreduce/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/ProtoOverHadoopRpcEngine.java
 24. +47 -0   mapreduce/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/TunnelProtocolSecurityInfo.java
 25. +3 -2    mapreduce/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/security/ContainerManagerSecurityInfo.java
 26. +3 -2    mapreduce/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/security/SchedulerSecurityInfo.java
 27. +3 -2    mapreduce/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/security/admin/AdminSecurityInfo.java
 28. +3 -2    mapreduce/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/security/client/ClientRMSecurityInfo.java
 29. +1 -0    mapreduce/yarn/yarn-common/src/main/resources/META-INF/services/org.apache.hadoop.security.SecurityInfo
 30. +3 -2    mapreduce/yarn/yarn-server/yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/RMNMSecurityInfoClass.java
 31. +2 -1    mapreduce/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeStatusUpdaterImpl.java
 32. +2 -1    mapreduce/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/ContainerManagerImpl.java
 33. +2 -1    mapreduce/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ContainerLocalizer.java
 34. +2 -1    mapreduce/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java
 35. +3 -2    mapreduce/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/security/LocalizerSecurityInfo.java
 36. +2 -1    mapreduce/yarn/yarn-server/yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/AdminService.java
 37. +1 -1    mapreduce/yarn/yarn-server/yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ApplicationMasterService.java
 38. +1 -1    mapreduce/yarn/yarn-server/yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java
 39. +2 -1    mapreduce/yarn/yarn-server/yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceTrackerService.java
 40. +1 -1    mapreduce/yarn/yarn-server/yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/applicationsmanager/AMLauncher.java
 41. +1 -1    mapreduce/yarn/yarn-server/yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/tools/RMAdmin.java
 42. +2 -2    mapreduce/yarn/yarn-server/yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestContainerTokenSecretManager.java

+ 5 - 0
common/build.xml

@@ -454,6 +454,11 @@
     <property name="jar.properties.list" value="commons-logging.properties, log4j.properties, hadoop-metrics.properties" />
     <jar jarfile="${build.dir}/${final.name}.jar"
          basedir="${build.classes}">
+      <service type="org.apache.hadoop.security.SecurityInfo">
+        <provider 
+           classname="org.apache.hadoop.security.AnnotatedSecurityInfo"/>
+      </service>
+
       <manifest>
         <section name="org/apache/hadoop">
           <attribute name="Implementation-Title" value="${ant.project.name}"/>
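
The <service> element added above asks Ant to write a META-INF/services/org.apache.hadoop.security.SecurityInfo entry into the common jar naming AnnotatedSecurityInfo, which is how java.util.ServiceLoader finds the default provider at runtime. A minimal sketch of that discovery, assuming hadoop-common is on the classpath (ListSecurityInfoProviders is a hypothetical helper, not part of this change):

    import java.util.ServiceLoader;
    import org.apache.hadoop.security.SecurityInfo;

    // Hypothetical helper: prints every SecurityInfo provider that
    // ServiceLoader can discover through META-INF/services entries.
    public class ListSecurityInfoProviders {
      public static void main(String[] args) {
        for (SecurityInfo provider : ServiceLoader.load(SecurityInfo.class)) {
          System.out.println(provider.getClass().getName());
        }
      }
    }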

+ 0 - 9
common/src/java/core-default.xml

@@ -97,15 +97,6 @@
   </description>
 </property>
 
-<property>
-  <name>hadoop.security.info.class.name</name>
-  <value>org.apache.hadoop.security.AnnotatedSecurityInfo</value>
-  <description>
-    Implementation of org.apache.hadoop.security.SecurityInfo interface to 
-    be used by RPC for a given protocol.
-  </description>
-</property>
-
 <property>
   <name>hadoop.rpc.protection</name>
   <value>authentication</value>

+ 0 - 3
common/src/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java

@@ -216,8 +216,5 @@ public class CommonConfigurationKeysPublic {
   /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
   public static final String  HADOOP_SECURITY_SERVICE_USER_NAME_KEY =
     "hadoop.security.service.user.name.key";
-  /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
-  public static final String  HADOOP_SECURITY_INFO_CLASS_NAME =
-    "hadoop.security.info.class.name";
 }
 

+ 3 - 10
common/src/java/org/apache/hadoop/ipc/Client.java

@@ -58,10 +58,8 @@ import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.io.DataOutputBuffer;
 import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.security.AnnotatedSecurityInfo;
 import org.apache.hadoop.security.KerberosInfo;
 import org.apache.hadoop.security.SaslRpcClient;
-import org.apache.hadoop.security.SecurityInfo;
 import org.apache.hadoop.security.SaslRpcServer.AuthMethod;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -257,8 +255,7 @@ public class Client {
       this.useSasl = UserGroupInformation.isSecurityEnabled();
       LOG.debug("Protocol is " + protocol + " useSasl is " + useSasl);
       if (useSasl && protocol != null) {
-        TokenInfo tokenInfo = SecurityUtil.getSecurityInfo(
-            remoteId.conf).getTokenInfo(protocol);
+        TokenInfo tokenInfo = SecurityUtil.getTokenInfo(protocol, remoteId.conf);
         if (tokenInfo != null) {
           TokenSelector<? extends TokenIdentifier> tokenSelector = null;
           try {
@@ -273,10 +270,7 @@ public class Client {
               .getHostAddress() + ":" + addr.getPort()), 
               ticket.getTokens());
         }
-        KerberosInfo krbInfo = SecurityUtil.getSecurityInfo(
-            remoteId.conf).getKerborosInfo(protocol);
-        LOG.debug("securityinfo class is " + SecurityUtil.getSecurityInfo(
-            remoteId.conf).getClass().getCanonicalName());
+        KerberosInfo krbInfo = SecurityUtil.getKerberosInfo(protocol, remoteId.conf);
         LOG.debug("KerberosInfo object's class is " + krbInfo);
         if (krbInfo != null) {
           serverPrincipal = remoteId.getServerPrincipal();
@@ -1295,8 +1289,7 @@ public class Client {
       if (!UserGroupInformation.isSecurityEnabled() || protocol == null) {
         return null;
       }
-      KerberosInfo krbInfo = SecurityUtil.getSecurityInfo(
-          conf).getKerborosInfo(protocol);
+      KerberosInfo krbInfo = SecurityUtil.getKerberosInfo(protocol, conf);
       if (krbInfo != null) {
         String serverKey = krbInfo.serverPrincipal();
         if (serverKey == null) {

+ 3 - 2
common/src/java/org/apache/hadoop/security/AnnotatedSecurityInfo.java

@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.security;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.token.TokenInfo;
 
 /**
@@ -26,12 +27,12 @@ import org.apache.hadoop.security.token.TokenInfo;
 public class AnnotatedSecurityInfo implements SecurityInfo {
 
   @Override
-  public KerberosInfo getKerborosInfo(Class<?> protocol) {
+  public KerberosInfo getKerberosInfo(Class<?> protocol, Configuration conf) {
     return protocol.getAnnotation(KerberosInfo.class);
   }
 
   @Override
-  public TokenInfo getTokenInfo(Class<?> protocol) {
+  public TokenInfo getTokenInfo(Class<?> protocol, Configuration conf) {
     return protocol.getAnnotation(TokenInfo.class);
   }
 

+ 9 - 2
common/src/java/org/apache/hadoop/security/SecurityInfo.java

@@ -18,8 +18,13 @@
 
 package org.apache.hadoop.security;
 
+import org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate;
+import org.apache.hadoop.classification.InterfaceStability.Evolving;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.token.TokenInfo;
 
+@Evolving
+@LimitedPrivate({"MapReduce"})
 /**
  * Interface used by RPC to get the Security information for a given 
  * protocol.
@@ -29,15 +34,17 @@ public interface SecurityInfo {
   /**
    * Get the KerberosInfo for a given protocol.
    * @param protocol interface class
+   * @param conf configuration object
    * @return KerberosInfo
    */
-  KerberosInfo getKerborosInfo(Class<?> protocol);
+  KerberosInfo getKerberosInfo(Class<?> protocol, Configuration conf);
 
   /**
    * Get the TokenInfo for a given protocol.
    * @param protocol interface class
+   * @param conf configuration object
    * @return TokenInfo instance
    */
-  TokenInfo getTokenInfo(Class<?> protocol);
+  TokenInfo getTokenInfo(Class<?> protocol, Configuration conf);
 
 }

+ 55 - 17
common/src/java/org/apache/hadoop/security/SecurityUtil.java

@@ -22,6 +22,7 @@ import java.net.URI;
 import java.net.URL;
 import java.net.UnknownHostException;
 import java.security.AccessController;
+import java.util.ServiceLoader;
 import java.util.Set;
 
 import javax.security.auth.Subject;
@@ -33,8 +34,8 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.token.TokenInfo;
 
 import sun.security.jgss.krb5.Krb5Util;
 import sun.security.krb5.Credentials;
@@ -283,26 +284,63 @@ public class SecurityUtil {
     return sb.toString();
   }
 
-  @SuppressWarnings("unchecked")
+  private static ServiceLoader<SecurityInfo> securityInfoProviders = 
+      ServiceLoader.load(SecurityInfo.class);
+  private static SecurityInfo[] testProviders = new SecurityInfo[0];
+
   /**
-   * Construct the SecurityInfo instance from the given conf for a 
-   * protocol.
-   * @param conf Configuration object with which the protocol is registered.
+   * Test setup method to register additional providers.
+   * @param providers a list of high priority providers to use
    */
-  public static SecurityInfo getSecurityInfo(Configuration conf)
-      throws IOException {
-    try {
-      Class<SecurityInfo> secInfoClass = (Class<SecurityInfo>) 
-      conf.getClass(
-        CommonConfigurationKeysPublic.HADOOP_SECURITY_INFO_CLASS_NAME, 
-        AnnotatedSecurityInfo.class);
-      SecurityInfo secInfo = secInfoClass.newInstance();
-      return secInfo;
-    } catch (Exception e) {
-      throw new IOException("Can't create the SecurityInfo instance", e);
+  @InterfaceAudience.Private
+  public static void setSecurityInfoProviders(SecurityInfo... providers) {
+    testProviders = providers;
+  }
+  
+  /**
+   * Look up the KerberosInfo for a given protocol. It searches all known
+   * SecurityInfo providers.
+   * @param protocol the protocol class to get the information for
+   * @return the KerberosInfo or null if it has no KerberosInfo defined
+   */
+  public static KerberosInfo getKerberosInfo(Class<?> protocol, Configuration conf) {
+    for(SecurityInfo provider: testProviders) {
+      KerberosInfo result = provider.getKerberosInfo(protocol, conf);
+      if (result != null) {
+        return result;
+      }
     }
+    for(SecurityInfo provider: securityInfoProviders) {
+      KerberosInfo result = provider.getKerberosInfo(protocol, conf);
+      if (result != null) {
+        return result;
+      }
+    }
+    return null;
+  }
+ 
+  /**
+   * Look up the TokenInfo for a given protocol. It searches all known
+   * SecurityInfo providers.
+   * @param protocol The protocol class to get the information for.
+   * @param conf configuration object
+   * @return the TokenInfo or null if it has no KerberosInfo defined
+   */
+  public static TokenInfo getTokenInfo(Class<?> protocol, Configuration conf) {
+    for(SecurityInfo provider: testProviders) {
+      TokenInfo result = provider.getTokenInfo(protocol, conf);
+      if (result != null) {
+        return result;
+      }      
+    }
+    for(SecurityInfo provider: securityInfoProviders) {
+      TokenInfo result = provider.getTokenInfo(protocol, conf);
+      if (result != null) {
+        return result;
+      }
+    } 
+    return null;
   }
-
   
   /**
    * Get the host name from the principal name of format <service>/host@realm.
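
With the provider chain above, callers no longer build a SecurityInfo instance from configuration; they call the static SecurityUtil lookups, as the Client.java and ServiceAuthorizationManager hunks in this commit do. A minimal usage sketch (SecurityInfoLookupExample is a hypothetical caller, not part of this patch):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.security.KerberosInfo;
    import org.apache.hadoop.security.SecurityUtil;
    import org.apache.hadoop.security.token.TokenInfo;

    // Hypothetical caller, not part of the commit.
    public class SecurityInfoLookupExample {
      static void lookup(Class<?> protocol, Configuration conf) {
        // Both lookups walk the test providers first, then every
        // ServiceLoader-discovered SecurityInfo, returning the first non-null hit.
        KerberosInfo krbInfo = SecurityUtil.getKerberosInfo(protocol, conf);
        TokenInfo tokenInfo = SecurityUtil.getTokenInfo(protocol, conf);
        if (krbInfo != null) {
          System.out.println("server principal key: " + krbInfo.serverPrincipal());
        }
        if (tokenInfo != null) {
          System.out.println("token selector: " + tokenInfo.value());
        }
      }
    }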

+ 1 - 9
common/src/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java

@@ -41,8 +41,6 @@ import org.apache.hadoop.security.UserGroupInformation;
 @InterfaceStability.Evolving
 public class ServiceAuthorizationManager {
   private static final String HADOOP_POLICY_FILE = "hadoop-policy.xml";
-  private static final Log LOG = LogFactory
-  .getLog(ServiceAuthorizationManager.class);
 
   private Map<Class<?>, AccessControlList> protocolToAcl =
     new IdentityHashMap<Class<?>, AccessControlList>();
@@ -86,13 +84,7 @@ public class ServiceAuthorizationManager {
     }
     
     // get client principal key to verify (if available)
-    KerberosInfo krbInfo;
-    try {
-      krbInfo = SecurityUtil.getSecurityInfo(
-          conf).getKerborosInfo(protocol);
-    } catch (IOException e1) {
-      throw new AuthorizationException(e1);
-    }
+    KerberosInfo krbInfo = SecurityUtil.getKerberosInfo(protocol, conf);
     String clientPrincipal = null; 
     if (krbInfo != null) {
       String clientKey = krbInfo.clientPrincipal();

+ 11 - 2
common/src/test/core/org/apache/hadoop/ipc/TestAvroRpc.java

@@ -40,6 +40,8 @@ import org.apache.hadoop.ipc.TestSaslRPC.TestTokenIdentifier;
 import org.apache.hadoop.ipc.TestSaslRPC.TestTokenSecretManager;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.SaslRpcServer;
+import org.apache.hadoop.security.SecurityInfo;
+import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
 
@@ -129,6 +131,7 @@ public class TestAvroRpc extends TestCase {
       assertTrue(caught);
 
     } finally {
+      clearSecure();
       server.stop();
     }
   }
@@ -138,8 +141,13 @@ public class TestAvroRpc extends TestCase {
     conf.set("hadoop.rpc.socket.factory.class.default", "");
     //Avro doesn't work with security annotations on protocol.
     //Avro works ONLY with custom security context
-    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_INFO_CLASS_NAME,
-        CustomSecurityInfo.class.getName());
+    SecurityUtil.setSecurityInfoProviders(new SecurityInfo[] {
+        new CustomSecurityInfo()
+    });
+  }
+
+  private void clearSecure() {
+    SecurityUtil.setSecurityInfoProviders(new SecurityInfo[0]);
   }
 
   private void addToken(TestTokenSecretManager sm, 
@@ -191,6 +199,7 @@ public class TestAvroRpc extends TestCase {
       assertEquals(3, intResult);
 
     } finally {
+      clearSecure();
       server.stop();
     }
   }

+ 19 - 15
common/src/test/core/org/apache/hadoop/ipc/TestSaslRPC.java

@@ -191,10 +191,10 @@ public class TestSaslRPC {
     }
   }
 
-  public static class CustomSecurityInfo implements SecurityInfo {
+  public static class CustomSecurityInfo  implements SecurityInfo {
 
     @Override
-    public KerberosInfo getKerborosInfo(Class<?> protocol) {
+    public KerberosInfo getKerberosInfo(Class<?> protocol, Configuration conf) {
       return new KerberosInfo() {
         @Override
         public Class<? extends Annotation> annotationType() {
@@ -212,7 +212,7 @@ public class TestSaslRPC {
     }
 
     @Override
-    public TokenInfo getTokenInfo(Class<?> protocol) {
+    public TokenInfo getTokenInfo(Class<?> protocol, Configuration conf) {
       return new TokenInfo() {
         @Override
         public Class<? extends TokenSelector<? extends 
@@ -233,19 +233,24 @@ public class TestSaslRPC {
     final Server server = RPC.getServer(TestSaslProtocol.class,
         new TestSaslImpl(), ADDRESS, 0, 5, true, conf, sm);
 
-    doDigestRpc(server, sm, conf);
+    doDigestRpc(server, sm);
   }
 
   @Test
   public void testDigestRpcWithoutAnnotation() throws Exception {
     TestTokenSecretManager sm = new TestTokenSecretManager();
     Configuration conf1 = new Configuration(conf);
-    conf1.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_INFO_CLASS_NAME,
-        CustomSecurityInfo.class.getName());
-    final Server server = RPC.getServer(TestSaslProtocol.class,
-        new TestSaslImpl(), ADDRESS, 0, 5, true, conf1, sm);
+    try {
+      SecurityUtil.setSecurityInfoProviders(new SecurityInfo[] {
+        new CustomSecurityInfo()
+      });
+      final Server server = RPC.getServer(TestSaslProtocol.class,
+          new TestSaslImpl(), ADDRESS, 0, 5, true, conf, sm);
 
-    doDigestRpc(server, sm, conf1);
+      doDigestRpc(server, sm);
+    } finally {
+      SecurityUtil.setSecurityInfoProviders(new SecurityInfo[0]);
+    }
   }
 
   @Test
@@ -254,7 +259,7 @@ public class TestSaslRPC {
         new TestSaslImpl(), ADDRESS, 0, 5, true, conf, null);
     server.disableSecurity();
     TestTokenSecretManager sm = new TestTokenSecretManager();
-    doDigestRpc(server, sm, conf);
+    doDigestRpc(server, sm);
   }
   
   @Test
@@ -265,7 +270,7 @@ public class TestSaslRPC {
 
     boolean succeeded = false;
     try {
-      doDigestRpc(server, sm, conf);
+      doDigestRpc(server, sm);
     } catch (RemoteException e) {
       LOG.info("LOGGING MESSAGE: " + e.getLocalizedMessage());
       assertTrue(ERROR_MESSAGE.equals(e.getLocalizedMessage()));
@@ -275,9 +280,8 @@ public class TestSaslRPC {
     assertTrue(succeeded);
   }
   
-  private void doDigestRpc(Server server, TestTokenSecretManager sm, 
-      Configuration config)
-      throws Exception {
+  private void doDigestRpc(Server server, TestTokenSecretManager sm
+                           ) throws Exception {
     server.start();
 
     final UserGroupInformation current = UserGroupInformation.getCurrentUser();
@@ -295,7 +299,7 @@ public class TestSaslRPC {
     TestSaslProtocol proxy = null;
     try {
       proxy = (TestSaslProtocol) RPC.getProxy(TestSaslProtocol.class,
-          TestSaslProtocol.versionID, addr, config);
+          TestSaslProtocol.versionID, addr, conf);
       //QOP must be auth
       Assert.assertEquals(SaslRpcServer.SASL_PROPS.get(Sasl.QOP), "auth");
       proxy.ping();

+ 2 - 0
mapreduce/CHANGES.txt

@@ -4,6 +4,8 @@ Trunk (unreleased changes)
 
 
     MAPREDUCE-279
+
+    HADOOP-6929. Backport changes to MR-279 (mahadev and owen)
     
     MAPREDUCE-2661. Fix TaskImpl to not access MapTaskImpl. (Ahmed Radwan 
     via sharad)  

+ 1 - 1
mapreduce/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/client/MRClientService.java

@@ -136,7 +136,7 @@ public class MRClientService extends AbstractService
           new ApplicationTokenIdentifier(this.appContext.getApplicationID());
       secretManager.setMasterKey(identifier, bytes);
       conf.setClass(
-          CommonConfigurationKeysPublic.HADOOP_SECURITY_INFO_CLASS_NAME,
+          YarnConfiguration.YARN_SECURITY_INFO,
           SchedulerSecurityInfo.class, SecurityInfo.class); // Same for now.
     }
     server =

+ 2 - 1
mapreduce/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/launcher/ContainerLauncherImpl.java

@@ -52,6 +52,7 @@ import org.apache.hadoop.yarn.api.protocolrecords.StopContainerRequest;
 import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
 import org.apache.hadoop.yarn.api.records.ContainerToken;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 import org.apache.hadoop.yarn.ipc.YarnRPC;
@@ -89,7 +90,7 @@ public class ContainerLauncherImpl extends AbstractService implements
     // doesn't affect the original configuration
     Configuration myLocalConfig = new Configuration(conf);
     myLocalConfig.setClass(
-        CommonConfigurationKeysPublic.HADOOP_SECURITY_INFO_CLASS_NAME,
+        YarnConfiguration.YARN_SECURITY_INFO,
         ContainerManagerSecurityInfo.class, SecurityInfo.class);
     this.recordFactory = RecordFactoryProvider.getRecordFactory(conf);
     super.init(myLocalConfig);

+ 1 - 1
mapreduce/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java

@@ -246,7 +246,7 @@ public class RMCommunicator extends AbstractService  {
     }
 
     if (UserGroupInformation.isSecurityEnabled()) {
-      conf.setClass(CommonConfigurationKeys.HADOOP_SECURITY_INFO_CLASS_NAME,
+      conf.setClass(YarnConfiguration.YARN_SECURITY_INFO,
           SchedulerSecurityInfo.class, SecurityInfo.class);
 
       String tokenURLEncodedStr = System.getenv().get(

+ 3 - 2
mapreduce/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/security/client/ClientHSSecurityInfo.java

@@ -20,6 +20,7 @@ package org.apache.hadoop.mapreduce.v2.security.client;
 
 import java.lang.annotation.Annotation;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.v2.jobhistory.JHConfig;
 import org.apache.hadoop.security.KerberosInfo;
 import org.apache.hadoop.security.SecurityInfo;
@@ -28,7 +29,7 @@ import org.apache.hadoop.security.token.TokenInfo;
 public class ClientHSSecurityInfo implements SecurityInfo {
 
   @Override
-  public KerberosInfo getKerborosInfo(Class<?> protocol) {
+  public KerberosInfo getKerberosInfo(Class<?> protocol, Configuration conf) {
     return new KerberosInfo() {
 
       @Override
@@ -49,7 +50,7 @@ public class ClientHSSecurityInfo implements SecurityInfo {
   }
 
   @Override
-  public TokenInfo getTokenInfo(Class<?> protocol) {
+  public TokenInfo getTokenInfo(Class<?> protocol, Configuration conf) {
     return null;
   }
 

+ 2 - 1
mapreduce/mr-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java

@@ -69,6 +69,7 @@ import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.SecurityInfo;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.yarn.YarnException;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
 import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
@@ -103,7 +104,7 @@ public class HistoryClientService extends AbstractService {
     YarnRPC rpc = YarnRPC.create(getConfig());
     Configuration conf = new Configuration(getConfig());
     conf.setClass(
-        CommonConfigurationKeys.HADOOP_SECURITY_INFO_CLASS_NAME,
+        YarnConfiguration.YARN_SECURITY_INFO,
         ClientHSSecurityInfo.class, SecurityInfo.class);
     initializeWebApp(getConfig());
     String serviceAddr = conf.get(JHConfig.HS_BIND_ADDRESS,

+ 3 - 2
mapreduce/mr-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java

@@ -55,6 +55,7 @@ import org.apache.hadoop.yarn.YarnException;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
 import org.apache.hadoop.yarn.api.records.ApplicationState;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
 import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
@@ -191,7 +192,7 @@ public class ClientServiceDelegate {
       public MRClientProtocol run() {
         Configuration myConf = new Configuration(conf);
         myConf.setClass(
-            CommonConfigurationKeysPublic.HADOOP_SECURITY_INFO_CLASS_NAME,
+            YarnConfiguration.YARN_SECURITY_INFO,
             SchedulerSecurityInfo.class, SecurityInfo.class); 
         YarnRPC rpc = YarnRPC.create(myConf);
         return (MRClientProtocol) rpc.getProxy(MRClientProtocol.class,
@@ -206,7 +207,7 @@ public class ClientServiceDelegate {
     LOG.trace("Connecting to HistoryServer at: " + serviceAddr);
     Configuration myConf = new Configuration(conf);
     //TODO This should ideally be using it's own class (instead of ClientRMSecurityInfo)
-    myConf.setClass(CommonConfigurationKeys.HADOOP_SECURITY_INFO_CLASS_NAME,
+    myConf.setClass(YarnConfiguration.YARN_SECURITY_INFO,
         ClientRMSecurityInfo.class, SecurityInfo.class);
     YarnRPC rpc = YarnRPC.create(myConf);
     realProxy = (MRClientProtocol) rpc.getProxy(MRClientProtocol.class,

+ 1 - 1
mapreduce/mr-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ResourceMgrDelegate.java

@@ -91,7 +91,7 @@ public class ResourceMgrDelegate {
     LOG.info("Connecting to ResourceManager at " + rmAddress);
     Configuration appsManagerServerConf = new Configuration(this.conf);
     appsManagerServerConf.setClass(
-        CommonConfigurationKeys.HADOOP_SECURITY_INFO_CLASS_NAME,
+        YarnConfiguration.YARN_SECURITY_INFO,
         ClientRMSecurityInfo.class, SecurityInfo.class);
     applicationsManager =
         (ClientRMProtocol) rpc.getProxy(ClientRMProtocol.class,

+ 3 - 0
mapreduce/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java

@@ -39,6 +39,9 @@ public class YarnConfiguration extends Configuration {
 
   public static final String APPSMANAGER_ADDRESS = RM_PREFIX
       + "appsManager.address";
+  
+  public static final String YARN_SECURITY_INFO = 
+      "yarn.security.info.class.name";
 
   public static final String DEFAULT_APPSMANAGER_BIND_ADDRESS =
       "0.0.0.0:8040";

+ 0 - 1
mapreduce/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/factory/providers/RpcFactoryProvider.java

@@ -2,7 +2,6 @@ package org.apache.hadoop.yarn.factory.providers;
 
 import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
-import java.security.InvalidParameterException;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.yarn.YarnException;

+ 3 - 2
mapreduce/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/HadoopYarnProtoRPC.java

@@ -10,6 +10,7 @@ import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.security.token.SecretManager;
 import org.apache.hadoop.security.token.TokenIdentifier;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.factory.providers.RpcFactoryProvider;
 
 /**
@@ -29,7 +30,7 @@ public class HadoopYarnProtoRPC extends YarnRPC {
     Configuration myConf = new Configuration(conf);
     LOG.info("Creating a HadoopYarnProtoRpc proxy for protocol " + protocol);
     LOG.debug("Configured SecurityInfo class name is "
-        + myConf.get(CommonConfigurationKeys.HADOOP_SECURITY_INFO_CLASS_NAME));
+        + myConf.get(YarnConfiguration.YARN_SECURITY_INFO));
     
     return RpcFactoryProvider.getClientFactory(myConf).getClient(protocol, 1, addr, myConf);
   }
@@ -42,7 +43,7 @@ public class HadoopYarnProtoRPC extends YarnRPC {
     LOG.info("Creating a HadoopYarnProtoRpc server for protocol " + protocol + 
         " with " + numHandlers + " handlers");
     LOG.info("Configured SecurityInfo class name is "
-        + conf.get(CommonConfigurationKeys.HADOOP_SECURITY_INFO_CLASS_NAME));
+        + conf.get(YarnConfiguration.YARN_SECURITY_INFO));
     
     final RPC.Server hadoopServer;
     hadoopServer = 

+ 3 - 3
mapreduce/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/HadoopYarnRPC.java

@@ -25,12 +25,12 @@ import org.apache.avro.ipc.Server;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.ipc.AvroSpecificRpcEngine;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.security.token.SecretManager;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.yarn.YarnException;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
 
 /**
  * This uses Hadoop RPC. Uses a tunnel AvroSpecificRpcEngine over 
@@ -49,7 +49,7 @@ public class HadoopYarnRPC extends YarnRPC {
     Configuration myConf = new Configuration(conf);
     LOG.info("Creating a HadoopYarnRpc proxy for protocol " + protocol);
     LOG.debug("Configured SecurityInfo class name is "
-        + myConf.get(CommonConfigurationKeys.HADOOP_SECURITY_INFO_CLASS_NAME));
+        + myConf.get(YarnConfiguration.YARN_SECURITY_INFO));
     RPC.setProtocolEngine(myConf, protocol, AvroSpecificRpcEngine.class);
     try {
       return RPC.getProxy(protocol, 1, addr, myConf);
@@ -66,7 +66,7 @@ public class HadoopYarnRPC extends YarnRPC {
     LOG.info("Creating a HadoopYarnRpc server for protocol " + protocol + 
         " with " + numHandlers + " handlers");
     LOG.info("Configured SecurityInfo class name is "
-        + conf.get(CommonConfigurationKeys.HADOOP_SECURITY_INFO_CLASS_NAME));
+        + conf.get(YarnConfiguration.YARN_SECURITY_INFO));
     RPC.setProtocolEngine(conf, protocol, AvroSpecificRpcEngine.class);
     final RPC.Server hadoopServer;
     try {

+ 5 - 5
mapreduce/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/ProtoOverHadoopRpcEngine.java

@@ -44,7 +44,7 @@ public class ProtoOverHadoopRpcEngine implements RpcEngine {
   private static final RpcEngine ENGINE = new WritableRpcEngine();
 
   /** Tunnel a Proto RPC request and response through Hadoop's RPC. */
-  private static interface TunnelProtocol extends VersionedProtocol {
+  public static interface TunnelProtocol extends VersionedProtocol {
     /** WritableRpcEngine requires a versionID */
     public static final long versionID = 1L;
 
@@ -249,13 +249,13 @@ public class ProtoOverHadoopRpcEngine implements RpcEngine {
   /**
    * Writable Wrapper for Protocol Buffer Responses
    */
-  private static class ProtoSpecificResponseWritable implements Writable {
+  public static class ProtoSpecificResponseWritable implements Writable {
     ProtoSpecificRpcResponse message;
 
     public ProtoSpecificResponseWritable() {
     }
     
-    ProtoSpecificResponseWritable(ProtoSpecificRpcResponse message) {
+    public ProtoSpecificResponseWritable(ProtoSpecificRpcResponse message) {
       this.message = message;
     }
 
@@ -279,13 +279,13 @@ public class ProtoOverHadoopRpcEngine implements RpcEngine {
   /**
    * Writable Wrapper for Protocol Buffer Requests
    */
-  private static class ProtoSpecificRequestWritable implements Writable {
+  public static class ProtoSpecificRequestWritable implements Writable {
     ProtoSpecificRpcRequest message;
 
     public ProtoSpecificRequestWritable() {
     }
     
-    ProtoSpecificRequestWritable(ProtoSpecificRpcRequest message) {
+    public ProtoSpecificRequestWritable(ProtoSpecificRpcRequest message) {
       this.message = message;
     }
 

+ 47 - 0
mapreduce/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/TunnelProtocolSecurityInfo.java

@@ -0,0 +1,47 @@
+package org.apache.hadoop.yarn.ipc;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.security.AnnotatedSecurityInfo;
+import org.apache.hadoop.security.KerberosInfo;
+import org.apache.hadoop.security.SecurityInfo;
+import org.apache.hadoop.security.token.TokenInfo;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.ipc.ProtoOverHadoopRpcEngine.TunnelProtocol;
+
+public class TunnelProtocolSecurityInfo implements SecurityInfo {
+  public static final Log LOG = LogFactory.getLog(TunnelProtocolSecurityInfo.class);
+  
+  @Override
+  public KerberosInfo getKerberosInfo(Class<?> protocol, Configuration conf) {
+    LOG.info("Get kerberos info being called, Tunnelprotocolinfo " + protocol);
+    if (TunnelProtocol.class.equals(protocol)) {
+      try {
+        LOG.info("The Tunnel Security info class " + conf.get(YarnConfiguration.YARN_SECURITY_INFO));
+        Class<SecurityInfo> secInfoClass = (Class<SecurityInfo>)  conf.getClass(
+            YarnConfiguration.YARN_SECURITY_INFO, SecurityInfo.class);
+        SecurityInfo secInfo = secInfoClass.newInstance();
+        return secInfo.getKerberosInfo(protocol, conf);
+      } catch (Exception e) {
+        throw new RuntimeException("Unable to load class", e);
+      }
+    }
+    return null;
+  }
+
+  @Override
+  public TokenInfo getTokenInfo(Class<?> protocol, Configuration conf) {
+    if (TunnelProtocol.class.equals(protocol)) {
+      try {
+        Class<SecurityInfo> secInfoClass = (Class<SecurityInfo>)  conf.getClass(
+            YarnConfiguration.YARN_SECURITY_INFO, AnnotatedSecurityInfo.class);
+        SecurityInfo secInfo = secInfoClass.newInstance();
+        return secInfo.getTokenInfo(protocol, conf);
+      } catch (Exception e) {
+        throw new RuntimeException("Unable to load Yarn Security Info class", e);
+      }
+    }
+    return null;
+  }
+}
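
TunnelProtocolSecurityInfo is the ServiceLoader-registered bridge for the tunnelled proto RPC: for TunnelProtocol it instantiates whatever class yarn.security.info.class.name names and delegates the lookup to it, which is why the later hunks switch every setClass call from HADOOP_SECURITY_INFO_CLASS_NAME to YarnConfiguration.YARN_SECURITY_INFO. A minimal sketch of that flow (TunnelSecurityInfoExample is a hypothetical caller, not part of this patch):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.security.SecurityInfo;
    import org.apache.hadoop.security.SecurityUtil;
    import org.apache.hadoop.security.token.TokenInfo;
    import org.apache.hadoop.yarn.conf.YarnConfiguration;
    import org.apache.hadoop.yarn.ipc.ProtoOverHadoopRpcEngine.TunnelProtocol;
    import org.apache.hadoop.yarn.security.ContainerManagerSecurityInfo;

    // Hypothetical caller, not part of the commit.
    public class TunnelSecurityInfoExample {
      static TokenInfo containerManagerTokenInfo(Configuration conf) {
        // Point the tunnel at the protocol-specific provider; the
        // ServiceLoader-registered TunnelProtocolSecurityInfo will instantiate
        // it and delegate the TunnelProtocol lookup.
        conf.setClass(YarnConfiguration.YARN_SECURITY_INFO,
            ContainerManagerSecurityInfo.class, SecurityInfo.class);
        return SecurityUtil.getTokenInfo(TunnelProtocol.class, conf);
      }
    }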

+ 3 - 2
mapreduce/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/security/ContainerManagerSecurityInfo.java

@@ -20,6 +20,7 @@ package org.apache.hadoop.yarn.security;
 
 import java.lang.annotation.Annotation;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.KerberosInfo;
 import org.apache.hadoop.security.SecurityInfo;
 import org.apache.hadoop.security.token.TokenIdentifier;
@@ -29,12 +30,12 @@ import org.apache.hadoop.security.token.TokenSelector;
 public class ContainerManagerSecurityInfo implements SecurityInfo {
 
   @Override
-  public KerberosInfo getKerborosInfo(Class<?> protocol) {
+  public KerberosInfo getKerberosInfo(Class<?> protocol, Configuration conf) {
     return null;
   }
 
   @Override
-  public TokenInfo getTokenInfo(Class<?> protocol) {
+  public TokenInfo getTokenInfo(Class<?> protocol, Configuration conf) {
     return new TokenInfo() {
 
       @Override

+ 3 - 2
mapreduce/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/security/SchedulerSecurityInfo.java

@@ -20,6 +20,7 @@ package org.apache.hadoop.yarn.security;
 
 import java.lang.annotation.Annotation;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.KerberosInfo;
 import org.apache.hadoop.security.SecurityInfo;
 import org.apache.hadoop.security.token.TokenIdentifier;
@@ -29,12 +30,12 @@ import org.apache.hadoop.security.token.TokenSelector;
 public class SchedulerSecurityInfo implements SecurityInfo {
 
   @Override
-  public KerberosInfo getKerborosInfo(Class<?> protocol) {
+  public KerberosInfo getKerberosInfo(Class<?> protocol, Configuration conf) {
     return null;
   }
 
   @Override
-  public TokenInfo getTokenInfo(Class<?> protocol) {
+  public TokenInfo getTokenInfo(Class<?> protocol, Configuration conf) {
     return new TokenInfo() {
 
       @Override

+ 3 - 2
mapreduce/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/security/admin/AdminSecurityInfo.java

@@ -2,6 +2,7 @@ package org.apache.hadoop.yarn.security.admin;
 
 import java.lang.annotation.Annotation;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.KerberosInfo;
 import org.apache.hadoop.security.SecurityInfo;
 import org.apache.hadoop.security.token.TokenInfo;
@@ -10,7 +11,7 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration;
 public class AdminSecurityInfo implements SecurityInfo {
 
   @Override
-  public KerberosInfo getKerborosInfo(Class<?> protocol) {
+  public KerberosInfo getKerberosInfo(Class<?> protocol, Configuration conf) {
     return new KerberosInfo() {
 
       @Override
@@ -31,7 +32,7 @@ public class AdminSecurityInfo implements SecurityInfo {
   }
 
   @Override
-  public TokenInfo getTokenInfo(Class<?> protocol) {
+  public TokenInfo getTokenInfo(Class<?> protocol, Configuration conf) {
     return null;
   }
 

+ 3 - 2
mapreduce/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/security/client/ClientRMSecurityInfo.java

@@ -20,6 +20,7 @@ package org.apache.hadoop.yarn.security.client;
 
 import java.lang.annotation.Annotation;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.KerberosInfo;
 import org.apache.hadoop.security.SecurityInfo;
 import org.apache.hadoop.security.token.TokenInfo;
@@ -28,7 +29,7 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration;
 public class ClientRMSecurityInfo implements SecurityInfo {
 
   @Override
-  public KerberosInfo getKerborosInfo(Class<?> protocol) {
+  public KerberosInfo getKerberosInfo(Class<?> protocol, Configuration conf) {
     return new KerberosInfo() {
 
       @Override
@@ -49,7 +50,7 @@ public class ClientRMSecurityInfo implements SecurityInfo {
   }
 
   @Override
-  public TokenInfo getTokenInfo(Class<?> protocol) {
+  public TokenInfo getTokenInfo(Class<?> protocol, Configuration conf) {
     return null;
   }
 

+ 1 - 0
mapreduce/yarn/yarn-common/src/main/resources/META-INF/services/org.apache.hadoop.security.SecurityInfo

@@ -0,0 +1 @@
+org.apache.hadoop.yarn.ipc.TunnelProtocolSecurityInfo

+ 3 - 2
mapreduce/yarn/yarn-server/yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/RMNMSecurityInfoClass.java

@@ -20,6 +20,7 @@ package org.apache.hadoop.yarn.server;
 
 import java.lang.annotation.Annotation;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.KerberosInfo;
 import org.apache.hadoop.security.SecurityInfo;
 import org.apache.hadoop.security.token.TokenInfo;
@@ -28,7 +29,7 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration;
 public class RMNMSecurityInfoClass implements SecurityInfo {
 
   @Override
-  public KerberosInfo getKerborosInfo(Class<?> protocol) {
+  public KerberosInfo getKerberosInfo(Class<?> protocol, Configuration conf) {
     return new KerberosInfo() {
 
       @Override
@@ -49,7 +50,7 @@ public class RMNMSecurityInfoClass implements SecurityInfo {
   }
 
   @Override
-  public TokenInfo getTokenInfo(Class<?> protocol) {
+  public TokenInfo getTokenInfo(Class<?> protocol, Configuration conf) {
     return null;
   }
 

+ 2 - 1
mapreduce/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeStatusUpdaterImpl.java

@@ -40,6 +40,7 @@ import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.api.records.ContainerState;
 import org.apache.hadoop.yarn.api.records.NodeId;
 import org.apache.hadoop.yarn.api.records.Resource;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.event.Dispatcher;
 import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
 import org.apache.hadoop.yarn.factories.RecordFactory;
@@ -152,7 +153,7 @@ public class NodeStatusUpdaterImpl extends AbstractService implements
     InetSocketAddress rmAddress = NetUtils.createSocketAddr(this.rmAddress);
     Configuration rmClientConf = new Configuration(getConfig());
     rmClientConf.setClass(
-        CommonConfigurationKeys.HADOOP_SECURITY_INFO_CLASS_NAME,
+        YarnConfiguration.YARN_SECURITY_INFO,
         RMNMSecurityInfoClass.class, SecurityInfo.class);
     return (ResourceTracker) rpc.getProxy(ResourceTracker.class, rmAddress,
         rmClientConf);

+ 2 - 1
mapreduce/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/ContainerManagerImpl.java

@@ -50,6 +50,7 @@ import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
 import org.apache.hadoop.yarn.api.records.ContainerStatus;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.event.AsyncDispatcher;
 import org.apache.hadoop.yarn.event.EventHandler;
 import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
@@ -206,7 +207,7 @@ public class ContainerManagerImpl extends CompositeService implements
           this.nodeStatusUpdater.getRMNMSharedSecret());
     }
     Configuration cmConf = new Configuration(getConfig());
-    cmConf.setClass(CommonConfigurationKeys.HADOOP_SECURITY_INFO_CLASS_NAME,
+    cmConf.setClass(YarnConfiguration.YARN_SECURITY_INFO,
         ContainerManagerSecurityInfo.class, SecurityInfo.class);
     server =
         rpc.getServer(ContainerManager.class, this, cmBindAddressStr, cmConf,

+ 2 - 1
mapreduce/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ContainerLocalizer.java

@@ -51,6 +51,7 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.yarn.api.records.LocalResource;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
 import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
@@ -118,7 +119,7 @@ public class ContainerLocalizer {
     YarnRPC rpc = YarnRPC.create(localizerConf);
     if (UserGroupInformation.isSecurityEnabled()) {
       localizerConf.setClass(
-          CommonConfigurationKeys.HADOOP_SECURITY_INFO_CLASS_NAME,
+          YarnConfiguration.YARN_SECURITY_INFO,
           LocalizerSecurityInfo.class, SecurityInfo.class);
     }
     return (LocalizationProtocol)

+ 2 - 1
mapreduce/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java

@@ -46,6 +46,7 @@ import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.api.records.LocalResource;
 import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
 
 import static org.apache.hadoop.fs.CreateFlag.CREATE;
 import static org.apache.hadoop.fs.CreateFlag.OVERWRITE;
@@ -236,7 +237,7 @@ public class ResourceLocalizationService extends AbstractService
                                                          // sec-info classes
     LocalizerTokenSecretManager secretManager = null;
     if (UserGroupInformation.isSecurityEnabled()) {
-      conf.setClass(CommonConfigurationKeys.HADOOP_SECURITY_INFO_CLASS_NAME,
+      conf.setClass(YarnConfiguration.YARN_SECURITY_INFO,
           LocalizerSecurityInfo.class, SecurityInfo.class);
       secretManager = new LocalizerTokenSecretManager();
     }

+ 3 - 2
mapreduce/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/security/LocalizerSecurityInfo.java

@@ -20,6 +20,7 @@ package org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.sec
 
 import java.lang.annotation.Annotation;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.KerberosInfo;
 import org.apache.hadoop.security.SecurityInfo;
 import org.apache.hadoop.security.token.TokenIdentifier;
@@ -29,12 +30,12 @@ import org.apache.hadoop.security.token.TokenSelector;
 public class LocalizerSecurityInfo implements SecurityInfo {
 
   @Override
-  public KerberosInfo getKerborosInfo(Class<?> protocol) {
+  public KerberosInfo getKerberosInfo(Class<?> protocol, Configuration conf) {
     return null;
   }
 
   @Override
-  public TokenInfo getTokenInfo(Class<?> protocol) {
+  public TokenInfo getTokenInfo(Class<?> protocol, Configuration conf) {
     return new TokenInfo() {
 
       @Override

+ 2 - 1
mapreduce/yarn/yarn-server/yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/AdminService.java

@@ -44,6 +44,7 @@ import org.apache.hadoop.yarn.server.resourcemanager.api.protocolrecords.Refresh
 import org.apache.hadoop.yarn.server.resourcemanager.api.protocolrecords.RefreshSuperUserGroupsConfigurationResponse;
 import org.apache.hadoop.yarn.server.resourcemanager.api.protocolrecords.RefreshUserToGroupsMappingsRequest;
 import org.apache.hadoop.yarn.server.resourcemanager.api.protocolrecords.RefreshUserToGroupsMappingsResponse;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
 import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
@@ -93,7 +94,7 @@ public class AdminService extends AbstractService implements RMAdminProtocol {
     YarnRPC rpc = YarnRPC.create(getConfig());
     Configuration serverConf = new Configuration(getConfig());
     serverConf.setClass(
-        CommonConfigurationKeys.HADOOP_SECURITY_INFO_CLASS_NAME,
+        YarnConfiguration.YARN_SECURITY_INFO,
         SchedulerSecurityInfo.class, SecurityInfo.class);
     this.server =
       rpc.getServer(RMAdminProtocol.class, this, masterServiceAddress,

+ 1 - 1
mapreduce/yarn/yarn-server/yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ApplicationMasterService.java

@@ -105,7 +105,7 @@ AMRMProtocol, EventHandler<ASMEvent<ApplicationTrackerEventType>> {
     YarnRPC rpc = YarnRPC.create(getConfig());
     Configuration serverConf = new Configuration(getConfig());
     serverConf.setClass(
-        CommonConfigurationKeys.HADOOP_SECURITY_INFO_CLASS_NAME,
+        YarnConfiguration.YARN_SECURITY_INFO,
         SchedulerSecurityInfo.class, SecurityInfo.class);
     this.server =
       rpc.getServer(AMRMProtocol.class, this, masterServiceAddress,

+ 1 - 1
mapreduce/yarn/yarn-server/yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java

@@ -144,7 +144,7 @@ public class ClientRMService extends AbstractService implements
     YarnRPC rpc = YarnRPC.create(getConfig());
     Configuration clientServerConf = new Configuration(getConfig());
     clientServerConf.setClass(
-        CommonConfigurationKeys.HADOOP_SECURITY_INFO_CLASS_NAME,
+        YarnConfiguration.YARN_SECURITY_INFO,
         ClientRMSecurityInfo.class, SecurityInfo.class);
     this.server =   
       rpc.getServer(ClientRMProtocol.class, this,

+ 2 - 1
mapreduce/yarn/yarn-server/yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceTrackerService.java

@@ -27,6 +27,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.SecurityInfo;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
 import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
@@ -83,7 +84,7 @@ implements ResourceTracker{
     YarnRPC rpc = YarnRPC.create(getConfig());
     Configuration rtServerConf = new Configuration(getConfig());
     rtServerConf.setClass(
-        CommonConfigurationKeys.HADOOP_SECURITY_INFO_CLASS_NAME,
+        YarnConfiguration.YARN_SECURITY_INFO,
         RMNMSecurityInfoClass.class, SecurityInfo.class);
     this.server =
       rpc.getServer(ResourceTracker.class, this, resourceTrackerAddress,

+ 1 - 1
mapreduce/yarn/yarn-server/yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/applicationsmanager/AMLauncher.java

@@ -97,7 +97,7 @@ public class AMLauncher implements Runnable {
     this.applicationTokenSecretManager = applicationTokenSecretManager;
     this.clientToAMSecretManager = clientToAMSecretManager;
     this.conf.setClass(
-        CommonConfigurationKeysPublic.HADOOP_SECURITY_INFO_CLASS_NAME,
+        YarnConfiguration.YARN_SECURITY_INFO,
         ContainerManagerSecurityInfo.class, SecurityInfo.class);
     this.eventType = eventType;
     this.handler = asmContext.getDispatcher().getEventHandler();

+ 1 - 1
mapreduce/yarn/yarn-server/yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/tools/RMAdmin.java

@@ -133,7 +133,7 @@ public class RMAdmin extends Configured implements Tool {
     final YarnRPC rpc = YarnRPC.create(conf);
     
     if (UserGroupInformation.isSecurityEnabled()) {
-      conf.setClass(CommonConfigurationKeys.HADOOP_SECURITY_INFO_CLASS_NAME,
+      conf.setClass(YarnConfiguration.YARN_SECURITY_INFO,
           AdminSecurityInfo.class, SecurityInfo.class);
     }
     

+ 2 - 2
mapreduce/yarn/yarn-server/yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestContainerTokenSecretManager.java

@@ -196,7 +196,7 @@ public class TestContainerTokenSecretManager {
     currentUser.addToken(appToken);
 
     conf.setClass(
-        CommonConfigurationKeysPublic.HADOOP_SECURITY_INFO_CLASS_NAME,
+        YarnConfiguration.YARN_SECURITY_INFO,
         SchedulerSecurityInfo.class, SecurityInfo.class);
     AMRMProtocol scheduler =
         currentUser.doAs(new PrivilegedAction<AMRMProtocol>() {
@@ -269,7 +269,7 @@ public class TestContainerTokenSecretManager {
                 containerToken.getService()));
     currentUser.addToken(token);
     conf.setClass(
-        CommonConfigurationKeysPublic.HADOOP_SECURITY_INFO_CLASS_NAME,
+        YarnConfiguration.YARN_SECURITY_INFO,
         ContainerManagerSecurityInfo.class, SecurityInfo.class);
     currentUser.doAs(new PrivilegedAction<Void>() {
       @Override