Browse Source

HADOOP-6951. Distinct minicluster services (e.g. NN and JT) overwrite each other's service policies. Contributed by Aaron T. Myers.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1002896 13f79535-47bb-0310-9956-ffa450edef68
Thomas White 14 năm trước
mục cha
commit
642ed17a48

+ 3 - 0
CHANGES.txt

@@ -250,6 +250,9 @@ Trunk (unreleased changes)
     HADOOP-6940. RawLocalFileSystem's markSupported method misnamed markSupport.
     (Tom White via eli).
 
+    HADOOP-6951.  Distinct minicluster services (e.g. NN and JT) overwrite each
+    other's service policies.  (Aaron T. Myers via tomwhite)
+
 Release 0.21.0 - Unreleased
 
   INCOMPATIBLE CHANGES

+ 20 - 1
src/java/org/apache/hadoop/ipc/Server.java

@@ -60,6 +60,7 @@ import javax.security.sasl.SaslServer;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.io.BytesWritable;
@@ -78,6 +79,7 @@ import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.ProxyUsers;
 import org.apache.hadoop.security.authorize.AuthorizationException;
+import org.apache.hadoop.security.authorize.PolicyProvider;
 import org.apache.hadoop.security.authorize.ServiceAuthorizationManager;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.security.token.SecretManager;
@@ -182,6 +184,7 @@ public abstract class Server {
   
   private Configuration conf;
   private SecretManager<TokenIdentifier> secretManager;
+  private ServiceAuthorizationManager serviceAuthorizationManager = new ServiceAuthorizationManager();
 
   private int maxQueueSize;
   private final int maxRespSize;
@@ -239,6 +242,22 @@ public abstract class Server {
     return rpcMetrics;
   }
 
+  /**
+   * Refresh the service authorization ACL for the service handled by this server.
+   */
+  public void refreshServiceAcl(Configuration conf, PolicyProvider provider) {
+    serviceAuthorizationManager.refresh(conf, provider);
+  }
+
+  /**
+   * Returns a handle to the serviceAuthorizationManager (required in tests)
+   * @return instance of ServiceAuthorizationManager for this server
+   */
+  @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
+  public ServiceAuthorizationManager getServiceAuthorizationManager() {
+    return serviceAuthorizationManager;
+  }
+
   /** A call queued for handling. */
   private static class Call {
     private int id;                               // the client's call id
@@ -1652,7 +1671,7 @@ public abstract class Server {
         throw new AuthorizationException("Unknown protocol: " + 
                                          connection.getProtocol());
       }
-      ServiceAuthorizationManager.authorize(user, protocol, getConf(), hostname);
+      serviceAuthorizationManager.authorize(user, protocol, getConf(), hostname);
     }
   }
   

+ 9 - 3
src/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java

@@ -20,6 +20,7 @@ package org.apache.hadoop.security.authorize;
 import java.io.IOException;
 import java.util.IdentityHashMap;
 import java.util.Map;
+import java.util.Set;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -43,7 +44,7 @@ public class ServiceAuthorizationManager {
   private static final Log LOG = LogFactory
   .getLog(ServiceAuthorizationManager.class);
 
-  private static Map<Class<?>, AccessControlList> protocolToAcl =
+  private Map<Class<?>, AccessControlList> protocolToAcl =
     new IdentityHashMap<Class<?>, AccessControlList>();
   
   /**
@@ -73,7 +74,7 @@ public class ServiceAuthorizationManager {
    * @param hostname fully qualified domain name of the client
    * @throws AuthorizationException on authorization failure
    */
-  public static void authorize(UserGroupInformation user, 
+  public void authorize(UserGroupInformation user, 
                                Class<?> protocol,
                                Configuration conf,
                                String hostname
@@ -129,7 +130,7 @@ public class ServiceAuthorizationManager {
     AUDITLOG.info(AUTHZ_SUCCESSFULL_FOR + user + " for protocol="+protocol);
   }
 
-  public static synchronized void refresh(Configuration conf,
+  public synchronized void refresh(Configuration conf,
                                           PolicyProvider provider) {
     // Get the system property 'hadoop.policy.file'
     String policyFile = 
@@ -158,4 +159,9 @@ public class ServiceAuthorizationManager {
     // Flip to the newly parsed permissions
     protocolToAcl = newAcls;
   }
+
+  // Package-protected for use in tests.
+  Set<Class<?>> getProtocolsWithAcls() {
+    return protocolToAcl.keySet();
+  }
 }

+ 2 - 3
src/test/core/org/apache/hadoop/ipc/TestRPC.java

@@ -41,7 +41,6 @@ import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.authorize.AuthorizationException;
 import org.apache.hadoop.security.authorize.PolicyProvider;
 import org.apache.hadoop.security.authorize.Service;
-import org.apache.hadoop.security.authorize.ServiceAuthorizationManager;
 import org.apache.hadoop.security.AccessControlException;
 
 import static org.mockito.Mockito.*;
@@ -364,11 +363,11 @@ public class TestRPC extends TestCase {
   }
   
   private void doRPCs(Configuration conf, boolean expectFailure) throws Exception {
-    ServiceAuthorizationManager.refresh(conf, new TestPolicyProvider());
-    
     Server server = RPC.getServer(TestProtocol.class,
                                   new TestImpl(), ADDRESS, 0, 5, true, conf, null);
 
+    server.refreshServiceAcl(conf, new TestPolicyProvider());
+
     TestProtocol proxy = null;
 
     server.start();