
HDFS-10437. ReconfigurationProtocol not covered by HDFSPolicyProvider. (Arpit Agarwal)

Arpit Agarwal 9 years ago
parent
commit
bd33cc0179

+ 3 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java

@@ -179,6 +179,9 @@ public class CommonConfigurationKeys extends CommonConfigurationKeysPublic {
   public static final String
       HADOOP_SECURITY_SERVICE_AUTHORIZATION_DATANODE_LIFELINE =
           "security.datanode.lifeline.protocol.acl";
+  public static final String
+      HADOOP_SECURITY_SERVICE_AUTHORIZATION_RECONFIGURATION =
+      "security.reconfiguration.protocol.acl";
   public static final String 
   SECURITY_HA_SERVICE_PROTOCOL_ACL = "security.ha.service.protocol.acl";
   public static final String 

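For context, a minimal sketch (not part of this commit) of how the new key would typically be consulted. Service-level ACL keys such as security.reconfiguration.protocol.acl are normally supplied in hadoop-policy.xml and read through a Hadoop Configuration; the explicit addResource call and the "*" fallback below are illustrative assumptions, not code from this change.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;

// Hypothetical lookup of the new ACL key added above.
public class ReconfigurationAclLookup {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    conf.addResource("hadoop-policy.xml");   // assumed location of the policy file
    String acl = conf.get(
        CommonConfigurationKeys.HADOOP_SECURITY_SERVICE_AUTHORIZATION_RECONFIGURATION,
        "*"); // assumed fallback for illustration: allow all users and groups
    System.out.println("security.reconfiguration.protocol.acl = " + acl);
  }
}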
+ 5 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HDFSPolicyProvider.java

@@ -23,6 +23,7 @@ import org.apache.hadoop.ha.HAServiceProtocol;
 import org.apache.hadoop.ha.ZKFCProtocol;
 import org.apache.hadoop.hdfs.protocol.ClientDatanodeProtocol;
 import org.apache.hadoop.hdfs.protocol.ClientProtocol;
+import org.apache.hadoop.hdfs.protocol.ReconfigurationProtocol;
 import org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocol;
 import org.apache.hadoop.hdfs.server.protocol.DatanodeLifelineProtocol;
 import org.apache.hadoop.hdfs.server.protocol.DatanodeProtocol;
@@ -80,7 +81,10 @@ public class HDFSPolicyProvider extends PolicyProvider {
         TraceAdminProtocol.class),
     new Service(
         CommonConfigurationKeys.HADOOP_SECURITY_SERVICE_AUTHORIZATION_DATANODE_LIFELINE,
-        DatanodeLifelineProtocol.class)
+        DatanodeLifelineProtocol.class),
+    new Service(
+          CommonConfigurationKeys.HADOOP_SECURITY_SERVICE_AUTHORIZATION_RECONFIGURATION,
+          ReconfigurationProtocol.class)
   };
   
   @Override

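A hedged sketch of how the provider's service list is consumed: each Service pairs an ACL key with a protocol class, and after this commit one entry maps ReconfigurationProtocol to security.reconfiguration.protocol.acl. The refresh call at the end shows the assumed wiring through ServiceAuthorizationManager; in a real deployment the RPC server performs this refresh itself, so treat that part as illustrative only.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.HDFSPolicyProvider;
import org.apache.hadoop.security.authorize.Service;
import org.apache.hadoop.security.authorize.ServiceAuthorizationManager;

// Hypothetical driver: enumerate the provider's services, then refresh ACLs.
public class PolicyProviderSketch {
  public static void main(String[] args) {
    HDFSPolicyProvider provider = new HDFSPolicyProvider();
    for (Service s : provider.getServices()) {
      // Prints e.g. "security.reconfiguration.protocol.acl ->
      // org.apache.hadoop.hdfs.protocol.ReconfigurationProtocol".
      System.out.println(s.getServiceKey() + " -> " + s.getProtocol().getName());
    }
    // Assumed wiring: load ACLs from hadoop-policy.xml for this provider.
    new ServiceAuthorizationManager().refresh(new Configuration(), provider);
  }
}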
+ 14 - 17
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHDFSPolicyProvider.java

@@ -23,10 +23,12 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.Comparator;
+import java.util.HashSet;
 import java.util.List;
+import java.util.Set;
 
+import com.google.common.collect.Sets;
 import org.apache.commons.lang.ClassUtils;
-import org.apache.hadoop.hdfs.protocol.ReconfigurationProtocol;
 import org.apache.hadoop.hdfs.qjournal.server.JournalNodeRpcServer;
 import org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
@@ -57,7 +59,7 @@ public class TestHDFSPolicyProvider {
   private static final Logger LOG =
       LoggerFactory.getLogger(TestHDFSPolicyProvider.class);
 
-  private static List<Class<?>> policyProviderProtocols;
+  private static Set<Class<?>> policyProviderProtocols;
 
   private static final Comparator<Class<?>> CLASS_NAME_COMPARATOR =
       new Comparator<Class<?>>() {
@@ -75,11 +77,10 @@ public class TestHDFSPolicyProvider {
   @BeforeClass
   public static void initialize() {
     Service[] services = new HDFSPolicyProvider().getServices();
-    policyProviderProtocols = new ArrayList<>(services.length);
+    policyProviderProtocols = new HashSet<>(services.length);
     for (Service service : services) {
       policyProviderProtocols.add(service.getProtocol());
     }
-    Collections.sort(policyProviderProtocols, CLASS_NAME_COMPARATOR);
   }
 
   public TestHDFSPolicyProvider(Class<?> rpcServerClass) {
@@ -98,29 +99,25 @@ public class TestHDFSPolicyProvider {
   @Test
   public void testPolicyProviderForServer() {
     List<?> ifaces = ClassUtils.getAllInterfaces(rpcServerClass);
-    List<Class<?>> serverProtocols = new ArrayList<>(ifaces.size());
+    Set<Class<?>> serverProtocols = new HashSet<>(ifaces.size());
     for (Object obj : ifaces) {
       Class<?> iface = (Class<?>)obj;
-      // ReconfigurationProtocol is not covered in HDFSPolicyProvider
-      // currently, so we have a special case to skip it.  This needs follow-up
-      // investigation.
-      if (iface.getSimpleName().endsWith("Protocol") &&
-          iface != ReconfigurationProtocol.class) {
+      if (iface.getSimpleName().endsWith("Protocol")) {
         serverProtocols.add(iface);
       }
     }
-    Collections.sort(serverProtocols, CLASS_NAME_COMPARATOR);
     LOG.info("Running test {} for RPC server {}.  Found server protocols {} "
         + "and policy provider protocols {}.", testName.getMethodName(),
         rpcServerClass.getName(), serverProtocols, policyProviderProtocols);
     assertFalse("Expected to find at least one protocol in server.",
         serverProtocols.isEmpty());
+    final Set<Class<?>> differenceSet =
+        Sets.difference(serverProtocols, policyProviderProtocols);
     assertTrue(
-        String.format("Expected all protocols for server %s to be defined in "
-            + "%s.  Server contains protocols %s.  Policy provider contains "
-            + "protocols %s.", rpcServerClass.getName(),
-            HDFSPolicyProvider.class.getName(), serverProtocols,
-            policyProviderProtocols),
-        policyProviderProtocols.containsAll(serverProtocols));
+        String.format("Following protocols for server %s are not defined in "
+            + "%s: %s",
+            rpcServerClass.getName(), HDFSPolicyProvider.class.getName(),
+            Arrays.toString(differenceSet.toArray())),
+        differenceSet.isEmpty());
   }
 }
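For reference, a small sketch (hand-picked, hypothetical values) of the Guava Sets.difference semantics the rewritten assertion relies on: the view contains the elements of the first set that are absent from the second, so an empty view means every protocol exposed by the RPC server is covered by HDFSPolicyProvider.

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

import com.google.common.collect.Sets;
import org.apache.hadoop.hdfs.protocol.ClientProtocol;
import org.apache.hadoop.hdfs.protocol.ReconfigurationProtocol;

// Hypothetical example mirroring the test's check.
public class SetsDifferenceSketch {
  public static void main(String[] args) {
    Set<Class<?>> serverProtocols = new HashSet<>(
        Arrays.asList(ClientProtocol.class, ReconfigurationProtocol.class));
    Set<Class<?>> policyProviderProtocols = new HashSet<>(
        Arrays.asList(ClientProtocol.class));
    Set<Class<?>> missing =
        Sets.difference(serverProtocols, policyProviderProtocols);
    // Prints the protocols not covered by the provider; the test asserts
    // that this view is empty for every HDFS RPC server class.
    System.out.println(missing);
  }
}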