Browse Source

commit a5b6843a017386d713fcdac2174b1019c48e2a7e
Author: Devaraj Das <ddas@yahoo-inc.com>
Date: Fri May 14 22:05:38 2010 -0700

HDFS-1130 from https://issues.apache.org/jira/secure/attachment/12444565/hdfs-1130.3.patch

+++ b/YAHOO-CHANGES.txt
+ HDFS-1130. Adds a configuration dfs.cluster.administrators for
+ controlling access to the default servlets in hdfs. (ddas)
+


git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.20-security-patches@1077466 13f79535-47bb-0310-9956-ffa450edef68

Owen O'Malley 14 years ago
parent
commit
3f7f2ef636

+ 21 - 0
src/core/org/apache/hadoop/security/SecurityUtil.java

@@ -31,6 +31,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.authorize.AccessControlList;
 
 import sun.security.jgss.krb5.Krb5Util;
 import sun.security.krb5.Credentials;
@@ -214,4 +215,24 @@ public class SecurityUtil {
     sb.append(NetUtils.normalizeHostName(uri.getHost())).append(":").append(port);
     return sb.toString();
   }
+  
+  /**
+   * Get the ACL object representing the cluster administrators
+   * The user who starts the daemon is automatically added as an admin
+   * @param conf
+   * @param configKey the key that holds the ACL string in its value
+   * @return AccessControlList instance
+   */
+  public static AccessControlList getAdminAcls(Configuration conf, 
+      String configKey) {
+    try {
+      AccessControlList adminAcl = 
+        new AccessControlList(conf.get(configKey, " "));
+      adminAcl.addUser(UserGroupInformation.getCurrentUser().
+                       getShortUserName());
+      return adminAcl;
+    } catch (Exception ex) {
+      throw new RuntimeException(ex);
+    }
+  }
 }

+ 1 - 0
src/hdfs/org/apache/hadoop/hdfs/DFSConfigKeys.java

@@ -82,6 +82,7 @@ public class DFSConfigKeys extends CommonConfigurationKeys {
   public static final String  DFS_PERMISSIONS_ENABLED_KEY = "dfs.permissions.enabled";
   public static final boolean DFS_PERMISSIONS_ENABLED_DEFAULT = true;
   public static final String  DFS_PERMISSIONS_SUPERUSERGROUP_KEY = "dfs.permissions.superusergroup";
+  public static final String  DFS_ADMIN = "dfs.cluster.administrators";
   public static final String  DFS_PERMISSIONS_SUPERUSERGROUP_DEFAULT = "supergroup";
   public static final String  DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY = "dfs.https.server.keystore.resource";
   public static final String  DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_DEFAULT = "ssl-server.xml";

+ 4 - 2
src/hdfs/org/apache/hadoop/hdfs/server/datanode/DataNode.java

@@ -399,8 +399,10 @@ public class DataNode extends Configured
     String infoHost = infoSocAddr.getHostName();
     int tmpInfoPort = infoSocAddr.getPort();
     this.infoServer = (secureResources == null) 
-       ? new HttpServer("datanode", infoHost, tmpInfoPort, tmpInfoPort == 0, conf)
-       : new HttpServer("datanode", infoHost, tmpInfoPort, tmpInfoPort == 0, conf, 
+       ? new HttpServer("datanode", infoHost, tmpInfoPort, tmpInfoPort == 0, 
+           conf, SecurityUtil.getAdminAcls(conf, DFSConfigKeys.DFS_ADMIN))
+       : new HttpServer("datanode", infoHost, tmpInfoPort, tmpInfoPort == 0,
+           conf, SecurityUtil.getAdminAcls(conf, DFSConfigKeys.DFS_ADMIN),
            secureResources.getListener());
     if (conf.getBoolean("dfs.https.enable", false)) {
       boolean needClientAuth = conf.getBoolean("dfs.https.need.client.auth", false);

+ 2 - 2
src/hdfs/org/apache/hadoop/hdfs/server/namenode/NameNode.java

@@ -26,7 +26,6 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.fs.permission.PermissionStatus;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
-import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.HDFSPolicyProvider;
 import org.apache.hadoop.hdfs.protocol.*;
 import org.apache.hadoop.hdfs.server.common.HdfsConstants.StartupOption;
@@ -266,7 +265,8 @@ public class NameNode implements ClientProtocol, DatanodeProtocol,
           String infoHost = infoSocAddr.getHostName();
           int infoPort = infoSocAddr.getPort();
           httpServer = new HttpServer("hdfs", infoHost, infoPort, 
-              infoPort == 0, conf);
+              infoPort == 0, conf, 
+              SecurityUtil.getAdminAcls(conf, DFSConfigKeys.DFS_ADMIN));
           
           boolean certSSL = conf.getBoolean("dfs.https.enable", false);
           boolean useKrb = UserGroupInformation.isSecurityEnabled();

+ 2 - 1
src/hdfs/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java

@@ -196,7 +196,8 @@ public class SecondaryNameNode implements Runnable {
 
           int tmpInfoPort = infoSocAddr.getPort();
           infoServer = new HttpServer("secondary", infoBindAddress, tmpInfoPort,
-              tmpInfoPort == 0, conf);
+              tmpInfoPort == 0, conf, 
+              SecurityUtil.getAdminAcls(conf, DFSConfigKeys.DFS_ADMIN));
           
           if(UserGroupInformation.isSecurityEnabled()) {
             System.setProperty("https.cipherSuites",