
HDFS-3284. bootstrapStandby fails in secure cluster. Contributed by Todd Lipcon.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1326813 13f79535-47bb-0310-9956-ffa450edef68
Todd Lipcon, 13 years ago
commit 574f99bd6b

+ 2 - 0
hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt

@@ -522,6 +522,8 @@ Release 2.0.0 - UNRELEASED
     HDFS-3268. FileContext API mishandles token service and incompatible with
     HA (Daryn Sharp via todd)
 
+    HDFS-3284. bootstrapStandby fails in secure cluster (todd)
+
   BREAKDOWN OF HDFS-1623 SUBTASKS
 
     HDFS-2179. Add fencing framework and mechanisms for NameNode HA. (todd)

+ 4 - 3
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/BootstrapStandby.java

@@ -51,6 +51,7 @@ import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import org.apache.hadoop.hdfs.server.namenode.TransferFsImage;
 import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocol;
 import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo;
+import org.apache.hadoop.hdfs.tools.DFSHAAdmin;
 import org.apache.hadoop.hdfs.tools.NNHAServiceTarget;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.MD5Hash;
@@ -144,8 +145,8 @@ public class BootstrapStandby implements Tool, Configurable {
   
   private HAServiceProtocol createHAProtocolProxy()
       throws IOException {
-    return new NNHAServiceTarget(new HdfsConfiguration(conf),
-        nsId, otherNNId).getProxy(conf, 15000);
+    return new NNHAServiceTarget(new HdfsConfiguration(conf), nsId, otherNNId)
+        .getProxy(conf, 15000);
   }
 
   private int doRun() throws IOException {
@@ -334,7 +335,7 @@ public class BootstrapStandby implements Tool, Configurable {
 
   @Override
   public void setConf(Configuration conf) {
-    this.conf = conf;
+    this.conf = DFSHAAdmin.addSecurityConfiguration(conf);
   }
 
   @Override

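Why the BootstrapStandby change matters (reviewer context, not part of the patch): the tool talks to the active NameNode over HAServiceProtocol (see createHAProtocolProxy() above) and NamenodeProtocol RPC, and on a Kerberos-secured cluster those client connections need the NameNode's service principal present in the Configuration. Before this patch setConf() stored the raw Configuration, which never carried that value, so bootstrapStandby failed in a secure cluster. Below is a minimal sketch of what the patched setConf() now keeps; the driver class and the principal value are illustrative assumptions, not part of this commit.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.CommonConfigurationKeys;
    import org.apache.hadoop.hdfs.DFSConfigKeys;
    import org.apache.hadoop.hdfs.tools.DFSHAAdmin;

    public class BootstrapStandbyConfCheck {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // In a real secure cluster this comes from hdfs-site.xml rather than code.
        conf.set(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY, "nn/_HOST@EXAMPLE.COM");

        // Equivalent to what BootstrapStandby.setConf(conf) now stores internally.
        Configuration secured = DFSHAAdmin.addSecurityConfiguration(conf);

        // The client-side service principal is now populated for the RPC proxies.
        System.out.println(secured.get(
            CommonConfigurationKeys.HADOOP_SECURITY_SERVICE_USER_NAME_KEY));
      }
    }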
+ 22 - 11
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSHAAdmin.java

@@ -46,21 +46,32 @@ public class DFSHAAdmin extends HAAdmin {
   @Override
   public void setConf(Configuration conf) {
     if (conf != null) {
-      // Make a copy so we don't mutate it. Also use an HdfsConfiguration to
-      // force loading of hdfs-site.xml.
-      conf = new HdfsConfiguration(conf);
-      String nameNodePrincipal = conf.get(
-          DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY, "");
-      if (LOG.isDebugEnabled()) {
-        LOG.debug("Using NN principal: " + nameNodePrincipal);
-      }
-
-      conf.set(CommonConfigurationKeys.HADOOP_SECURITY_SERVICE_USER_NAME_KEY,
-          nameNodePrincipal);
+      conf = addSecurityConfiguration(conf);
     }
     super.setConf(conf);
   }
 
+  /**
+   * Add the requisite security principal settings to the given Configuration,
+   * returning a copy.
+   * @param conf the original config
+   * @return a copy with the security settings added
+   */
+  public static Configuration addSecurityConfiguration(Configuration conf) {
+    // Make a copy so we don't mutate it. Also use an HdfsConfiguration to
+    // force loading of hdfs-site.xml.
+    conf = new HdfsConfiguration(conf);
+    String nameNodePrincipal = conf.get(
+        DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY, "");
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("Using NN principal: " + nameNodePrincipal);
+    }
+
+    conf.set(CommonConfigurationKeys.HADOOP_SECURITY_SERVICE_USER_NAME_KEY,
+        nameNodePrincipal);
+    return conf;
+  }
+
   /**
    * Try to map the given namenode ID to its service address.
    */