Browse Source

HDFS-3003. Remove getHostPortString() from NameNode, replace it with NetUtils.getHostPortString(). Contributed by Brandon Li.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1293338 13f79535-47bb-0310-9956-ffa450edef68
Aaron Myers 13 years ago
parent
commit
62c7e2edfc
14 changed files with 35 additions and 32 deletions
  1. +4 −1   hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
  2. +1 −1   hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
  3. +5 −5   hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/BackupNode.java
  4. +1 −1   hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileChecksumServlets.java
  5. +2 −1   hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java
  6. +4 −11  hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java
  7. +2 −1   hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeJspHelper.java
  8. +3 −3   hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java
  9. +2 −1   hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDatanodeJsp.java
  10. +2 −1  hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestBackupNode.java
  11. +2 −1  hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestCheckpoint.java
  12. +2 −1  hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStreamFile.java
  13. +3 −2  hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestTransferFsImage.java
  14. +2 −2  hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestGetConf.java

+ 4 - 1
hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt

@@ -133,7 +133,10 @@ Trunk (unreleased changes)
     (todd)
 
     HDFS-2655. BlockReaderLocal#skip performs unnecessary IO. (Brandon Li
-    via jitendra) 
+    via jitendra)
+
+    HDFS-3003. Remove getHostPortString() from NameNode, replace it with
+    NetUtils.getHostPortString(). (Brandon Li via atm)
 
   OPTIMIZATIONS
     HDFS-2477. Optimize computing the diff between a block report and the

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java

@@ -425,7 +425,7 @@ public class DFSUtil {
     // Use default address as fall back
     String defaultAddress;
     try {
-      defaultAddress = NameNode.getHostPortString(NameNode.getAddress(conf));
+      defaultAddress = NetUtils.getHostPortString(NameNode.getAddress(conf));
     } catch (IllegalArgumentException e) {
       defaultAddress = null;
     }

+ 5 - 5
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/BackupNode.java

@@ -107,13 +107,13 @@ public class BackupNode extends NameNode {
   @Override // NameNode
   protected void setRpcServerAddress(Configuration conf,
       InetSocketAddress addr) {
-    conf.set(BN_ADDRESS_NAME_KEY, getHostPortString(addr));
+    conf.set(BN_ADDRESS_NAME_KEY, NetUtils.getHostPortString(addr));
   }
   
   @Override // Namenode
   protected void setRpcServiceServerAddress(Configuration conf,
       InetSocketAddress addr) {
-    conf.set(BN_SERVICE_RPC_ADDRESS_KEY,  getHostPortString(addr));
+    conf.set(BN_SERVICE_RPC_ADDRESS_KEY, NetUtils.getHostPortString(addr));
   }
 
   @Override // NameNode
@@ -125,7 +125,7 @@ public class BackupNode extends NameNode {
   
   @Override // NameNode
   protected void setHttpServerAddress(Configuration conf){
-    conf.set(BN_HTTP_ADDRESS_NAME_KEY, getHostPortString(getHttpAddress()));
+    conf.set(BN_HTTP_ADDRESS_NAME_KEY, NetUtils.getHostPortString(getHttpAddress()));
   }
 
   @Override // NameNode
@@ -307,8 +307,8 @@ public class BackupNode extends NameNode {
     InetSocketAddress nnAddress = NameNode.getServiceAddress(conf, true);
     this.namenode = new NamenodeProtocolTranslatorPB(nnAddress, conf,
         UserGroupInformation.getCurrentUser());
-    this.nnRpcAddress = getHostPortString(nnAddress);
-    this.nnHttpAddress = getHostPortString(super.getHttpServerAddress(conf));
+    this.nnRpcAddress = NetUtils.getHostPortString(nnAddress);
+    this.nnHttpAddress = NetUtils.getHostPortString(super.getHttpServerAddress(conf));
     // get version and id info from the name-node
     NamespaceInfo nsInfo = null;
     while(!isStopRequested()) {

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileChecksumServlets.java

@@ -71,7 +71,7 @@ public class FileChecksumServlets {
         String tokenString = ugi.getTokens().iterator().next().encodeToUrlString();
         dtParam = JspHelper.getDelegationTokenUrlParam(tokenString);
       }
-      String addr = NameNode.getHostPortString(nn.getNameNodeAddress());
+      String addr = NetUtils.getHostPortString(nn.getNameNodeAddress());
       String addrParam = JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, addr);
 
       return new URL(scheme, hostname, port, 

+ 2 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java

@@ -33,6 +33,7 @@ import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
 import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
 import org.apache.hadoop.hdfs.server.common.JspHelper;
+import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.ServletUtil;
 
@@ -72,7 +73,7 @@ public class FileDataServlet extends DfsServlet {
     // Add namenode address to the url params
     NameNode nn = NameNodeHttpServer.getNameNodeFromContext(
         getServletContext());
-    String addr = NameNode.getHostPortString(nn.getNameNodeAddress());
+    String addr = NetUtils.getHostPortString(nn.getNameNodeAddress());
     String addrParam = JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, addr);
     
     return new URL(scheme, hostname, port,

+ 4 - 11
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java

@@ -236,13 +236,6 @@ public class NameNode {
         + namenode.getHostName()+portString);
   }
 
-  /**
-   * Compose a "host:port" string from the address.
-   */
-  public static String getHostPortString(InetSocketAddress addr) {
-    return addr.getHostName() + ":" + addr.getPort();
-  }
-
   //
   // Common NameNode methods implementation for the active name-node role.
   //
@@ -273,7 +266,7 @@ public class NameNode {
    */
   protected void setRpcServiceServerAddress(Configuration conf,
       InetSocketAddress serviceRPCAddress) {
-    setServiceAddress(conf, getHostPortString(serviceRPCAddress));
+    setServiceAddress(conf, NetUtils.getHostPortString(serviceRPCAddress));
   }
 
   protected void setRpcServerAddress(Configuration conf,
@@ -293,7 +286,7 @@ public class NameNode {
   
   protected void setHttpServerAddress(Configuration conf) {
     conf.set(DFS_NAMENODE_HTTP_ADDRESS_KEY,
-        getHostPortString(getHttpAddress()));
+        NetUtils.getHostPortString(getHttpAddress()));
   }
 
   protected void loadNamesystem(Configuration conf) throws IOException {
@@ -306,8 +299,8 @@ public class NameNode {
 
   NamenodeRegistration setRegistration() {
     nodeRegistration = new NamenodeRegistration(
-        getHostPortString(rpcServer.getRpcAddress()),
-        getHostPortString(getHttpAddress()),
+        NetUtils.getHostPortString(rpcServer.getRpcAddress()),
+        NetUtils.getHostPortString(getHttpAddress()),
         getFSImage().getStorage(), getRole());
     return nodeRegistration;
   }

+ 2 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeJspHelper.java

@@ -51,6 +51,7 @@ import org.apache.hadoop.hdfs.server.common.Storage.StorageDirectory;
 import org.apache.hadoop.hdfs.server.common.UpgradeStatusReport;
 import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.net.NodeBase;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
@@ -401,7 +402,7 @@ class NamenodeJspHelper {
       nodeToRedirect = nn.getHttpAddress().getHostName();
       redirectPort = nn.getHttpAddress().getPort();
     }
-    String addr = NameNode.getHostPortString(nn.getNameNodeAddress());
+    String addr = NetUtils.getHostPortString(nn.getNameNodeAddress());
     String fqdn = InetAddress.getByName(nodeToRedirect).getCanonicalHostName();
     redirectLocation = "http://" + fqdn + ":" + redirectPort
         + "/browseDirectory.jsp?namenodeInfoPort="

+ 3 - 3
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java

@@ -620,10 +620,10 @@ public class MiniDFSCluster {
     NameNode nn = createNameNode(nnIndex, conf, numDataNodes, manageNameDfsDirs,
         format, operation, clusterId);
     conf.set(DFSUtil.getNameServiceIdKey(
-        DFS_NAMENODE_RPC_ADDRESS_KEY, nameserviceId), NameNode
+        DFS_NAMENODE_RPC_ADDRESS_KEY, nameserviceId), NetUtils
         .getHostPortString(nn.getNameNodeAddress()));
     conf.set(DFSUtil.getNameServiceIdKey(
-        DFS_NAMENODE_HTTP_ADDRESS_KEY, nameserviceId), NameNode
+        DFS_NAMENODE_HTTP_ADDRESS_KEY, nameserviceId), NetUtils
         .getHostPortString(nn.getHttpAddress()));
     DFSUtil.setGenericConf(conf, nameserviceId, 
         DFS_NAMENODE_HTTP_ADDRESS_KEY);
@@ -643,7 +643,7 @@ public class MiniDFSCluster {
    */
   public URI getURI(int nnIndex) {
     InetSocketAddress addr = nameNodes[nnIndex].nameNode.getNameNodeAddress();
-    String hostPort = NameNode.getHostPortString(addr);
+    String hostPort = NetUtils.getHostPortString(addr);
     URI uri = null;
     try {
       uri = new URI("hdfs://" + hostPort);

+ 2 - 1
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDatanodeJsp.java

@@ -35,6 +35,7 @@ import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.server.common.JspHelper;
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
+import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.util.ServletUtil;
 import org.junit.Test;
 import org.mockito.Mockito;
@@ -134,7 +135,7 @@ public class TestDatanodeJsp {
     Mockito.doReturn("100").when(reqMock).getParameter("chunkSizeToView");
     Mockito.doReturn("1").when(reqMock).getParameter("startOffset");
     Mockito.doReturn("1024").when(reqMock).getParameter("blockSize");
-    Mockito.doReturn(NameNode.getHostPortString(NameNode.getAddress(CONF)))
+    Mockito.doReturn(NetUtils.getHostPortString(NameNode.getAddress(CONF)))
         .when(reqMock).getParameter("nnaddr");
     Mockito.doReturn(testFile.toString()).when(reqMock).getPathInfo();
   }

+ 2 - 1
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestBackupNode.java

@@ -39,6 +39,7 @@ import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.StartupOption;
 import org.apache.hadoop.hdfs.server.common.Storage.StorageDirectory;
 import org.apache.hadoop.hdfs.server.namenode.FileJournalManager.EditLogFile;
 import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols;
+import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.log4j.Level;
 import org.junit.Before;
@@ -330,7 +331,7 @@ public class TestBackupNode {
       InetSocketAddress add = backup.getNameNodeAddress();
       // Write to BN
       FileSystem bnFS = FileSystem.get(new Path("hdfs://"
-          + NameNode.getHostPortString(add)).toUri(), conf);
+          + NetUtils.getHostPortString(add)).toUri(), conf);
       boolean canWrite = true;
       try {
         TestCheckpoint.writeFile(bnFS, file3, replication);

+ 2 - 1
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestCheckpoint.java

@@ -57,6 +57,7 @@ import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols;
 import org.apache.hadoop.hdfs.server.protocol.RemoteEditLog;
 import org.apache.hadoop.hdfs.server.protocol.RemoteEditLogManifest;
 import org.apache.hadoop.hdfs.tools.DFSAdmin;
+import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.test.GenericTestUtils.DelayAnswer;
 import org.apache.hadoop.util.StringUtils;
@@ -1432,7 +1433,7 @@ public class TestCheckpoint extends TestCase {
           .format(true).build();
       
       NamenodeProtocols nn = cluster.getNameNodeRpc();
-      String fsName = NameNode.getHostPortString(
+      String fsName = NetUtils.getHostPortString(
           cluster.getNameNode().getHttpAddress());
 
       // Make a finalized log on the server side. 

+ 2 - 1
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStreamFile.java

@@ -43,6 +43,7 @@ import org.apache.hadoop.hdfs.DFSInputStream;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.server.common.JspHelper;
+import org.apache.hadoop.net.NetUtils;
 import org.junit.Test;
 import org.mockito.Mockito;
 import org.mortbay.jetty.InclusiveByteRange;
@@ -263,7 +264,7 @@ public class TestStreamFile {
 
     Mockito.doReturn(CONF).when(mockServletContext).getAttribute(
         JspHelper.CURRENT_CONF);
-    Mockito.doReturn(NameNode.getHostPortString(NameNode.getAddress(CONF)))
+    Mockito.doReturn(NetUtils.getHostPortString(NameNode.getAddress(CONF)))
       .when(mockHttpServletRequest).getParameter("nnaddr");
     Mockito.doReturn(testFile.toString()).when(mockHttpServletRequest)
       .getPathInfo();

+ 3 - 2
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestTransferFsImage.java

@@ -27,6 +27,7 @@ import java.util.List;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.util.StringUtils;
 import org.junit.Test;
 import org.mockito.Mockito;
@@ -54,7 +55,7 @@ public class TestTransferFsImage {
         new File("/xxxxx-does-not-exist/blah"));
        
     try {
-      String fsName = NameNode.getHostPortString(
+      String fsName = NetUtils.getHostPortString(
           cluster.getNameNode().getHttpAddress());
       String id = "getimage=1&txid=0";
 
@@ -86,7 +87,7 @@ public class TestTransferFsImage {
         );
        
     try {
-      String fsName = NameNode.getHostPortString(
+      String fsName = NetUtils.getHostPortString(
           cluster.getNameNode().getHttpAddress());
       String id = "getimage=1&txid=0";
 

+ 2 - 2
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestGetConf.java

@@ -33,10 +33,10 @@ import static org.apache.hadoop.hdfs.DFSConfigKeys.*;
 
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
-import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import org.apache.hadoop.hdfs.tools.GetConf;
 import org.apache.hadoop.hdfs.tools.GetConf.Command;
 import org.apache.hadoop.hdfs.tools.GetConf.CommandHandler;
+import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.util.ToolRunner;
 import org.junit.Test;
 
@@ -86,7 +86,7 @@ public class TestGetConf {
   private String[] toStringArray(List<InetSocketAddress> list) {
     String[] ret = new String[list.size()];
     for (int i = 0; i < list.size(); i++) {
-      ret[i] = NameNode.getHostPortString(list.get(i));
+      ret[i] = NetUtils.getHostPortString(list.get(i));
     }
     return ret;
   }