
HDFS-5857. TestWebHDFS#testNamenodeRestart fails intermittently with NPE. Contributed by Mit Desai.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1574683 13f79535-47bb-0310-9956-ffa450edef68
Haohui Mai 11 years ago
parent commit b46fbd0275
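
Context for the change below: while a NameNode is (re)starting, NameNode#getRpcServer() can still return null, so WebHDFS handlers that call it directly and immediately invoke a method on the result throw a NullPointerException; that race is why TestWebHDFS#testNamenodeRestart failed intermittently. The patch routes every such call through a small guard that turns the null into an IOException. A minimal standalone sketch of the idea (the class and method names here are illustrative, not part of the patch, which adds the guard as NamenodeWebHdfsMethods#getRPCServer):

import java.io.IOException;

import org.apache.hadoop.hdfs.server.namenode.NameNode;
import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols;

// Illustrative only: the guard pattern the patch introduces, shown in isolation.
final class RpcServerGuard {
  static NamenodeProtocols rpcServerOrFail(NameNode namenode) throws IOException {
    final NamenodeProtocols np = namenode.getRpcServer();
    if (np == null) {
      // Before this patch, callers dereferenced the null and threw NPE.
      throw new IOException("Namenode is in startup mode");
    }
    return np;
  }
}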

+ 3 - 0
hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt

@@ -683,6 +683,9 @@ Release 2.4.0 - UNRELEASED
     HDFS-6051. HDFS cannot run on Windows since short-circuit shared memory
     segment changes. (cmccabe)
 
+    HDFS-5857. TestWebHDFS#testNamenodeRestart fails intermittently with NPE.
+    (Mit Desai via wheat9)
+
   BREAKDOWN OF HDFS-5698 SUBTASKS AND RELATED JIRAS
 
     HDFS-5717. Save FSImage header in protobuf. (Haohui Mai via jing9)

+ 14 - 5
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java

@@ -164,6 +164,15 @@ public class NamenodeWebHdfsMethods {
     response.setContentType(null);
   }
 
+  private static NamenodeProtocols getRPCServer(NameNode namenode)
+      throws IOException {
+    final NamenodeProtocols np = namenode.getRpcServer();
+    if (np == null) {
+      throw new IOException("Namenode is in startup mode");
+    }
+    return np;
+  }
+
   @VisibleForTesting
   static DatanodeInfo chooseDatanode(final NameNode namenode,
       final String path, final HttpOpParam.Op op, final long openOffset,
@@ -188,7 +197,7 @@ public class NamenodeWebHdfsMethods {
         || op == GetOpParam.Op.GETFILECHECKSUM
         || op == PostOpParam.Op.APPEND) {
       //choose a datanode containing a replica 
-      final NamenodeProtocols np = namenode.getRpcServer();
+      final NamenodeProtocols np = getRPCServer(namenode);
       final HdfsFileStatus status = np.getFileInfo(path);
       if (status == null) {
         throw new FileNotFoundException("File " + path + " not found.");
@@ -424,7 +433,7 @@ public class NamenodeWebHdfsMethods {
 
     final Configuration conf = (Configuration)context.getAttribute(JspHelper.CURRENT_CONF);
     final NameNode namenode = (NameNode)context.getAttribute("name.node");
-    final NamenodeProtocols np = namenode.getRpcServer();
+    final NamenodeProtocols np = getRPCServer(namenode);
 
     switch(op.getValue()) {
     case CREATE:
@@ -606,7 +615,7 @@ public class NamenodeWebHdfsMethods {
     }
     case CONCAT:
     {
-      namenode.getRpcServer().concat(fullpath, concatSrcs.getAbsolutePaths());
+      getRPCServer(namenode).concat(fullpath, concatSrcs.getAbsolutePaths());
       return Response.ok().build();
     }
     default:
@@ -696,7 +705,7 @@ public class NamenodeWebHdfsMethods {
       final BufferSizeParam bufferSize
       ) throws IOException, URISyntaxException {
     final NameNode namenode = (NameNode)context.getAttribute("name.node");
-    final NamenodeProtocols np = namenode.getRpcServer();
+    final NamenodeProtocols np = getRPCServer(namenode);
 
     switch(op.getValue()) {
     case OPEN:
@@ -905,7 +914,7 @@ public class NamenodeWebHdfsMethods {
     switch(op.getValue()) {
     case DELETE:
     {
-      final boolean b = namenode.getRpcServer().delete(fullpath, recursive.getValue());
+      final boolean b = getRPCServer(namenode).delete(fullpath, recursive.getValue());
       final String js = JsonUtil.toJsonString("boolean", b);
       return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
     }
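
With this guard in place, a WebHDFS request issued while the namenode is starting surfaces an IOException the caller can retry on, instead of a server-side NullPointerException. A hedged client-side sketch (the URI, path, and retry policy are made up for illustration; this is not the actual TestWebHDFS code):

import java.io.IOException;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Hypothetical caller: retry a WebHDFS stat while the namenode restarts.
public class WebHdfsRetrySketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(URI.create("webhdfs://nn-host:50070"), conf);
    Path p = new Path("/tmp/example");
    for (int attempt = 0; attempt < 10; attempt++) {
      try {
        System.out.println(fs.getFileStatus(p));
        return;
      } catch (IOException e) {
        // Likely "Namenode is in startup mode" (or the namenode is not yet
        // reachable): back off and retry rather than failing on the request.
        Thread.sleep(1000L);
      }
    }
  }
}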