
svn merge -c 1195731 from trunk for HDFS-2065.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.23@1195736 13f79535-47bb-0310-9956-ffa450edef68
Tsz-wo Sze, 13 years ago
commit 802e6a10e8

+ 3 - 0
hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt

@@ -1165,6 +1165,9 @@ Release 0.23.0 - Unreleased
     getFileStatus on non-existing files; fix bugs in getBlockLocations; and
     changed getFileChecksum json response root to "FileChecksum".  (szetszwo)
 
+    HDFS-2065. Add null checks in DFSClient.getFileChecksum(..).  (Uma
+    Maheswara Rao G via szetszwo)
+
   BREAKDOWN OF HDFS-1073 SUBTASKS
 
     HDFS-1521. Persist transaction ID on disk between NN restarts.
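
For illustration only (not part of this commit), a self-contained sketch of the null-check pattern the entry above describes; the RPC lookup is reduced to a hypothetical stand-in method, and all names here are invented:

    import java.io.FileNotFoundException;
    import java.util.Collections;
    import java.util.List;

    public class NullCheckDemo {
      // Hypothetical stand-in for the block-location RPC: returns null
      // when the path does not exist.
      static List<String> getBlockLocations(String src) {
        return "/exists".equals(src) ? Collections.singletonList("blk_1") : null;
      }

      static List<String> locate(String src) throws FileNotFoundException {
        List<String> blocks = getBlockLocations(src);
        if (blocks == null) {
          // The pattern of the fix: fail with a descriptive exception
          // instead of letting a later chained call throw an NPE.
          throw new FileNotFoundException("File does not exist: " + src);
        }
        return blocks;
      }

      public static void main(String[] args) throws Exception {
        System.out.println(locate("/exists"));
        System.out.println(locate("/missing")); // throws FileNotFoundException
      }
    }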

+ 10 - 4
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java

@@ -1088,8 +1088,11 @@ public class DFSClient implements java.io.Closeable {
       ClientProtocol namenode, SocketFactory socketFactory, int socketTimeout
       ) throws IOException {
     //get all block locations
-    List<LocatedBlock> locatedblocks
-        = callGetBlockLocations(namenode, src, 0, Long.MAX_VALUE).getLocatedBlocks();
+    LocatedBlocks blockLocations = callGetBlockLocations(namenode, src, 0, Long.MAX_VALUE);
+    if (null == blockLocations) {
+      throw new FileNotFoundException("File does not exist: " + src);
+    }
+    List<LocatedBlock> locatedblocks = blockLocations.getLocatedBlocks();
     final DataOutputBuffer md5out = new DataOutputBuffer();
     int bytesPerCRC = 0;
     long crcPerBlock = 0;
@@ -1099,8 +1102,11 @@ public class DFSClient implements java.io.Closeable {
     //get block checksum for each block
     for(int i = 0; i < locatedblocks.size(); i++) {
       if (refetchBlocks) {  // refetch to get fresh tokens
-        locatedblocks = callGetBlockLocations(namenode, src, 0, Long.MAX_VALUE)
-            .getLocatedBlocks();
+        blockLocations = callGetBlockLocations(namenode, src, 0, Long.MAX_VALUE);
+        if (null == blockLocations) {
+          throw new FileNotFoundException("File does not exist: " + src);
+        }
+        locatedblocks = blockLocations.getLocatedBlocks();
         refetchBlocks = false;
       }
       LocatedBlock lb = locatedblocks.get(i);
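
Caller-side effect of the change above, as a minimal sketch: with the null checks in place, getFileChecksum(..) on a missing path surfaces a FileNotFoundException with a descriptive message rather than a NullPointerException from the chained getLocatedBlocks() call. The namenode URI below is a hypothetical placeholder.

    import java.io.FileNotFoundException;
    import java.io.IOException;
    import java.net.URI;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileChecksum;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class ChecksumExample {
      public static void main(String[] args) throws IOException {
        // Hypothetical namenode address; adjust for the target cluster.
        FileSystem fs = FileSystem.get(
            URI.create("hdfs://localhost:8020"), new Configuration());
        try {
          FileChecksum sum = fs.getFileChecksum(new Path("/test/TestNonExistingFile"));
          System.out.println("checksum = " + sum);
        } catch (FileNotFoundException e) {
          // After HDFS-2065, a missing path fails fast here with a
          // descriptive message instead of an NPE inside DFSClient.
          System.err.println(e.getMessage());
        }
      }
    }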

+ 19 - 0
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java

@@ -412,6 +412,25 @@ public class TestDistributedFileSystem {
     final UserGroupInformation ugi = UserGroupInformation.createUserForTesting(
         current.getShortUserName() + "x", new String[]{"user"});
     
+    try {
+      ((DistributedFileSystem) hdfs).getFileChecksum(new Path(
+          "/test/TestNonExistingFile"));
+      fail("Expecting FileNotFoundException");
+    } catch (FileNotFoundException e) {
+      assertTrue("Not throwing the intended exception message", e.getMessage()
+          .contains("File does not exist: /test/TestNonExistingFile"));
+    }
+
+    try {
+      Path path = new Path("/test/TestExistingDir/");
+      hdfs.mkdirs(path);
+      ((DistributedFileSystem) hdfs).getFileChecksum(path);
+      fail("Expecting FileNotFoundException");
+    } catch (FileNotFoundException e) {
+      assertTrue("Not throwing the intended exception message", e.getMessage()
+          .contains("File does not exist: /test/TestExistingDir"));
+    }
+    
     //hftp
     final String hftpuri = "hftp://" + nnAddr;
     System.out.println("hftpuri=" + hftpuri);
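
For reference, a standalone JUnit sketch exercising the same behavior the test hunk above covers, run against a MiniDFSCluster; the test class name and path are hypothetical and not part of this commit.

    import static org.junit.Assert.assertTrue;
    import static org.junit.Assert.fail;

    import java.io.FileNotFoundException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hdfs.MiniDFSCluster;
    import org.junit.Test;

    public class TestGetFileChecksumNullCheck {
      @Test
      public void testChecksumOnMissingPath() throws Exception {
        MiniDFSCluster cluster =
            new MiniDFSCluster.Builder(new Configuration()).numDataNodes(1).build();
        try {
          FileSystem fs = cluster.getFileSystem();
          try {
            fs.getFileChecksum(new Path("/no/such/file"));
            fail("Expecting FileNotFoundException");
          } catch (FileNotFoundException e) {
            // The fix reports the missing path in the message.
            assertTrue(e.getMessage().contains("File does not exist"));
          }
        } finally {
          cluster.shutdown();
        }
      }
    }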