
HDFS-8113. Add check for null BlockCollection pointers in BlockInfoContiguous structures (Chengbing Liu via Colin P. McCabe)

(cherry picked from commit f523e963e4d88e4e459352387c6efeab59e7a809)
Colin Patrick McCabe 10 years ago
parent
commit
ab80a572be

+ 3 - 0
hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt

@@ -200,6 +200,9 @@ Release 2.8.0 - UNRELEASED
     HDFS-8284. Update documentation about how to use HTrace with HDFS (Masatake
     Iwasaki via Colin P. McCabe)
 
+    HDFS-8113. Add check for null BlockCollection pointers in
+    BlockInfoContiguous structures (Chengbing Liu via Colin P. McCabe)
+
   OPTIMIZATIONS
 
     HDFS-8026. Trace FSOutputSummer#writeChecksumChunks rather than

+ 2 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockInfoContiguous.java

@@ -77,7 +77,8 @@ public class BlockInfoContiguous extends Block
    * @param from BlockInfo to copy from.
    */
   protected BlockInfoContiguous(BlockInfoContiguous from) {
-    this(from, from.bc.getBlockReplication());
+    super(from);
+    this.triplets = new Object[from.triplets.length];
     this.bc = from.bc;
   }
 
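The substance of the fix is in the copy constructor above: the old code delegated to this(from, from.bc.getBlockReplication()), which dereferences the block's BlockCollection and throws a NullPointerException when bc is null (for example, for a block that is no longer attached to any file). The new code sizes the triplets array from the source block itself, so no BlockCollection lookup is needed. Below is a minimal, self-contained sketch of the same pattern; SimpleBlock and SimpleBlockCollection and their fields are illustrative stand-ins, not the actual HDFS types.

// Illustrative sketch only: SimpleBlock / SimpleBlockCollection are stand-ins
// for BlockInfoContiguous / BlockCollection, not the real HDFS classes.
class SimpleBlockCollection {
    short getBlockReplication() { return 3; }
}

class SimpleBlock {
    Object[] slots;                    // plays the role of the triplets array
    SimpleBlockCollection collection;  // may be null once the block is detached

    SimpleBlock(short replication) {
        this.slots = new Object[3 * replication];
    }

    // Old-style copy: derives the capacity from the collection and
    // therefore NPEs when from.collection is null.
    //   SimpleBlock(SimpleBlock from) {
    //       this(from.collection.getBlockReplication());
    //       this.collection = from.collection;
    //   }

    // Fixed copy: take the capacity from the source block directly,
    // never touching the (possibly null) collection.
    SimpleBlock(SimpleBlock from) {
        this.slots = new Object[from.slots.length];
        this.collection = from.collection;
    }
}

public class CopyConstructorDemo {
    public static void main(String[] args) {
        SimpleBlock orphan = new SimpleBlock((short) 3);
        orphan.collection = null;                   // block not tied to any file
        SimpleBlock copy = new SimpleBlock(orphan); // ok; old style would throw NPE
        System.out.println("copied capacity: " + copy.slots.length);
    }
}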

+ 11 - 0
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestBlockInfo.java

@@ -69,6 +69,17 @@ public class TestBlockInfo {
     Assert.assertEquals(storage, blockInfo.getStorageInfo(0));
   }
 
+  @Test
+  public void testCopyConstructor() {
+    BlockInfoContiguous old = new BlockInfoContiguous((short) 3);
+    try {
+      BlockInfoContiguous copy = new BlockInfoContiguous(old);
+      assertEquals(old.getBlockCollection(), copy.getBlockCollection());
+      assertEquals(old.getCapacity(), copy.getCapacity());
+    } catch (Exception e) {
+      Assert.fail("Copy constructor throws exception: " + e);
+    }
+  }
 
   @Test
   public void testReplaceStorage() throws Exception {
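The new test appears to exercise exactly the failure mode the fix addresses: a BlockInfoContiguous created via the (short) constructor has no BlockCollection attached, so old.getBlockCollection() is null. With the previous copy constructor, new BlockInfoContiguous(old) would have thrown a NullPointerException from from.bc.getBlockReplication(); after the change the copy succeeds and both assertions pass.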