
HDFS-11263. ClassCastException when we use Bzipcodec for Fsimage compression. Contributed by Brahma Reddy Battula.

(cherry picked from commit 1b401f6a734df4e23a79b3bd89c816a1fc0de574)
Brahma Reddy Battula 8 years ago
parent
commit
3e0bd9951f

+ 3 - 0
hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt

@@ -210,6 +210,9 @@ Release 2.7.4 - UNRELEASED
     HDFS-11160. VolumeScanner reports write-in-progress replicas as corrupt
     incorrectly. Contributed by Wei-Chiu Chuang and Yongjun Zhang.
 
+    HDFS-11263. ClassCastException when we use Bzipcodec for Fsimage
+    compression. (Brahma Reddy Battula)
+
 Release 2.7.3 - 2016-08-25
 
   INCOMPATIBLE CHANGES

+ 2 - 2
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatProtobuf.java

@@ -40,6 +40,7 @@ import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
 
+import org.apache.hadoop.io.compress.CompressionOutputStream;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -62,7 +63,6 @@ import org.apache.hadoop.hdfs.server.namenode.startupprogress.StepType;
 import org.apache.hadoop.hdfs.util.MD5FileUtils;
 import org.apache.hadoop.io.MD5Hash;
 import org.apache.hadoop.io.compress.CompressionCodec;
-import org.apache.hadoop.io.compress.CompressorStream;
 import org.apache.hadoop.util.LimitInputStream;
 import org.apache.hadoop.util.Time;
 
@@ -401,7 +401,7 @@ public final class FSImageFormatProtobuf {
 
     private void flushSectionOutputStream() throws IOException {
       if (codec != null) {
-        ((CompressorStream) sectionOutputStream).finish();
+        ((CompressionOutputStream) sectionOutputStream).finish();
       }
       sectionOutputStream.flush();
     }
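
For context (not part of the patch): CompressionCodec.createOutputStream is declared to return a CompressionOutputStream, and only some codec paths actually hand back a CompressorStream subclass; the pure-Java BZip2 stream does not, which is what triggered the ClassCastException. Casting to the common base class, as the change above does, works for every codec because finish() is declared on CompressionOutputStream itself. A minimal sketch, assuming Hadoop 2.7.x on the classpath and no native bzip2 library loaded (class and variable names are illustrative only):

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.nio.charset.StandardCharsets;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.compress.BZip2Codec;
    import org.apache.hadoop.io.compress.CompressionOutputStream;
    import org.apache.hadoop.io.compress.CompressorStream;

    public class BzipFinishSketch {
      public static void main(String[] args) throws IOException {
        BZip2Codec codec = new BZip2Codec();
        codec.setConf(new Configuration());

        ByteArrayOutputStream section = new ByteArrayOutputStream();
        // createOutputStream is declared to return CompressionOutputStream.
        CompressionOutputStream out = codec.createOutputStream(section);

        // With the pure-Java bzip2 implementation this prints "false":
        // the stream is not a CompressorStream, so the old cast would throw.
        System.out.println(out instanceof CompressorStream);

        out.write("fsimage section bytes".getBytes(StandardCharsets.UTF_8));
        // finish() ends the compressed block without closing the underlying
        // stream, mirroring what flushSectionOutputStream needs.
        out.finish();
        out.close();
      }
    }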

+ 9 - 2
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSImage.java

@@ -62,8 +62,15 @@ public class TestFSImage {
   public void testCompression() throws IOException {
     Configuration conf = new Configuration();
     conf.setBoolean(DFSConfigKeys.DFS_IMAGE_COMPRESS_KEY, true);
-    conf.set(DFSConfigKeys.DFS_IMAGE_COMPRESSION_CODEC_KEY,
-        "org.apache.hadoop.io.compress.GzipCodec");
+    setCompressCodec(conf, "org.apache.hadoop.io.compress.DefaultCodec");
+    setCompressCodec(conf, "org.apache.hadoop.io.compress.GzipCodec");
+    setCompressCodec(conf, "org.apache.hadoop.io.compress.BZip2Codec");
+    setCompressCodec(conf, "org.apache.hadoop.io.compress.Lz4Codec");
+  }
+
+  private void setCompressCodec(Configuration conf, String compressCodec)
+      throws IOException {
+    conf.set(DFSConfigKeys.DFS_IMAGE_COMPRESSION_CODEC_KEY, compressCodec);
     testPersistHelper(conf);
   }
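
As a usage note (not part of the patch): the test sets the same configuration an operator would use to compress the fsimage. A minimal sketch using the literal key names behind DFSConfigKeys.DFS_IMAGE_COMPRESS_KEY and DFSConfigKeys.DFS_IMAGE_COMPRESSION_CODEC_KEY; the class name is illustrative only:

    import org.apache.hadoop.conf.Configuration;

    public class EnableFsImageCompression {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // "dfs.image.compress" is the value of DFS_IMAGE_COMPRESS_KEY.
        conf.setBoolean("dfs.image.compress", true);
        // "dfs.image.compression.codec" is the value of
        // DFS_IMAGE_COMPRESSION_CODEC_KEY.
        conf.set("dfs.image.compression.codec",
            "org.apache.hadoop.io.compress.BZip2Codec");
        // Before HDFS-11263, saving an fsimage with BZip2Codec configured
        // failed with a ClassCastException in
        // FSImageFormatProtobuf#flushSectionOutputStream.
      }
    }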