Browse Source

HADOOP-5213. Fix NullPointerException caused when bzip2 compression
was used and a user closed an output stream without writing any data.
(Zheng Shao via dhruba)



git-svn-id: https://svn.apache.org/repos/asf/hadoop/core/branches/branch-0.19@770550 13f79535-47bb-0310-9956-ffa450edef68

Dhruba Borthakur 16 years ago
parent
commit
a20c705c8e

+ 4 - 0
CHANGES.txt

@@ -104,6 +104,10 @@ Release 0.19.2 - Unreleased
     HADOOP-5671. Fix FNF exceptions when copying from old versions of
     HftpFileSystem. (Tsz Wo (Nicholas), SZE via cdouglas)
 
+    HADOOP-5213. Fix NullPointerException caused when bzip2 compression 
+    was used and a user closed an output stream without writing any data.
+    (Zheng Shao via dhruba)
+
 Release 0.19.1 - 2009-02-23 
 
     HADOOP-5225. Workaround for tmp file handling in HDFS. sync() is 

+ 12 - 0
src/core/org/apache/hadoop/io/compress/BZip2Codec.java

@@ -169,6 +169,12 @@ public class BZip2Codec implements
     }
 
     public void finish() throws IOException {
+      if (needsReset) {
+        // In the case that nothing is written to this stream, we still need to
+        // write out the header before closing, otherwise the stream won't be
+        // recognized by BZip2CompressionInputStream.
+        internalReset();
+      }
       this.output.finish();
       needsReset = true;
     }
@@ -202,6 +208,12 @@ public class BZip2Codec implements
     }
 
     public void close() throws IOException {
+      if (needsReset) {
+        // In the case that nothing is written to this stream, we still need to
+        // write out the header before closing, otherwise the stream won't be
+        // recognized by BZip2CompressionInputStream.
+        internalReset();
+      }
       this.output.flush();
       this.output.close();
       needsReset = true;

+ 8 - 2
src/test/org/apache/hadoop/io/compress/TestCodec.java

@@ -52,26 +52,31 @@ public class TestCodec extends TestCase {
   private int seed = new Random().nextInt();
   
   public void testDefaultCodec() throws IOException {
+    codecTest(conf, seed, 0, "org.apache.hadoop.io.compress.DefaultCodec");
     codecTest(conf, seed, count, "org.apache.hadoop.io.compress.DefaultCodec");
   }
   
   public void testGzipCodec() throws IOException {
+    codecTest(conf, seed, 0, "org.apache.hadoop.io.compress.GzipCodec");
     codecTest(conf, seed, count, "org.apache.hadoop.io.compress.GzipCodec");
   }
   
   public void testLzoCodec() throws IOException {
     if (LzoCodec.isNativeLzoLoaded(conf)) {
+      codecTest(conf, seed, 0, "org.apache.hadoop.io.compress.LzoCodec");
       codecTest(conf, seed, count, "org.apache.hadoop.io.compress.LzoCodec");
     }
   }
   
   public void testLzopCodec() throws IOException {
     if (LzopCodec.isNativeLzoLoaded(conf)) {
+      codecTest(conf, seed, 0, "org.apache.hadoop.io.compress.LzopCodec");
       codecTest(conf, seed, count, "org.apache.hadoop.io.compress.LzopCodec");
     }
   }
   
   public void testBZip2Codec() throws IOException {    
+    codecTest(conf, seed, 0, "org.apache.hadoop.io.compress.BZip2Codec");    
     codecTest(conf, seed, count, "org.apache.hadoop.io.compress.BZip2Codec");    
   }
 
@@ -150,8 +155,9 @@ public class TestCodec extends TestCase {
   
   public void testSequenceFileBZip2Codec() throws IOException, ClassNotFoundException, 
       InstantiationException, IllegalAccessException {
-    sequenceFileCodecTest(conf, 100, "org.apache.hadoop.io.compress.BZip2Codec", 100);    
-    sequenceFileCodecTest(conf, 200000, "org.apache.hadoop.io.compress.BZip2Codec", 1000000);    
+    sequenceFileCodecTest(conf, 0, "org.apache.hadoop.io.compress.BZip2Codec", 100);
+    sequenceFileCodecTest(conf, 100, "org.apache.hadoop.io.compress.BZip2Codec", 100);
+    sequenceFileCodecTest(conf, 200000, "org.apache.hadoop.io.compress.BZip2Codec", 1000000);
   }
   
   private static void sequenceFileCodecTest(Configuration conf, int lines,