HADOOP-8151. Error handling in snappy decompressor throws invalid exceptions. Contributed by Matt Foley. (harsh)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1389006 13f79535-47bb-0310-9956-ffa450edef68
(cherry picked from commit ac31d6a4485d7ff9074fb5dade7a6cf5292bb347)

Conflicts:

	hadoop-common-project/hadoop-common/CHANGES.txt
Harsh J committed 12 years ago
commit 55427fb66c
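
The change is the same one-character class-name fix in each native file: JNI's FindClass() expects a class name in internal form, e.g. "java/lang/InternalError", whereas the old code passed it with a stray leading "L", as in the field-descriptor form "Ljava/lang/InternalError;". With that malformed name FindClass() returns NULL, so the intended InternalError is never thrown and callers instead see a NoClassDefFoundError. A minimal sketch of a THROW-style helper, written here for illustration rather than taken from Hadoop's actual THROW macro, shows why the name format matters:

#include <jni.h>

/*
 * Illustrative helper (not Hadoop's actual THROW macro): look up an exception
 * class by its JNI internal name and throw it with a message.
 */
static void throw_by_name(JNIEnv *env, const char *class_name, const char *msg)
{
  jclass cls = (*env)->FindClass(env, class_name);
  if (cls != NULL) {
    (*env)->ThrowNew(env, cls, msg);
    (*env)->DeleteLocalRef(env, cls);
  }
  /* If FindClass() failed, it has already queued a NoClassDefFoundError. */
}

/*
 * Correct: internal form, slashes only.
 *   throw_by_name(env, "java/lang/InternalError", "LZ4_compress failed");
 * Incorrect: descriptor-style name; FindClass() returns NULL and the wrong
 * error (NoClassDefFoundError) surfaces in Java.
 *   throw_by_name(env, "Ljava/lang/InternalError;", "LZ4_compress failed");
 */
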

+ 4 - 1
hadoop-common-project/hadoop-common/CHANGES.txt

@@ -383,7 +383,10 @@ Release 2.8.0 - UNRELEASED
     (Brahma Reddy Battula via aajisaka)
 
     HADOOP-12076. Incomplete Cache Mechanism in CredentialProvider API.
-    (Larry McCay via cnauroth)
+    (Larry McCay via cnauroth) 
+
+    HADOOP-8151. Error handling in snappy decompressor throws invalid
+    exceptions. (Matt Foley via harsh)
 
 Release 2.7.1 - UNRELEASED
 

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Compressor.c

@@ -75,7 +75,7 @@ JNIEXPORT jint JNICALL Java_org_apache_hadoop_io_compress_lz4_Lz4Compressor_comp
 
   compressed_direct_buf_len = LZ4_compress(uncompressed_bytes, compressed_bytes, uncompressed_direct_buf_len);
   if (compressed_direct_buf_len < 0){
-    THROW(env, "Ljava/lang/InternalError", "LZ4_compress failed");
+    THROW(env, "java/lang/InternalError", "LZ4_compress failed");
   }
 
   (*env)->SetIntField(env, thisj, Lz4Compressor_uncompressedDirectBufLen, 0);

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.c

@@ -72,7 +72,7 @@ JNIEXPORT jint JNICALL Java_org_apache_hadoop_io_compress_lz4_Lz4Decompressor_de
 
   uncompressed_direct_buf_len = LZ4_decompress_safe(compressed_bytes, uncompressed_bytes, compressed_direct_buf_len, uncompressed_direct_buf_len);
   if (uncompressed_direct_buf_len < 0) {
-    THROW(env, "Ljava/lang/InternalError", "LZ4_uncompress_unknownOutputSize failed.");
+    THROW(env, "java/lang/InternalError", "LZ4_uncompress_unknownOutputSize failed.");
   }
 
   (*env)->SetIntField(env, thisj, Lz4Decompressor_compressedDirectBufLen, 0);

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c

@@ -126,11 +126,11 @@ JNIEXPORT jint JNICALL Java_org_apache_hadoop_io_compress_snappy_SnappyCompresso
   ret = dlsym_snappy_compress(uncompressed_bytes, uncompressed_direct_buf_len,
         compressed_bytes, &buf_len);
   if (ret != SNAPPY_OK){
-    THROW(env, "Ljava/lang/InternalError", "Could not compress data. Buffer length is too small.");
+    THROW(env, "java/lang/InternalError", "Could not compress data. Buffer length is too small.");
     return 0;
   }
   if (buf_len > JINT_MAX) {
-    THROW(env, "Ljava/lang/InternalError", "Invalid return buffer length.");
+    THROW(env, "java/lang/InternalError", "Invalid return buffer length.");
     return 0;
   }
 

+ 3 - 3
hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.c

@@ -118,11 +118,11 @@ JNIEXPORT jint JNICALL Java_org_apache_hadoop_io_compress_snappy_SnappyDecompres
   ret = dlsym_snappy_uncompress(compressed_bytes, compressed_direct_buf_len,
         uncompressed_bytes, &uncompressed_direct_buf_len);
   if (ret == SNAPPY_BUFFER_TOO_SMALL){
-    THROW(env, "Ljava/lang/InternalError", "Could not decompress data. Buffer length is too small.");
+    THROW(env, "java/lang/InternalError", "Could not decompress data. Buffer length is too small.");
   } else if (ret == SNAPPY_INVALID_INPUT){
-    THROW(env, "Ljava/lang/InternalError", "Could not decompress data. Input is invalid.");
+    THROW(env, "java/lang/InternalError", "Could not decompress data. Input is invalid.");
   } else if (ret != SNAPPY_OK){
-    THROW(env, "Ljava/lang/InternalError", "Could not decompress data.");
+    THROW(env, "java/lang/InternalError", "Could not decompress data.");
   }
 
   (*env)->SetIntField(env, thisj, SnappyDecompressor_compressedDirectBufLen, 0);