
HADOOP-9309. Test failures on Windows due to UnsatisfiedLinkError in NativeCodeLoader#buildSupportsSnappy. Contributed by Arpit Agarwal.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-trunk-win@1448769 13f79535-47bb-0310-9956-ffa450edef68
Suresh Srinivas, 12 years ago
Commit f827d6a634
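
In short: the Windows native build in native.vcxproj did not compile NativeCodeLoader.c or the LZ4 sources, so the Windows native library lacked the JNI symbol behind NativeCodeLoader#buildSupportsSnappy and tests failed with UnsatisfiedLinkError. The patch adds those four C files to the Visual Studio project and makes them build under MSVC: the CMake-generated config.h is included only on UNIX, local variable declarations are hoisted to the top of each function body, and warning C4244 is suppressed. Sketches of these patterns follow the corresponding hunks below.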

+3 -0  hadoop-common-project/hadoop-common/CHANGES.branch-trunk-win.txt

@@ -83,6 +83,9 @@ branch-trunk-win changes - unreleased
   HADOOP-9313. Remove spurious mkdir from hadoop-config.cmd.
   (Ivan Mitic via suresh)
 
+  HADOOP-9309. Test failures on Windows due to UnsatisfiedLinkError
+  in NativeCodeLoader#buildSupportsSnappy. (Arpit Agarwal via suresh)
+
 Patch equivalent to trunk committed to branch-trunk-win
 
   HADOOP-8924. Add maven plugin alternative to shell script to save

+5 -0  hadoop-common-project/hadoop-common/src/main/native/native.vcxproj

@@ -41,6 +41,7 @@
       <PreprocessorDefinitions>WIN32;NDEBUG;_WINDOWS;_USRDLL;NATIVE_EXPORTS;%(PreprocessorDefinitions)</PreprocessorDefinitions>
       <AdditionalIncludeDirectories>..\winutils\include;..\..\..\target\native\javah;%JAVA_HOME%\include;%JAVA_HOME%\include\win32;.\src;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
       <CompileAs>CompileAsC</CompileAs>
+      <DisableSpecificWarnings>4244</DisableSpecificWarnings>
     </ClCompile>
     <Link>
       <SubSystem>Windows</SubSystem>
@@ -52,9 +53,13 @@
     </Link>
   </ItemDefinitionGroup>
   <ItemGroup>
+    <ClCompile Include="src\org\apache\hadoop\io\compress\lz4\lz4.c" />
+    <ClCompile Include="src\org\apache\hadoop\io\compress\lz4\Lz4Compressor.c" />
+    <ClCompile Include="src\org\apache\hadoop\io\compress\lz4\Lz4Decompressor.c" />
     <ClCompile Include="src\org\apache\hadoop\io\nativeio\file_descriptor.c" />
     <ClCompile Include="src\org\apache\hadoop\io\nativeio\NativeIO.c" />
     <ClCompile Include="src\org\apache\hadoop\util\bulk_crc32.c" />
+    <ClCompile Include="src\org\apache\hadoop\util\NativeCodeLoader.c" />
     <ClCompile Include="src\org\apache\hadoop\util\NativeCrc32.c" />
   </ItemGroup>
   <ItemGroup>
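
The project-file changes above add the LZ4 sources and NativeCodeLoader.c to the MSVC build and disable warning C4244 ("conversion from 'type1' to 'type2', possible loss of data") for these files. A minimal illustration, not taken from Hadoop, of the kind of implicit narrowing that triggers it:

    /* Illustration only (not Hadoop code): implicit narrowing conversions
     * like the one below are what MSVC reports as warning C4244, which the
     * project file now suppresses for the native sources. */
    int truncate_example(double value)
    {
        int whole = value;   /* double -> int: C4244, possible loss of data */
        return whole;
    }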

+12 -0  hadoop-common-project/hadoop-common/src/main/native/native.vcxproj.filters

@@ -27,6 +27,18 @@
     <ClCompile Include="src\org\apache\hadoop\util\NativeCrc32.c">
       <Filter>Source Files</Filter>
     </ClCompile>
+    <ClCompile Include="src\org\apache\hadoop\util\NativeCodeLoader.c">
+      <Filter>Source Files</Filter>
+    </ClCompile>
+    <ClCompile Include="src\org\apache\hadoop\io\compress\lz4\lz4.c">
+      <Filter>Source Files</Filter>
+    </ClCompile>
+    <ClCompile Include="src\org\apache\hadoop\io\compress\lz4\Lz4Compressor.c">
+      <Filter>Source Files</Filter>
+    </ClCompile>
+    <ClCompile Include="src\org\apache\hadoop\io\compress\lz4\Lz4Decompressor.c">
+      <Filter>Source Files</Filter>
+    </ClCompile>
   </ItemGroup>
   <ItemGroup>
     <ClInclude Include="..\src\org\apache\hadoop\util\crc32_zlib_polynomial_tables.h">
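
(The .vcxproj.filters entries above only control how the added sources are grouped in the Visual Studio Solution Explorer; the build itself is driven by native.vcxproj.)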

+10 -3  hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Compressor.c

@@ -16,10 +16,14 @@
  * limitations under the License.
  */
 
-#include "config.h"
+
 #include "org_apache_hadoop.h"
 #include "org_apache_hadoop_io_compress_lz4_Lz4Compressor.h"
 
+#ifdef UNIX
+#include "config.h"
+#endif // UNIX
+
 //****************************
 // Simple Functions
 //****************************
@@ -61,6 +65,9 @@ JNIEXPORT void JNICALL Java_org_apache_hadoop_io_compress_lz4_Lz4Compressor_init
 
 JNIEXPORT jint JNICALL Java_org_apache_hadoop_io_compress_lz4_Lz4Compressor_compressBytesDirect
 (JNIEnv *env, jobject thisj){
+  const char* uncompressed_bytes;
+  char *compressed_bytes;
+
   // Get members of Lz4Compressor
   jobject clazz = (*env)->GetStaticObjectField(env, thisj, Lz4Compressor_clazz);
   jobject uncompressed_direct_buf = (*env)->GetObjectField(env, thisj, Lz4Compressor_uncompressedDirectBuf);
@@ -70,7 +77,7 @@ JNIEXPORT jint JNICALL Java_org_apache_hadoop_io_compress_lz4_Lz4Compressor_comp
 
   // Get the input direct buffer
   LOCK_CLASS(env, clazz, "Lz4Compressor");
-  const char* uncompressed_bytes = (const char*)(*env)->GetDirectBufferAddress(env, uncompressed_direct_buf);
+  uncompressed_bytes = (const char*)(*env)->GetDirectBufferAddress(env, uncompressed_direct_buf);
   UNLOCK_CLASS(env, clazz, "Lz4Compressor");
 
   if (uncompressed_bytes == 0) {
@@ -79,7 +86,7 @@ JNIEXPORT jint JNICALL Java_org_apache_hadoop_io_compress_lz4_Lz4Compressor_comp
 
   // Get the output direct buffer
   LOCK_CLASS(env, clazz, "Lz4Compressor");
-  char* compressed_bytes = (char *)(*env)->GetDirectBufferAddress(env, compressed_direct_buf);
+  compressed_bytes = (char *)(*env)->GetDirectBufferAddress(env, compressed_direct_buf);
   UNLOCK_CLASS(env, clazz, "Lz4Compressor");
 
   if (compressed_bytes == 0) {
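
The hoisted declarations in compressBytesDirect (and in the decompressor below) are needed because MSVC compiles these files as C (CompileAsC) and enforces C89, which rejects declarations that appear after the first statement in a block. A self-contained sketch of the pattern, not taken from Hadoop:

    #include <stdio.h>

    /* Sketch only: under C89, every declaration must precede the first
     * statement in its block, so pointers are declared up front and
     * assigned later, as the patch does with the buffer pointers. */
    static void c89_layout(const char *input)
    {
        const char *src;    /* declarations first ... */
        int length;

        printf("processing: %s\n", input);  /* ... statements afterwards */
        src = input;        /* assigned where C99 code would declare it */
        length = 0;
        while (src[length] != '\0') {
            length++;
        }
        printf("length = %d\n", length);
    }

    int main(void)
    {
        c89_layout("lz4");
        return 0;
    }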

+9 -3  hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.c

@@ -16,10 +16,13 @@
  * limitations under the License.
  */
 
-#include "config.h"
 #include "org_apache_hadoop.h"
 #include "org_apache_hadoop_io_compress_lz4_Lz4Decompressor.h"
 
+#ifdef UNIX
+#include "config.h"
+#endif // UNIX
+
 int LZ4_uncompress_unknownOutputSize(const char* source, char* dest, int isize, int maxOutputSize);
 
 /*
@@ -58,6 +61,9 @@ JNIEXPORT void JNICALL Java_org_apache_hadoop_io_compress_lz4_Lz4Decompressor_in
 
 JNIEXPORT jint JNICALL Java_org_apache_hadoop_io_compress_lz4_Lz4Decompressor_decompressBytesDirect
 (JNIEnv *env, jobject thisj){
+  const char *compressed_bytes;
+  char *uncompressed_bytes;
+
   // Get members of Lz4Decompressor
   jobject clazz = (*env)->GetStaticObjectField(env,thisj, Lz4Decompressor_clazz);
   jobject compressed_direct_buf = (*env)->GetObjectField(env,thisj, Lz4Decompressor_compressedDirectBuf);
@@ -67,7 +73,7 @@ JNIEXPORT jint JNICALL Java_org_apache_hadoop_io_compress_lz4_Lz4Decompressor_de
 
   // Get the input direct buffer
   LOCK_CLASS(env, clazz, "Lz4Decompressor");
-  const char* compressed_bytes = (const char*)(*env)->GetDirectBufferAddress(env, compressed_direct_buf);
+  compressed_bytes = (const char*)(*env)->GetDirectBufferAddress(env, compressed_direct_buf);
   UNLOCK_CLASS(env, clazz, "Lz4Decompressor");
 
   if (compressed_bytes == 0) {
@@ -76,7 +82,7 @@ JNIEXPORT jint JNICALL Java_org_apache_hadoop_io_compress_lz4_Lz4Decompressor_de
 
   // Get the output direct buffer
   LOCK_CLASS(env, clazz, "Lz4Decompressor");
-  char* uncompressed_bytes = (char *)(*env)->GetDirectBufferAddress(env, uncompressed_direct_buf);
+  uncompressed_bytes = (char *)(*env)->GetDirectBufferAddress(env, uncompressed_direct_buf);
   UNLOCK_CLASS(env, clazz, "Lz4Decompressor");
 
   if (uncompressed_bytes == 0) {

+5 -1  hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCodeLoader.c

@@ -16,7 +16,11 @@
  * limitations under the License.
  */
 
+#include "org_apache_hadoop.h"
+
+#ifdef UNIX
 #include "config.h"
+#endif // UNIX
 
 #include <jni.h>
 
@@ -28,4 +32,4 @@ JNIEXPORT jboolean JNICALL Java_org_apache_hadoop_util_NativeCodeLoader_buildSup
 #else
   return JNI_FALSE;
 #endif
-}
+}
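
For reference, the hunks above imply that NativeCodeLoader.c now reads roughly as follows. The parameter list, the HADOOP_SNAPPY_LIBRARY guard, and the JNI_TRUE branch are not visible in the diff and are assumptions; on UNIX that macro would come from the CMake-generated config.h, which is why the include is now UNIX-only:

    #include "org_apache_hadoop.h"

    #ifdef UNIX
    #include "config.h"
    #endif // UNIX

    #include <jni.h>

    /* Assumed shape of the JNI entry point; only the #else branch and the
     * closing brace are visible in the hunk above. */
    JNIEXPORT jboolean JNICALL Java_org_apache_hadoop_util_NativeCodeLoader_buildSupportsSnappy
      (JNIEnv *env, jclass clazz)
    {
    #ifdef HADOOP_SNAPPY_LIBRARY
      return JNI_TRUE;
    #else
      return JNI_FALSE;
    #endif
    }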