
HADOOP-8620. Add -Drequire.fuse and -Drequire.snappy. Contributed by Colin Patrick McCabe

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1368256 13f79535-47bb-0310-9956-ffa450edef68
Eli Collins 12 years ago
parent
commit
f23d581bfd
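
This commit threads two new Maven flags, -Drequire.snappy and -Drequire.fuse, through to the native CMake builds so that a missing dependency aborts the build instead of being silently skipped. As a rough usage sketch (profile name and invocation style follow the usual Hadoop build conventions, not anything stated in this commit):

    mvn package -Pnative -DskipTests -Drequire.snappy=true
    mvn package -Pnative -DskipTests -Drequire.fuse=true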

+ 3 - 0
hadoop-common-project/hadoop-common/CHANGES.txt

@@ -377,6 +377,9 @@ Release 2.0.0-alpha - 05-23-2012
 
     HADOOP-8366 Use ProtoBuf for RpcResponseHeader (sanjay radia)
 
+    HADOOP-8620. Add -Drequire.fuse and -Drequire.snappy. (Colin
+    Patrick McCabe via eli)
+
   OPTIMIZATIONS
 
     HADOOP-8422. Deprecate FileSystem#getDefault* and getServerDefault

+ 5 - 11
hadoop-common-project/hadoop-common/pom.xml

@@ -31,10 +31,6 @@
   <packaging>jar</packaging>
 
   <properties>
-    <snappy.prefix>/usr/local</snappy.prefix>
-    <snappy.lib>${snappy.prefix}/lib</snappy.lib>
-    <bundle.snappy>false</bundle.snappy>
-    
     <hadoop.component>common</hadoop.component>
     <is.hadoop.component>true</is.hadoop.component>
   </properties>
@@ -524,10 +520,10 @@
         <activeByDefault>false</activeByDefault>
       </activation>
       <properties>
-        <snappy.prefix>/usr/local</snappy.prefix>
-        <snappy.lib>${snappy.prefix}/lib</snappy.lib>
-        <snappy.include>${snappy.prefix}/include</snappy.include>
-        <runas.home></runas.home>
+        <snappy.prefix></snappy.prefix>
+        <snappy.lib></snappy.lib>
+        <snappy.include></snappy.include>
+        <require.snappy>false</require.snappy>
       </properties>
       <build>
         <plugins>
@@ -570,9 +566,7 @@
                 <configuration>
                   <target>
                     <exec executable="cmake" dir="${project.build.directory}/native" failonerror="true">
-                      <arg line="${basedir}/src/ -DGENERATED_JAVAH=${project.build.directory}/native/javah -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model}"/>
-                      <env key="CFLAGS" value="-I${snappy.include}"/>
-                      <env key="LDFLAGS" value="-L${snappy.lib}"/>
+                      <arg line="${basedir}/src/ -DGENERATED_JAVAH=${project.build.directory}/native/javah -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model} -DREQUIRE_SNAPPY=${require.snappy} -DCUSTOM_SNAPPY_PREFIX=${snappy.prefix} -DCUSTOM_SNAPPY_LIB=${snappy.lib} -DCUSTOM_SNAPPY_INCLUDE=${snappy.include}"/>
                     </exec>
                     <exec executable="make" dir="${project.build.directory}/native" failonerror="true">
                       <arg line="VERBOSE=1"/>
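
With this change the snappy location properties default to empty and are passed to CMake directly, instead of being exported through CFLAGS/LDFLAGS. A hedged example of overriding them on the command line (the /opt/snappy paths are purely illustrative):

    mvn package -Pnative -DskipTests \
        -Drequire.snappy=true \
        -Dsnappy.prefix=/opt/snappy \
        -Dsnappy.lib=/opt/snappy/lib \
        -Dsnappy.include=/opt/snappy/include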

+ 14 - 5
hadoop-common-project/hadoop-common/src/CMakeLists.txt

@@ -79,17 +79,26 @@ INCLUDE(CheckCSourceCompiles)
 CHECK_FUNCTION_EXISTS(sync_file_range HAVE_SYNC_FILE_RANGE)
 CHECK_FUNCTION_EXISTS(posix_fadvise HAVE_POSIX_FADVISE)
 
-find_library(SNAPPY_LIBRARY NAMES snappy PATHS)
-find_path(SNAPPY_INCLUDE_DIR NAMES snappy.h PATHS)
-if (SNAPPY_LIBRARY)
+find_library(SNAPPY_LIBRARY 
+    NAMES snappy
+    PATHS ${CUSTOM_SNAPPY_PREFIX} ${CUSTOM_SNAPPY_PREFIX}/lib
+          ${CUSTOM_SNAPPY_PREFIX}/lib64 ${CUSTOM_SNAPPY_LIB})
+find_path(SNAPPY_INCLUDE_DIR 
+    NAMES snappy.h
+    PATHS ${CUSTOM_SNAPPY_PREFIX} ${CUSTOM_SNAPPY_PREFIX}/include
+          ${CUSTOM_SNAPPY_INCLUDE})
+if (SNAPPY_LIBRARY AND SNAPPY_INCLUDE_DIR)
     GET_FILENAME_COMPONENT(HADOOP_SNAPPY_LIBRARY ${SNAPPY_LIBRARY} NAME)
     set(SNAPPY_SOURCE_FILES
         "${D}/io/compress/snappy/SnappyCompressor.c"
         "${D}/io/compress/snappy/SnappyDecompressor.c")
-else (${SNAPPY_LIBRARY})
+else (SNAPPY_LIBRARY AND SNAPPY_INCLUDE_DIR)
     set(SNAPPY_INCLUDE_DIR "")
     set(SNAPPY_SOURCE_FILES "")
-endif (SNAPPY_LIBRARY)
+    IF(REQUIRE_SNAPPY)
+        MESSAGE(FATAL_ERROR "Required snappy library could not be found.  SNAPPY_LIBRARY=${SNAPPY_LIBRARY}, SNAPPY_INCLUDE_DIR=${SNAPPY_INCLUDE_DIR}, CUSTOM_SNAPPY_INCLUDE_DIR=${CUSTOM_SNAPPY_INCLUDE_DIR}, CUSTOM_SNAPPY_PREFIX=${CUSTOM_SNAPPY_PREFIX}, CUSTOM_SNAPPY_INCLUDE=${CUSTOM_SNAPPY_INCLUDE}")
+    ENDIF(REQUIRE_SNAPPY)
+endif (SNAPPY_LIBRARY AND SNAPPY_INCLUDE_DIR)
 
 include_directories(
     ${GENERATED_JAVAH}
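
The Maven exec above amounts to a CMake invocation along these lines; the source path, generated-javah directory, and data model value are assumptions for illustration, not part of the commit:

    cmake path/to/hadoop-common/src \
        -DGENERATED_JAVAH=target/native/javah \
        -DJVM_ARCH_DATA_MODEL=64 \
        -DREQUIRE_SNAPPY=true \
        -DCUSTOM_SNAPPY_PREFIX=/opt/snappy

If REQUIRE_SNAPPY is true and the find_library/find_path lookups fail, the new FATAL_ERROR branch stops the configure step.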

+ 2 - 1
hadoop-hdfs-project/hadoop-hdfs/pom.xml

@@ -33,6 +33,7 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
   <properties>
     <hadoop.component>hdfs</hadoop.component>
     <is.hadoop.component>true</is.hadoop.component>
+    <require.fuse>false</require.fuse>
   </properties>
 
   <dependencies>
@@ -403,7 +404,7 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
                     <mkdir dir="${project.build.directory}/native"/>
                     <exec executable="cmake" dir="${project.build.directory}/native" 
                         failonerror="true">
-                      <arg line="${basedir}/src/ -DGENERATED_JAVAH=${project.build.directory}/native/javah -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model}"/>
+                      <arg line="${basedir}/src/ -DGENERATED_JAVAH=${project.build.directory}/native/javah -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model} -DREQUIRE_FUSE=${require.fuse}"/>
                     </exec>
                     <exec executable="make" dir="${project.build.directory}/native" failonerror="true">
                       <arg line="VERBOSE=1"/>
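
As with the common module, the fuse requirement can now be asserted from Maven; an illustrative (assumed) invocation that would fail the build if fuse_dfs cannot be built:

    mvn package -Pnative -DskipTests -Drequire.fuse=true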

+ 4 - 0
hadoop-hdfs-project/hadoop-hdfs/src/main/native/fuse-dfs/CMakeLists.txt

@@ -71,4 +71,8 @@ IF(FUSE_FOUND)
         m
         pthread
     )
+ELSE(FUSE_FOUND)
+    IF(REQUIRE_FUSE)
+        MESSAGE(FATAL_ERROR "Required component fuse_dfs could not be built.")
+    ENDIF(REQUIRE_FUSE)
 ENDIF(FUSE_FOUND)