HadoopCommon.cmake

#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

#
# Common CMake utilities and configuration, shared by all Native components.
#

#
# Platform-specific prerequisite checks.
#

if(CMAKE_SYSTEM_NAME STREQUAL "SunOS")
  # Only 64-bit Java is supported.
  if(NOT JVM_ARCH_DATA_MODEL EQUAL 64)
    message(FATAL_ERROR "Unrecognised JVM_ARCH_DATA_MODEL '${JVM_ARCH_DATA_MODEL}'. "
      "A 64-bit JVM must be used on Solaris; make sure that one is installed and, "
      "if necessary, that the MAVEN_OPTS environment variable includes '-d64'")
  endif()

  # Only gcc is supported for now.
  if(NOT(CMAKE_COMPILER_IS_GNUCC AND CMAKE_COMPILER_IS_GNUCXX))
    message(FATAL_ERROR "Only gcc is supported on Solaris")
  endif()
endif()

#
# Helper functions and macros.
#

# Add flags to all the CMake compiler variables.
macro(hadoop_add_compiler_flags FLAGS)
  set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${FLAGS}")
  set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${FLAGS}")
endmacro()

# Add flags to all the CMake linker variables.
macro(hadoop_add_linker_flags FLAGS)
  set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${FLAGS}")
  set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} ${FLAGS}")
  set(CMAKE_STATIC_LINKER_FLAGS "${CMAKE_STATIC_LINKER_FLAGS} ${FLAGS}")
endmacro()
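
# Illustrative usage of the flag helpers above; the flags shown here are
# hypothetical examples and are not added by this file:
#   hadoop_add_compiler_flags("-fvisibility=hidden")
#   hadoop_add_linker_flags("-Wl,--as-needed")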

# Compile a library with both shared and static variants.
function(hadoop_add_dual_library LIBNAME)
  add_library(${LIBNAME} SHARED ${ARGN})
  add_library(${LIBNAME}_static STATIC ${ARGN})
  set_target_properties(${LIBNAME}_static PROPERTIES OUTPUT_NAME ${LIBNAME})
endfunction()

# Link both a static and a dynamic target against some libraries.
function(hadoop_target_link_dual_libraries LIBNAME)
  target_link_libraries(${LIBNAME} ${ARGN})
  target_link_libraries(${LIBNAME}_static ${ARGN})
endfunction()
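
# Sketch of how the dual-library helpers are typically combined; the target
# name "example" and its source file are hypothetical, not part of this build:
#   hadoop_add_dual_library(example example.c)
#   hadoop_target_link_dual_libraries(example ${JAVA_JVM_LIBRARY})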

# Set all the output directories to the same place.
function(hadoop_output_directory TGT DIR)
  set_target_properties(${TGT} PROPERTIES RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/${DIR}")
  set_target_properties(${TGT} PROPERTIES ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/${DIR}")
  set_target_properties(${TGT} PROPERTIES LIBRARY_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/${DIR}")
endfunction()

# Set the target directories for dynamic and static builds.
function(hadoop_dual_output_directory TGT DIR)
  hadoop_output_directory(${TGT} "${DIR}")
  hadoop_output_directory(${TGT}_static "${DIR}")
endfunction()
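
# Continuing the hypothetical "example" target above, both the shared and the
# static variant could be routed to one directory under the build tree (the
# path is illustrative only):
#   hadoop_dual_output_directory(example target/usr/local/lib)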

# Alter the behavior of find_package and find_library so that we find only
# shared libraries with a given version suffix. You should save
# CMAKE_FIND_LIBRARY_SUFFIXES before calling this function and restore it
# afterwards. On Windows this function is a no-op. Windows does not encode
# version number information into library path names.
macro(hadoop_set_find_shared_library_version LVERS)
  if(${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
    # Mac OS uses .dylib
    set(CMAKE_FIND_LIBRARY_SUFFIXES ".${LVERS}.dylib")
  elseif(${CMAKE_SYSTEM_NAME} MATCHES "FreeBSD")
    # FreeBSD always installs unversioned .so libraries.
    set(CMAKE_FIND_LIBRARY_SUFFIXES ".so")
  elseif(${CMAKE_SYSTEM_NAME} MATCHES "Windows")
    # Windows doesn't support finding shared libraries by version.
  else()
    # Most UNIX variants use .so
    set(CMAKE_FIND_LIBRARY_SUFFIXES ".so.${LVERS}")
  endif()
endmacro()
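
# A sketch of the save/call/restore pattern described above, using zlib purely
# as an illustration (the version "1" and the ZLIB_LIBRARY variable are
# hypothetical here):
#   set(STORED_CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES})
#   hadoop_set_find_shared_library_version("1")
#   find_library(ZLIB_LIBRARY NAMES z)
#   set(CMAKE_FIND_LIBRARY_SUFFIXES ${STORED_CMAKE_FIND_LIBRARY_SUFFIXES})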

# Alter the behavior of find_package and find_library so that we find only
# shared libraries without any version suffix. You should save
# CMAKE_FIND_LIBRARY_SUFFIXES before calling this function and restore it
# afterwards. On Windows this function is a no-op. Windows does not encode
# version number information into library path names.
macro(hadoop_set_find_shared_library_without_version)
  if(${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
    # Mac OS uses .dylib
    set(CMAKE_FIND_LIBRARY_SUFFIXES ".dylib")
  elseif(${CMAKE_SYSTEM_NAME} MATCHES "Windows")
    # No effect
  else()
    # Most UNIX variants use .so
    set(CMAKE_FIND_LIBRARY_SUFFIXES ".so")
  endif()
endmacro()
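
# The unversioned variant follows the same save/call/restore pattern, e.g.
# (illustrative only; the SNAPPY_LIBRARY variable is hypothetical):
#   set(STORED_CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES})
#   hadoop_set_find_shared_library_without_version()
#   find_library(SNAPPY_LIBRARY NAMES snappy)
#   set(CMAKE_FIND_LIBRARY_SUFFIXES ${STORED_CMAKE_FIND_LIBRARY_SUFFIXES})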

#
# Configuration.
#

# Initialise the shared gcc/g++ flags if they aren't already defined.
if(NOT DEFINED GCC_SHARED_FLAGS)
  set(GCC_SHARED_FLAGS "-g -O2 -Wall -pthread -D_FILE_OFFSET_BITS=64")
endif()

# Add support for other compilers here, if necessary; for now the assumption
# is that GCC or a GCC-compatible compiler is being used.

# Set the shared GCC-compatible compiler and linker flags.
hadoop_add_compiler_flags("${GCC_SHARED_FLAGS}")
hadoop_add_linker_flags("${LINKER_SHARED_FLAGS}")
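
# Because GCC_SHARED_FLAGS is only initialised when undefined, it can be
# overridden at configure time; a hypothetical invocation (the flag values
# shown are illustrative only):
#   cmake -DGCC_SHARED_FLAGS="-g -O3 -Wall -pthread -D_FILE_OFFSET_BITS=64" <source-dir>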

#
# Linux-specific configuration.
#
if(CMAKE_SYSTEM_NAME STREQUAL "Linux")
  # Make GNU extensions available.
  hadoop_add_compiler_flags("-D_GNU_SOURCE")

  # If JVM_ARCH_DATA_MODEL is 32, compile all binaries as 32-bit.
  if(JVM_ARCH_DATA_MODEL EQUAL 32)
    # Force 32-bit code generation on amd64/x86_64, ppc64, sparc64.
    if(CMAKE_COMPILER_IS_GNUCC AND CMAKE_SYSTEM_PROCESSOR MATCHES ".*64")
      hadoop_add_compiler_flags("-m32")
      hadoop_add_linker_flags("-m32")
    endif()
    # Set CMAKE_SYSTEM_PROCESSOR to ensure that find_package(JNI) will use 32-bit libraries.
    if(CMAKE_SYSTEM_PROCESSOR STREQUAL "x86_64" OR CMAKE_SYSTEM_PROCESSOR STREQUAL "amd64")
      set(CMAKE_SYSTEM_PROCESSOR "i686")
    endif()
  endif()

  # Determine the float ABI of the JVM on ARM.
  if(CMAKE_SYSTEM_PROCESSOR MATCHES "^arm")
    find_program(READELF readelf)
    if(READELF MATCHES "NOTFOUND")
      message(WARNING "readelf not found; JVM float ABI detection disabled")
    else()
      execute_process(
        COMMAND ${READELF} -A ${JAVA_JVM_LIBRARY}
        OUTPUT_VARIABLE JVM_ELF_ARCH
        ERROR_QUIET)
      if(NOT JVM_ELF_ARCH MATCHES "Tag_ABI_VFP_args: VFP registers")
        # Test compilation with -mfloat-abi=softfp using an arbitrary libc function
        # (typically fails with "fatal error: bits/predefs.h: No such file or directory"
        # if soft-float dev libraries are not installed).
        message("Soft-float JVM detected")
        include(CMakePushCheckState)
        cmake_push_check_state()
        set(CMAKE_REQUIRED_FLAGS "${CMAKE_REQUIRED_FLAGS} -mfloat-abi=softfp")
        include(CheckSymbolExists)
        check_symbol_exists(exit stdlib.h SOFTFP_AVAILABLE)
        if(NOT SOFTFP_AVAILABLE)
          message(FATAL_ERROR "Soft-float dev libraries required (e.g. 'apt-get install libc6-dev-armel' on Debian/Ubuntu)")
        endif()
        cmake_pop_check_state()
        hadoop_add_compiler_flags("-mfloat-abi=softfp")
      endif()
    endif()
  endif()

#
# Solaris-specific configuration.
#
elseif(CMAKE_SYSTEM_NAME STREQUAL "SunOS")
  # Solaris flags. 64-bit compilation is mandatory, and is checked earlier.
  hadoop_add_compiler_flags("-m64 -D__EXTENSIONS__ -D_POSIX_PTHREAD_SEMANTICS -D_XOPEN_SOURCE=500")
  hadoop_add_linker_flags("-m64")

  # CMAKE_SYSTEM_PROCESSOR is set to the output of 'uname -p', which on Solaris is
  # the 'lowest' ISA supported, i.e. 'i386' or 'sparc'. However, in order for the
  # standard CMake modules to look in the right places it needs to reflect the required
  # compilation mode, i.e. 64-bit. We therefore force it to either 'amd64' or 'sparcv9'.
  if(CMAKE_SYSTEM_PROCESSOR STREQUAL "i386")
    set(CMAKE_SYSTEM_PROCESSOR "amd64")
    set(CMAKE_LIBRARY_ARCHITECTURE "amd64")
  elseif(CMAKE_SYSTEM_PROCESSOR STREQUAL "sparc")
    set(CMAKE_SYSTEM_PROCESSOR "sparcv9")
    set(CMAKE_LIBRARY_ARCHITECTURE "sparcv9")
  else()
    message(FATAL_ERROR "Unrecognised CMAKE_SYSTEM_PROCESSOR ${CMAKE_SYSTEM_PROCESSOR}")
  endif()
endif()