HadoopCommon.cmake

#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

#
# Common CMake utilities and configuration, shared by all Native components.
#

#
# Platform-specific prerequisite checks.
#
if(CMAKE_SYSTEM_NAME STREQUAL "SunOS")
  # Only 64-bit Java is supported.
  if(NOT JVM_ARCH_DATA_MODEL EQUAL 64)
    message(FATAL_ERROR "Unrecognised JVM_ARCH_DATA_MODEL '${JVM_ARCH_DATA_MODEL}'. "
                        "A 64-bit JVM must be used on Solaris, make sure that one is installed and, "
                        "if necessary, the MAVEN_OPTS environment variable includes '-d64'")
  endif()

  # Only gcc is supported for now.
  if(NOT (CMAKE_COMPILER_IS_GNUCC AND CMAKE_COMPILER_IS_GNUCXX))
    message(FATAL_ERROR "Only gcc is supported on Solaris")
  endif()
endif()

#
# Helper functions and macros.
#

# Add flags to all the CMake compiler variables.
macro(hadoop_add_compiler_flags FLAGS)
  set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${FLAGS}")
  set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${FLAGS}")
endmacro()

# Add flags to all the CMake linker variables.
macro(hadoop_add_linker_flags FLAGS)
  set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${FLAGS}")
  set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} ${FLAGS}")
endmacro()
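
# Illustrative usage only (the flag values below are arbitrary examples, not
# flags required by this file):
#
#   hadoop_add_compiler_flags("-DEXAMPLE_DEFINE=1")
#   hadoop_add_linker_flags("-Wl,--as-needed")
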
# Compile a library with both shared and static variants.
function(hadoop_add_dual_library LIBNAME)
  add_library(${LIBNAME} SHARED ${ARGN})
  add_library(${LIBNAME}_static STATIC ${ARGN})
  set_target_properties(${LIBNAME}_static PROPERTIES OUTPUT_NAME ${LIBNAME})
endfunction()

# Link both a static and a dynamic target against some libraries.
function(hadoop_target_link_dual_libraries LIBNAME)
  target_link_libraries(${LIBNAME} ${ARGN})
  target_link_libraries(${LIBNAME}_static ${ARGN})
endfunction()

# Set all the output directories to the same place.
function(hadoop_output_directory TGT DIR)
  set_target_properties(${TGT} PROPERTIES RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/${DIR}")
  set_target_properties(${TGT} PROPERTIES ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/${DIR}")
  set_target_properties(${TGT} PROPERTIES LIBRARY_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/${DIR}")
endfunction()

# Set the target directories for dynamic and static builds.
function(hadoop_dual_output_directory TGT DIR)
  hadoop_output_directory(${TGT} "${DIR}")
  hadoop_output_directory(${TGT}_static "${DIR}")
endfunction()
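
# Illustrative usage only ("example" and its source files are hypothetical
# names, not targets defined by this file): build shared and static variants
# of one library, link both against the same dependencies, and place the
# output in a single directory.
#
#   hadoop_add_dual_library(example example.c util.c)
#   hadoop_target_link_dual_libraries(example ${CMAKE_THREAD_LIBS_INIT})
#   hadoop_dual_output_directory(example target/usr/local/lib)
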
# Alter the behavior of find_package and find_library so that we find only
# shared libraries with a given version suffix. You should save
# CMAKE_FIND_LIBRARY_SUFFIXES before calling this macro and restore it
# afterwards. On Windows this macro is a no-op, since Windows does not encode
# version number information into library path names.
macro(hadoop_set_find_shared_library_version LVERS)
  if(${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
    # Mac OS uses .dylib
    set(CMAKE_FIND_LIBRARY_SUFFIXES ".${LVERS}.dylib")
  elseif(${CMAKE_SYSTEM_NAME} MATCHES "FreeBSD")
    # FreeBSD always installs an unversioned .so.
    set(CMAKE_FIND_LIBRARY_SUFFIXES ".so")
  elseif(${CMAKE_SYSTEM_NAME} MATCHES "Windows")
    # Windows doesn't support finding shared libraries by version.
  else()
    # Most UNIX variants use .so
    set(CMAKE_FIND_LIBRARY_SUFFIXES ".so.${LVERS}")
  endif()
endmacro()
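
# Illustrative usage only (the zlib lookup and version "1" are sample values,
# not something this file performs): save the suffix list, narrow the search
# to one shared-library version, then restore the original suffixes.
#
#   set(STORED_CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES})
#   hadoop_set_find_shared_library_version("1")
#   find_library(ZLIB_LIBRARY NAMES z)
#   set(CMAKE_FIND_LIBRARY_SUFFIXES ${STORED_CMAKE_FIND_LIBRARY_SUFFIXES})
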
# Alter the behavior of find_package and find_library so that we find only
# shared libraries without any version suffix. You should save
# CMAKE_FIND_LIBRARY_SUFFIXES before calling this macro and restore it
# afterwards. On Windows this macro is a no-op, since Windows does not encode
# version number information into library path names.
macro(hadoop_set_find_shared_library_without_version)
  if(${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
    # Mac OS uses .dylib
    set(CMAKE_FIND_LIBRARY_SUFFIXES ".dylib")
  elseif(${CMAKE_SYSTEM_NAME} MATCHES "Windows")
    # No effect
  else()
    # Most UNIX variants use .so
    set(CMAKE_FIND_LIBRARY_SUFFIXES ".so")
  endif()
endmacro()

# Set the shared compiler flags.
# GNU C/C++ and Clang/AppleClang are handled here; add other compilers as necessary.
if(CMAKE_C_COMPILER_ID STREQUAL "GNU" OR
   CMAKE_C_COMPILER_ID STREQUAL "Clang" OR
   CMAKE_C_COMPILER_ID STREQUAL "AppleClang")
  if(NOT DEFINED GCC_SHARED_FLAGS)
    find_package(Threads REQUIRED)
    if(CMAKE_USE_PTHREADS_INIT)
      set(GCC_SHARED_FLAGS "-g -O2 -Wall -pthread -D_FILE_OFFSET_BITS=64")
    else()
      set(GCC_SHARED_FLAGS "-g -O2 -Wall -D_FILE_OFFSET_BITS=64")
    endif()
  endif()
endif()

# Apply the shared compiler and linker flags.
hadoop_add_compiler_flags("${GCC_SHARED_FLAGS}")
hadoop_add_linker_flags("${LINKER_SHARED_FLAGS}")

#
# Linux-specific configuration.
#
if(CMAKE_SYSTEM_NAME STREQUAL "Linux")
  # Make GNU extensions available.
  hadoop_add_compiler_flags("-D_GNU_SOURCE")

  # If JVM_ARCH_DATA_MODEL is 32, compile all binaries as 32-bit.
  if(JVM_ARCH_DATA_MODEL EQUAL 32)
    # Force 32-bit code generation on amd64/x86_64, ppc64, sparc64.
    if(CMAKE_COMPILER_IS_GNUCC AND CMAKE_SYSTEM_PROCESSOR MATCHES ".*64")
      hadoop_add_compiler_flags("-m32")
      hadoop_add_linker_flags("-m32")
    endif()
    # Set CMAKE_SYSTEM_PROCESSOR to ensure that find_package(JNI) will use 32-bit libraries.
    if(CMAKE_SYSTEM_PROCESSOR STREQUAL "x86_64" OR CMAKE_SYSTEM_PROCESSOR STREQUAL "amd64")
      set(CMAKE_SYSTEM_PROCESSOR "i686")
    endif()
  endif()

  # Determine the float ABI of the JVM on ARM.
  if(CMAKE_SYSTEM_PROCESSOR MATCHES "^arm")
    find_program(READELF readelf)
    if(READELF MATCHES "NOTFOUND")
      message(WARNING "readelf not found; JVM float ABI detection disabled")
    else()
      execute_process(
        COMMAND ${READELF} -A ${JAVA_JVM_LIBRARY}
        OUTPUT_VARIABLE JVM_ELF_ARCH
        ERROR_QUIET)
      if(NOT JVM_ELF_ARCH MATCHES "Tag_ABI_VFP_args: VFP registers")
        # Test compilation with -mfloat-abi=softfp using an arbitrary libc function
        # (typically fails with "fatal error: bits/predefs.h: No such file or directory"
        # if soft-float dev libraries are not installed).
        message("Soft-float JVM detected")
        include(CMakePushCheckState)
        cmake_push_check_state()
        set(CMAKE_REQUIRED_FLAGS "${CMAKE_REQUIRED_FLAGS} -mfloat-abi=softfp")
        include(CheckSymbolExists)
        check_symbol_exists(exit stdlib.h SOFTFP_AVAILABLE)
        if(NOT SOFTFP_AVAILABLE)
          message(FATAL_ERROR "Soft-float dev libraries required (e.g. 'apt-get install libc6-dev-armel' on Debian/Ubuntu)")
        endif()
        cmake_pop_check_state()
        hadoop_add_compiler_flags("-mfloat-abi=softfp")
      endif()
    endif()
  endif()

#
# Solaris-specific configuration.
#
elseif(CMAKE_SYSTEM_NAME STREQUAL "SunOS")
  # Solaris flags. 64-bit compilation is mandatory and was checked earlier.
  hadoop_add_compiler_flags("-m64 -D_POSIX_C_SOURCE=200112L -D__EXTENSIONS__ -D_POSIX_PTHREAD_SEMANTICS")
  set(CMAKE_CXX_STANDARD 98)
  hadoop_add_linker_flags("-m64")

  # CMAKE_SYSTEM_PROCESSOR is set to the output of 'uname -p', which on Solaris is
  # the 'lowest' ISA supported, i.e. 'i386' or 'sparc'. However, in order for the
  # standard CMake modules to look in the right places it needs to reflect the required
  # compilation mode, i.e. 64-bit. We therefore force it to either 'amd64' or 'sparcv9'.
  if(CMAKE_SYSTEM_PROCESSOR STREQUAL "i386")
    set(CMAKE_SYSTEM_PROCESSOR "amd64")
    set(CMAKE_LIBRARY_ARCHITECTURE "amd64")
  elseif(CMAKE_SYSTEM_PROCESSOR STREQUAL "sparc")
    set(CMAKE_SYSTEM_PROCESSOR "sparcv9")
    set(CMAKE_LIBRARY_ARCHITECTURE "sparcv9")
  else()
    message(FATAL_ERROR "Unrecognised CMAKE_SYSTEM_PROCESSOR ${CMAKE_SYSTEM_PROCESSOR}")
  endif()
endif()

# Set GNU99 as the C standard to use.
set(CMAKE_C_STANDARD 99)