#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SHELLDOC-IGNORE
#

# Override these to match Apache Hadoop's requirements
personality_plugins "all,-ant,-gradle,-scalac,-scaladoc"
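
# Typical invocation (illustrative; --personality is Yetus' flag for loading
# this file, and the path depends on your checkout):
#   test-patch.sh --personality=dev-support/bin/hadoop.sh <patch-or-issue>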

# These flags are needed to run Yetus against Hadoop on Windows.
WINDOWS_FLAGS="-Pnative-win
-Dhttps.protocols=TLSv1.2
-Drequire.openssl
-Drequire.test.libhadoop
-Dshell-executable=${BASH_EXECUTABLE}
-Dopenssl.prefix=${VCPKG_INSTALLED_PACKAGES}
-Dcmake.prefix.path=${VCPKG_INSTALLED_PACKAGES}
-Dwindows.cmake.toolchain.file=${CMAKE_TOOLCHAIN_FILE}
-Dwindows.cmake.build.type=RelWithDebInfo
-Dwindows.build.hdfspp.dll=off
-Dwindows.no.sasl=on
-Duse.platformToolsetVersion=v142"
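# WINDOWS_FLAGS is spliced into the Maven command line by personality_modules
# and shadedclient_rebuild below whenever IS_WINDOWS is set.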

## @description Globals specific to this personality
## @audience private
## @stability evolving
function personality_globals
{
  # shellcheck disable=SC2034
  BUILDTOOL=maven
  #shellcheck disable=SC2034
  PATCH_BRANCH_DEFAULT=trunk
  #shellcheck disable=SC2034
  PATCH_NAMING_RULE="https://cwiki.apache.org/confluence/display/HADOOP/How+To+Contribute"
  #shellcheck disable=SC2034
  JIRA_ISSUE_RE='^(HADOOP|YARN|MAPREDUCE|HDFS)-[0-9]+$'
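  # e.g. "HDFS-12345" matches; "HBASE-1234" and "HADOOP-123-extra" do not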
  #shellcheck disable=SC2034
  GITHUB_REPO_DEFAULT="apache/hadoop"

  HADOOP_HOMEBREW_DIR=${HADOOP_HOMEBREW_DIR:-$(brew --prefix 2>/dev/null)}
  if [[ -z "${HADOOP_HOMEBREW_DIR}" ]]; then
    HADOOP_HOMEBREW_DIR=/usr/local
  fi
}
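
## @description Parse command-line arguments specific to this personality
## @audience private
## @stability evolving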
function personality_parse_args
{
  declare i

  for i in "$@"; do
    case ${i} in
      --hadoop-isal-prefix=*)
        delete_parameter "${i}"
        ISAL_HOME=${i#*=}
      ;;
      --hadoop-openssl-prefix=*)
        delete_parameter "${i}"
        OPENSSL_HOME=${i#*=}
      ;;
      --hadoop-snappy-prefix=*)
        delete_parameter "${i}"
        SNAPPY_HOME=${i#*=}
      ;;
    esac
  done
}
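
# e.g. --hadoop-openssl-prefix=/opt/openssl (path illustrative) sets
# OPENSSL_HOME, which hadoop_native_flags below turns into
# -Dopenssl.prefix=/opt/openssl when /opt/openssl/include exists.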

## @description Calculate the actual module ordering
## @audience private
## @stability evolving
## @param ordering
function hadoop_order
{
  declare ordering=$1
  declare hadoopm

  if [[ ${ordering} = normal ]]; then
    hadoopm="${CHANGED_MODULES[*]}"
  elif [[ ${ordering} = union ]]; then
    hadoopm="${CHANGED_UNION_MODULES}"
  elif [[ ${ordering} = mvnsrc ]]; then
    hadoopm="${MAVEN_SRC_MODULES[*]}"
  elif [[ ${ordering} = mvnsrctest ]]; then
    hadoopm="${MAVEN_SRCTEST_MODULES[*]}"
  else
    hadoopm="${ordering}"
  fi
  echo "${hadoopm}"
}
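
# The result is written to stdout, so callers grab it with command
# substitution, e.g. mods=$(hadoop_order ${input}) as in
# hadoop_unittest_prereqs below.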

## @description Retrieves the Hadoop project version defined in the root pom.xml
## @audience private
## @stability evolving
## @returns 0 on success, 1 on failure
function load_hadoop_version
{
  if [[ -f "${BASEDIR}/pom.xml" ]]; then
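    # e.g. <version>3.4.0-SNAPSHOT</version> yields HADOOP_VERSION=3.4.0;
    # the trailing cut strips the -SNAPSHOT (or any other -qualifier)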
    HADOOP_VERSION=$(grep '<version>' "${BASEDIR}/pom.xml" \
        | head -1 \
        | "${SED}" -e 's|^ *<version>||' -e 's|</version>.*$||' \
        | cut -f1 -d- )
    return 0
  else
    return 1
  fi
}

## @description Determine if it is safe to run parallel tests
## @audience private
## @stability evolving
## @param ordering
function hadoop_test_parallel
{
  if load_hadoop_version; then
    export HADOOP_VERSION
  else
    return 1
  fi

  hmajor=${HADOOP_VERSION%%\.*}
  hmajorminor=${HADOOP_VERSION%\.*}
  hminor=${hmajorminor##*\.}
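  # e.g. HADOOP_VERSION=3.4.0 gives hmajor=3, hmajorminor=3.4, hminor=4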
  # ... and just for reference
  #hmicro=${HADOOP_VERSION##*\.}

  # Apache Hadoop v2.8.0 was the first release with
  # reliably working parallel unit tests
  if [[ ${hmajor} -lt 3 && ${hminor} -lt 8 ]]; then
    return 1
  fi
  return 0
}

## @description Install extra modules for unit tests
## @audience private
## @stability evolving
## @param ordering
function hadoop_unittest_prereqs
{
  declare input=$1
  declare mods
  declare need_common=0
  declare building_common=0
  declare module
  declare flags
  declare fn

  # prior to running unit tests, hdfs needs libhadoop.so built
  # if we're building root, then this extra work is moot

  #shellcheck disable=SC2086
  mods=$(hadoop_order ${input})

  for module in ${mods}; do
    if [[ ${module} = hadoop-hdfs-project* ]]; then
      need_common=1
    elif [[ ${module} = hadoop-common-project/hadoop-common
         || ${module} = hadoop-common-project ]]; then
      building_common=1
    elif [[ ${module} = . ]]; then
      return
    fi
  done

  # Windows builds *ALWAYS* need hadoop-common compiled
  case ${OSTYPE} in
    Windows_NT|CYGWIN*|MINGW*|MSYS*)
      need_common=1
    ;;
  esac

  if [[ ${need_common} -eq 1
     && ${building_common} -eq 0 ]]; then
    echo "unit test pre-reqs:"
    module="hadoop-common-project/hadoop-common"
    fn=$(module_file_fragment "${module}")
    flags="$(hadoop_native_flags) $(yarn_ui2_flag)"
    pushd "${BASEDIR}/${module}" >/dev/null || return 1
    # shellcheck disable=SC2086
    echo_and_redirect "${PATCH_DIR}/maven-unit-prereq-${fn}-install.txt" \
      "${MAVEN}" "${MAVEN_ARGS[@]}" install -DskipTests ${flags}
    popd >/dev/null || return 1
  fi
}

## @description Calculate the flags/settings for yarn-ui v2 build
## @description based upon the OS
## @audience private
## @stability evolving
function yarn_ui2_flag
{
  if [[ ${BUILD_NATIVE} != true ]]; then
    return
  fi

  # For now this has only been tested on Linux/OSX; don't enable
  # the profile on Windows until it has been verified
  case ${OSTYPE} in
    Linux)
      # shellcheck disable=SC2086
      echo -Pyarn-ui
    ;;
    Darwin)
      echo -Pyarn-ui
    ;;
    *)
      # Do nothing
    ;;
  esac
}

## @description Calculate the flags/settings for native code
## @description based upon the OS
## @audience private
## @stability evolving
function hadoop_native_flags
{
  if [[ ${BUILD_NATIVE} != true ]]; then
    return
  fi

  declare -a args

  # Based upon HADOOP-11937
  #
  # Some notes:
  #
  # - getting fuse to compile on anything but Linux
  #   is always tricky.
  # - Darwin assumes homebrew is in use.
  # - HADOOP-12027 required for bzip2 on OS X.
  # - bzip2 is broken in lots of places
  #   (the shared library is considered experimental),
  #   e.g., HADOOP-12027 for OS X, so no -Drequire.bzip2
  #

  args=("-Drequire.test.libhadoop")

  if [[ -d "${ISAL_HOME}/include" ]]; then
    args=("${args[@]}" "-Disal.prefix=${ISAL_HOME}")
  fi

  if [[ -d "${OPENSSL_HOME}/include" ]]; then
    args=("${args[@]}" "-Dopenssl.prefix=${OPENSSL_HOME}")
  elif [[ -d "${HADOOP_HOMEBREW_DIR}/opt/openssl/" ]]; then
    args=("${args[@]}" "-Dopenssl.prefix=${HADOOP_HOMEBREW_DIR}/opt/openssl/")
  fi

  if [[ -d "${SNAPPY_HOME}/include" ]]; then
    args=("${args[@]}" "-Dsnappy.prefix=${SNAPPY_HOME}")
  elif [[ -f "${HADOOP_HOMEBREW_DIR}/include/snappy.h" ]]; then
    args=("${args[@]}" "-Dsnappy.prefix=${HADOOP_HOMEBREW_DIR}/opt/snappy")
  fi

  case ${OSTYPE} in
    Linux)
      # shellcheck disable=SC2086
      echo \
        -Pnative \
        -Drequire.fuse \
        -Drequire.openssl \
        -Drequire.snappy \
        -Drequire.valgrind \
        -Drequire.zstd \
        "${args[@]}"
    ;;
    Darwin)
      echo \
        "${args[@]}" \
        -Pnative \
        -Drequire.snappy \
        -Drequire.openssl
    ;;
    Windows_NT|CYGWIN*|MINGW*|MSYS*)
      echo \
        "${args[@]}" \
        -Drequire.snappy \
        -Pdist \
        -Dtar \
        "${WINDOWS_FLAGS}"
    ;;
    *)
      echo \
        "${args[@]}"
    ;;
  esac
}
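
# Both hadoop_native_flags and yarn_ui2_flag print their flags on stdout;
# callers combine them, e.g. flags="$(hadoop_native_flags) $(yarn_ui2_flag)"
# as done in hadoop_unittest_prereqs above and personality_modules below.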

## @description Queue up modules for this personality
## @audience private
## @stability evolving
## @param repostatus
## @param testtype
function personality_modules
{
  declare repostatus=$1
  declare testtype=$2
  declare extra=""
  declare ordering="normal"
  declare needflags=false
  declare foundbats=false
  declare flags
  declare fn
  declare i
  declare hadoopm

  yetus_debug "Personality: ${repostatus} ${testtype}"

  clear_personality_queue

  case ${testtype} in
    asflicense)
      # this is very fast and provides the full path if we do it from
      # the root of the source
      personality_enqueue_module .
      return
    ;;
    checkstyle)
      ordering="union"
      extra="-DskipTests"
    ;;
    compile)
      ordering="union"
      extra="-DskipTests"
      needflags=true

      # if something in common changed, we build the whole world
      if [[ "${CHANGED_MODULES[*]}" =~ hadoop-common ]]; then
        yetus_debug "hadoop personality: javac + hadoop-common = ordering set to ."
        ordering="."
      fi
    ;;
    distclean)
      ordering="."
      extra="-DskipTests"
    ;;
    javadoc)
      if [[ "${CHANGED_MODULES[*]}" =~ \. ]]; then
        ordering=.
      fi

      if [[ "${repostatus}" = patch && "${BUILDMODE}" = patch ]]; then
        echo "javadoc pre-reqs:"
        for i in hadoop-project \
          hadoop-common-project/hadoop-annotations; do
          fn=$(module_file_fragment "${i}")
          pushd "${BASEDIR}/${i}" >/dev/null || return 1
          echo "cd ${i}"
          echo_and_redirect "${PATCH_DIR}/maven-${fn}-install.txt" \
            "${MAVEN}" "${MAVEN_ARGS[@]}" install
          popd >/dev/null || return 1
        done
      fi
      extra="-Pdocs -DskipTests"
    ;;
    mvneclipse)
      if [[ "${CHANGED_MODULES[*]}" =~ \. ]]; then
        ordering=.
      fi
    ;;
    mvninstall)
      extra="-DskipTests"
      if [[ "${repostatus}" = branch || "${BUILDMODE}" = full ]]; then
        ordering=.
      fi
    ;;
    mvnsite)
      if [[ "${CHANGED_MODULES[*]}" =~ \. ]]; then
        ordering=.
      fi
    ;;
    unit)
      extra="-Dsurefire.rerunFailingTestsCount=2"
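      # surefire re-runs each failing test up to 2 more times; a test that
      # eventually passes is reported as flaky instead of failing the build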
      if [[ "${BUILDMODE}" = full ]]; then
        ordering=mvnsrc
      elif [[ "${CHANGED_MODULES[*]}" =~ \. ]]; then
        ordering=.
      fi

      if [[ ${TEST_PARALLEL} = "true" ]]; then
        if hadoop_test_parallel; then
          extra="${extra} -Pparallel-tests"
          if [[ -n ${TEST_THREADS:-} ]]; then
            extra="${extra} -DtestsThreadCount=${TEST_THREADS}"
          fi
        fi
      fi
      needflags=true
      hadoop_unittest_prereqs "${ordering}"

      if ! verify_needed_test javac; then
        yetus_debug "hadoop: javac not requested"
        if ! verify_needed_test native; then
          yetus_debug "hadoop: native not requested"
          yetus_debug "hadoop: adding -DskipTests to unit test"
          extra="-DskipTests"
        fi
      fi

      for i in "${CHANGED_FILES[@]}"; do
        if [[ "${i}" =~ \.bats ]]; then
          foundbats=true
        fi
      done

      if ! verify_needed_test shellcheck && [[ ${foundbats} = false ]]; then
        yetus_debug "hadoop: NO shell code change detected; disabling shelltest profile"
        extra="${extra} -P!shelltest"
      else
        extra="${extra} -Pshelltest"
      fi
    ;;
    *)
      extra="-DskipTests"
    ;;
  esac

  if [[ ${needflags} = true ]]; then
    flags="$(hadoop_native_flags) $(yarn_ui2_flag)"
    extra="${extra} ${flags}"
  fi

  if [[ "$IS_WINDOWS" && "$IS_WINDOWS" == 1 ]]; then
    extra="-Ptest-patch -Pdist -Dtar ${WINDOWS_FLAGS} ${extra}"
  fi

  for module in $(hadoop_order ${ordering}); do
    # shellcheck disable=SC2086
    personality_enqueue_module ${module} ${extra}
  done
}

## @description Add tests based upon personality needs
## @audience private
## @stability evolving
## @param filename
function personality_file_tests
{
  declare filename=$1

  yetus_debug "Using Hadoop-specific personality_file_tests"

  if [[ ${filename} =~ src/main/webapp ]]; then
    yetus_debug "tests/webapp: ${filename}"
    add_test shadedclient
  elif [[ ${filename} =~ \.sh
       || ${filename} =~ \.cmd
       || ${filename} =~ src/scripts
       || ${filename} =~ src/test/scripts
       || ${filename} =~ src/main/bin
       || ${filename} =~ shellprofile\.d
       || ${filename} =~ src/main/conf
      ]]; then
    yetus_debug "tests/shell: ${filename}"
    add_test mvnsite
    add_test unit
  elif [[ ${filename} =~ \.md$
       || ${filename} =~ \.md\.vm$
       || ${filename} =~ src/site
      ]]; then
    yetus_debug "tests/site: ${filename}"
    add_test mvnsite
  elif [[ ${filename} =~ \.c$
       || ${filename} =~ \.cc$
       || ${filename} =~ \.h$
       || ${filename} =~ \.hh$
       || ${filename} =~ \.proto$
       || ${filename} =~ \.cmake$
       || ${filename} =~ CMakeLists.txt
      ]]; then
    yetus_debug "tests/units: ${filename}"
    add_test compile
    add_test cc
    add_test mvnsite
    add_test javac
    add_test unit
  elif [[ ${filename} =~ build.xml$
       || ${filename} =~ pom.xml$
       || ${filename} =~ \.java$
       || ${filename} =~ src/main
      ]]; then
    yetus_debug "tests/javadoc+units: ${filename}"
    add_test compile
    add_test javac
    add_test javadoc
    add_test mvninstall
    add_test mvnsite
    add_test unit
    add_test shadedclient
  fi

  # if we change anything in here, e.g. the test scripts
  # then run the client artifact tests
  if [[ ${filename} =~ hadoop-client-modules ]]; then
    add_test shadedclient
  fi

  if [[ ${filename} =~ src/test ]]; then
    yetus_debug "tests: src/test"
    add_test unit
  fi

  if [[ ${filename} =~ \.java$ ]]; then
    add_test spotbugs
  fi
}
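
# e.g. a patch touching only foo/src/main/java/Bar.java (illustrative path)
# falls into the pom.xml/.java branch above and also picks up spotbugs from
# the final \.java$ check.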

## @description Image to print on success
## @audience private
## @stability evolving
function hadoop_console_success
{
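  # the strings below are base64-encoded ASCII art of a cowsay-style
  # "Success!" banner, presumably decoded by the caller for display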
  printf "IF9fX19fX19fX18gCjwgU3VjY2VzcyEgPgogLS0tLS0tLS0tLSAKIFwgICAg";
  printf "IC9cICBfX18gIC9cCiAgXCAgIC8vIFwvICAgXC8gXFwKICAgICAoKCAgICBP";
  printf "IE8gICAgKSkKICAgICAgXFwgLyAgICAgXCAvLwogICAgICAgXC8gIHwgfCAg";
  printf "XC8gCiAgICAgICAgfCAgfCB8ICB8ICAKICAgICAgICB8ICB8IHwgIHwgIAog";
  printf "ICAgICAgIHwgICBvICAgfCAgCiAgICAgICAgfCB8ICAgfCB8ICAKICAgICAg";
  printf "ICB8bXwgICB8bXwgIAo"
}

###################################################
# Hadoop project specific check of IT for shaded artifacts

add_test_type shadedclient

## @description check for test modules and add test/plugins as needed
## @audience private
## @stability evolving
function shadedclient_initialize
{
  maven_add_install shadedclient
}

## @description build client facing shaded and non-shaded artifacts and test them
## @audience private
## @stability evolving
## @param repostatus
function shadedclient_rebuild
{
  declare repostatus=$1
  declare logfile="${PATCH_DIR}/${repostatus}-shadedclient.txt"
  declare module
  declare -a modules=()

  if [[ ${OSTYPE} = Windows_NT ||
        ${OSTYPE} =~ ^CYGWIN.* ||
        ${OSTYPE} =~ ^MINGW32.* ||
        ${OSTYPE} =~ ^MSYS.* ]]; then
    echo "hadoop personality: building on windows, skipping check of client artifacts."
    return 0
  fi

  yetus_debug "hadoop personality: seeing if we need the test of client artifacts."
  for module in hadoop-client-modules/hadoop-client-check-invariants \
                hadoop-client-modules/hadoop-client-check-test-invariants \
                hadoop-client-modules/hadoop-client-integration-tests; do
    if [ -d "${module}" ]; then
      yetus_debug "hadoop personality: test module '${module}' is present."
      modules+=(-pl "${module}")
    fi
  done
  if [ ${#modules[@]} -eq 0 ]; then
    echo "hadoop personality: no test modules present, skipping check of client artifacts."
    return 0
  fi

  big_console_header "Checking client artifacts on ${repostatus} with shaded clients"

  extra="-Dtest=NoUnitTests -Dmaven.javadoc.skip=true -Dcheckstyle.skip=true -Dspotbugs.skip=true"

  if [[ "$IS_WINDOWS" && "$IS_WINDOWS" == 1 ]]; then
    if load_hadoop_version; then
      export HADOOP_HOME="${SOURCEDIR}/hadoop-dist/target/hadoop-${HADOOP_VERSION}-SNAPSHOT"
    else
      yetus_error "[WARNING] Unable to extract the Hadoop version and thus HADOOP_HOME is not set. Some tests may fail."
    fi
    extra="${WINDOWS_FLAGS} ${extra}"
  fi

  echo_and_redirect "${logfile}" \
    "${MAVEN}" "${MAVEN_ARGS[@]}" verify -fae --batch-mode -am "${modules[@]}" "${extra}"

  big_console_header "Checking client artifacts on ${repostatus} with non-shaded clients"

  echo_and_redirect "${logfile}" \
    "${MAVEN}" "${MAVEN_ARGS[@]}" verify -fae --batch-mode -am \
      "${modules[@]}" \
      -DskipShade -Dtest=NoUnitTests -Dmaven.javadoc.skip=true -Dcheckstyle.skip=true \
      -Dspotbugs.skip=true "${extra}"

  count=$("${GREP}" -c '\[ERROR\]' "${logfile}")
  if [[ ${count} -gt 0 ]]; then
    add_vote_table -1 shadedclient "${repostatus} has errors when building and testing our client artifacts."
    return 1
  fi

  add_vote_table +1 shadedclient "${repostatus} has no errors when building and testing our client artifacts."
  return 0
}