#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SHELLDOC-IGNORE
#
# Override these to match Apache Hadoop's requirements
personality_plugins "all,-ant,-gradle,-scalac,-scaladoc"

# These flags are needed to run Yetus against Hadoop on Windows.
WINDOWS_FLAGS=(
  "-Pnative-win"
  "-Dhttps.protocols=TLSv1.2"
  "-Drequire.openssl"
  "-Drequire.test.libhadoop"
  "-Dshell-executable=${BASH_EXECUTABLE}"
  "-Dopenssl.prefix=${VCPKG_INSTALLED_PACKAGES}"
  "-Dcmake.prefix.path=${VCPKG_INSTALLED_PACKAGES}"
  "-Dwindows.cmake.toolchain.file=${CMAKE_TOOLCHAIN_FILE}"
  "-Dwindows.cmake.build.type=RelWithDebInfo"
  "-Dwindows.build.hdfspp.dll=off"
  "-Dwindows.no.sasl=on"
  "-Duse.platformToolsetVersion=v142"
)
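# Note: BASH_EXECUTABLE, VCPKG_INSTALLED_PACKAGES and CMAKE_TOOLCHAIN_FILE are
# never set in this file; they are assumed to be exported by the Windows CI
# environment before this personality is sourced.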

## @description Globals specific to this personality
## @audience private
## @stability evolving
function personality_globals
{
  # shellcheck disable=SC2034
  BUILDTOOL=maven
  #shellcheck disable=SC2034
  PATCH_BRANCH_DEFAULT=trunk
  #shellcheck disable=SC2034
  PATCH_NAMING_RULE="https://cwiki.apache.org/confluence/display/HADOOP/How+To+Contribute"
  #shellcheck disable=SC2034
  JIRA_ISSUE_RE='^(HADOOP|YARN|MAPREDUCE|HDFS)-[0-9]+$'
  #shellcheck disable=SC2034
  GITHUB_REPO_DEFAULT="apache/hadoop"

  HADOOP_HOMEBREW_DIR=${HADOOP_HOMEBREW_DIR:-$(brew --prefix 2>/dev/null)}
  if [[ -z "${HADOOP_HOMEBREW_DIR}" ]]; then
    HADOOP_HOMEBREW_DIR=/usr/local
  fi
}
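
## @description Parse personality-specific arguments
## @audience private
## @stability evolving
## @param args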
function personality_parse_args
{
  declare i

  for i in "$@"; do
    case ${i} in
      --hadoop-isal-prefix=*)
        delete_parameter "${i}"
        ISAL_HOME=${i#*=}
      ;;
      --hadoop-openssl-prefix=*)
        delete_parameter "${i}"
        OPENSSL_HOME=${i#*=}
      ;;
      --hadoop-snappy-prefix=*)
        delete_parameter "${i}"
        SNAPPY_HOME=${i#*=}
      ;;
    esac
  done
}
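# Illustrative use (hypothetical paths): passing
#   --hadoop-openssl-prefix=/opt/openssl --hadoop-snappy-prefix=/opt/snappy
# on the Yetus command line makes hadoop_native_flags point Maven at those
# installs instead of the homebrew fallbacks.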

## @description Calculate the actual module ordering
## @audience private
## @stability evolving
## @param ordering
function hadoop_order
{
  declare ordering=$1
  declare hadoopm

  if [[ ${ordering} = normal ]]; then
    hadoopm="${CHANGED_MODULES[*]}"
  elif [[ ${ordering} = union ]]; then
    hadoopm="${CHANGED_UNION_MODULES}"
  elif [[ ${ordering} = mvnsrc ]]; then
    hadoopm="${MAVEN_SRC_MODULES[*]}"
  elif [[ ${ordering} = mvnsrctest ]]; then
    hadoopm="${MAVEN_SRCTEST_MODULES[*]}"
  else
    hadoopm="${ordering}"
  fi

  echo "${hadoopm}"
}
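# e.g. "hadoop_order union" echoes ${CHANGED_UNION_MODULES}, while an argument
# matching no keyword (say, "hadoop-tools") is echoed back unchanged.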

## @description Retrieves the Hadoop project version defined in the root pom.xml
## @audience private
## @stability evolving
## @returns 0 on success, 1 on failure
function load_hadoop_version
{
  if [[ -f "${BASEDIR}/pom.xml" ]]; then
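    # e.g. <version>3.4.0-SNAPSHOT</version> yields HADOOP_VERSION=3.4.0;
    # the trailing cut drops any -qualifier such as -SNAPSHOT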
    HADOOP_VERSION=$(grep '<version>' "${BASEDIR}/pom.xml" \
        | head -1 \
        | "${SED}" -e 's|^ *<version>||' -e 's|</version>.*$||' \
        | cut -f1 -d- )
    return 0
  else
    return 1
  fi
}

## @description Determine if it is safe to run parallel tests
## @audience private
## @stability evolving
## @param ordering
function hadoop_test_parallel
{
  if load_hadoop_version; then
    export HADOOP_VERSION
  else
    return 1
  fi

  hmajor=${HADOOP_VERSION%%\.*}
  hmajorminor=${HADOOP_VERSION%\.*}
  hminor=${hmajorminor##*\.}
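  # e.g. HADOOP_VERSION=3.4.0 -> hmajor=3, hmajorminor=3.4, hminor=4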
  # ... and just for reference
  #hmicro=${HADOOP_VERSION##*\.}

  # Apache Hadoop v2.8.0 was the first release with reliably
  # working parallel unit tests
  if [[ ${hmajor} -lt 3 && ${hminor} -lt 8 ]]; then
    return 1
  fi

  return 0
}

## @description Install extra modules for unit tests
## @audience private
## @stability evolving
## @param ordering
function hadoop_unittest_prereqs
{
  declare input=$1
  declare mods
  declare need_common=0
  declare building_common=0
  declare module
  declare flags
  declare fn

  # prior to running unit tests, hdfs needs libhadoop.so built
  # if we're building root, then this extra work is moot
  #shellcheck disable=SC2086
  mods=$(hadoop_order ${input})

  for module in ${mods}; do
    if [[ ${module} = hadoop-hdfs-project* ]]; then
      need_common=1
    elif [[ ${module} = hadoop-common-project/hadoop-common
        || ${module} = hadoop-common-project ]]; then
      building_common=1
    elif [[ ${module} = . ]]; then
      return
    fi
  done

  # Windows builds *ALWAYS* need hadoop-common compiled
  case ${OSTYPE} in
    Windows_NT|CYGWIN*|MINGW*|MSYS*)
      need_common=1
    ;;
  esac

  if [[ ${need_common} -eq 1
      && ${building_common} -eq 0 ]]; then
    echo "unit test pre-reqs:"
    module="hadoop-common-project/hadoop-common"
    fn=$(module_file_fragment "${module}")
    flags="$(hadoop_native_flags) $(yarn_ui2_flag)"
    pushd "${BASEDIR}/${module}" >/dev/null || return 1
    # shellcheck disable=SC2086
    echo_and_redirect "${PATCH_DIR}/maven-unit-prereq-${fn}-install.txt" \
        "${MAVEN}" "${MAVEN_ARGS[@]}" install -DskipTests ${flags}
    popd >/dev/null || return 1
  fi
}

## @description Calculate the flags/settings for yarn-ui v2 build
## @description based upon the OS
## @audience private
## @stability evolving
function yarn_ui2_flag
{
  if [[ ${BUILD_NATIVE} != true ]]; then
    return
  fi

  # For now this is only tested on Linux/OSX; don't enable the
  # profile on Windows until it has been verified
  case ${OSTYPE} in
    Linux)
      # shellcheck disable=SC2086
      echo -Pyarn-ui
    ;;
    Darwin)
      echo -Pyarn-ui
    ;;
    *)
      # Do nothing
    ;;
  esac
}

## @description Calculate the flags/settings for native code
## @description based upon the OS
## @audience private
## @stability evolving
function hadoop_native_flags
{
  if [[ ${BUILD_NATIVE} != true ]]; then
    return
  fi

  declare -a args

  # Based upon HADOOP-11937
  #
  # Some notes:
  #
  # - getting fuse to compile on anything but Linux
  #   is always tricky.
  # - Darwin assumes homebrew is in use.
  # - HADOOP-12027 required for bzip2 on OS X.
  # - bzip2 is broken in lots of places
  #   (the shared library is considered experimental),
  #   e.g., HADOOP-12027 for OS X, so no -Drequire.bzip2
  #
  args=("-Drequire.test.libhadoop")

  if [[ -d "${ISAL_HOME}/include" ]]; then
    args=("${args[@]}" "-Disal.prefix=${ISAL_HOME}")
  fi

  if [[ -d "${OPENSSL_HOME}/include" ]]; then
    args=("${args[@]}" "-Dopenssl.prefix=${OPENSSL_HOME}")
  elif [[ -d "${HADOOP_HOMEBREW_DIR}/opt/openssl/" ]]; then
    args=("${args[@]}" "-Dopenssl.prefix=${HADOOP_HOMEBREW_DIR}/opt/openssl/")
  fi

  if [[ -d "${SNAPPY_HOME}/include" ]]; then
    args=("${args[@]}" "-Dsnappy.prefix=${SNAPPY_HOME}")
  elif [[ -f "${HADOOP_HOMEBREW_DIR}/include/snappy.h" ]]; then
    # note: -f, not -d; snappy.h is a file, so the original -d test could
    # never succeed
    args=("${args[@]}" "-Dsnappy.prefix=${HADOOP_HOMEBREW_DIR}/opt/snappy")
  fi

  case ${OSTYPE} in
    Linux)
      # shellcheck disable=SC2086
      echo \
        -Pnative \
        -Drequire.fuse \
        -Drequire.openssl \
        -Drequire.snappy \
        -Drequire.valgrind \
        -Drequire.zstd \
        "${args[@]}"
    ;;
    Darwin)
      echo \
        "${args[@]}" \
        -Pnative \
        -Drequire.snappy \
        -Drequire.openssl
    ;;
    Windows_NT|CYGWIN*|MINGW*|MSYS*)
      echo \
        "${args[@]}" \
        -Drequire.snappy \
        -Pdist \
        -Dtar \
        "${WINDOWS_FLAGS[@]}"
    ;;
    *)
      echo \
        "${args[@]}"
    ;;
  esac
}
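# On Linux, for example, the output expands to roughly:
#   -Pnative -Drequire.fuse -Drequire.openssl -Drequire.snappy \
#   -Drequire.valgrind -Drequire.zstd -Drequire.test.libhadoop [prefix flags]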

## @description Queue up modules for this personality
## @audience private
## @stability evolving
## @param repostatus
## @param testtype
function personality_modules
{
  declare repostatus=$1
  declare testtype=$2
  declare extra=""
  declare ordering="normal"
  declare needflags=false
  declare foundbats=false
  declare flags
  declare fn
  declare i
  declare hadoopm

  yetus_debug "Personality: ${repostatus} ${testtype}"

  clear_personality_queue

  case ${testtype} in
    asflicense)
      # this is very fast and provides the full path if we do it from
      # the root of the source
      personality_enqueue_module .
      return
    ;;
    checkstyle)
      ordering="union"
      extra="-DskipTests"
    ;;
    compile)
      ordering="union"
      extra="-DskipTests"
      needflags=true

      # if something in common changed, we build the whole world
      if [[ "${CHANGED_MODULES[*]}" =~ hadoop-common ]]; then
        yetus_debug "hadoop personality: javac + hadoop-common = ordering set to ."
        ordering="."
      fi
    ;;
    distclean)
      ordering="."
      extra="-DskipTests"
    ;;
    javadoc)
      if [[ "${CHANGED_MODULES[*]}" =~ \. ]]; then
        ordering=.
      fi

      if [[ "${repostatus}" = patch && "${BUILDMODE}" = patch ]]; then
        echo "javadoc pre-reqs:"
        for i in hadoop-project \
            hadoop-common-project/hadoop-annotations; do
          fn=$(module_file_fragment "${i}")
          pushd "${BASEDIR}/${i}" >/dev/null || return 1
          echo "cd ${i}"
          echo_and_redirect "${PATCH_DIR}/maven-${fn}-install.txt" \
              "${MAVEN}" "${MAVEN_ARGS[@]}" install
          popd >/dev/null || return 1
        done
      fi
      extra="-Pdocs -DskipTests"
    ;;
    mvneclipse)
      if [[ "${CHANGED_MODULES[*]}" =~ \. ]]; then
        ordering=.
      fi
    ;;
    mvninstall)
      extra="-DskipTests"
      if [[ "${repostatus}" = branch || "${BUILDMODE}" = full ]]; then
        ordering=.
      fi
    ;;
    mvnsite)
      if [[ "${CHANGED_MODULES[*]}" =~ \. ]]; then
        ordering=.
      fi
    ;;
    unit)
      if [[ "$IS_WINDOWS" && "$IS_WINDOWS" == 1 && (-z "$IS_NIGHTLY_BUILD" || "$IS_NIGHTLY_BUILD" == 0) ]]; then
        echo "Won't run unit tests for Windows in pre-commit CI"
        return
      fi

      extra="-Dsurefire.rerunFailingTestsCount=2"
      if [[ "${BUILDMODE}" = full ]]; then
        ordering=mvnsrc
      elif [[ "${CHANGED_MODULES[*]}" =~ \. ]]; then
        ordering=.
      fi

      if [[ ${TEST_PARALLEL} = "true" ]] ; then
        if hadoop_test_parallel; then
          extra="${extra} -Pparallel-tests"
          if [[ -n ${TEST_THREADS:-} ]]; then
            extra="${extra} -DtestsThreadCount=${TEST_THREADS}"
          fi
        fi
      fi
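      # e.g. TEST_PARALLEL=true with TEST_THREADS=8 appends
      # "-Pparallel-tests -DtestsThreadCount=8"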
      needflags=true
      hadoop_unittest_prereqs "${ordering}"

      if ! verify_needed_test javac; then
        yetus_debug "hadoop: javac not requested"
        if ! verify_needed_test native; then
          yetus_debug "hadoop: native not requested"
          yetus_debug "hadoop: adding -DskipTests to unit test"
          extra="-DskipTests"
        fi
      fi

      for i in "${CHANGED_FILES[@]}"; do
        if [[ "${i}" =~ \.bats ]]; then
          foundbats=true
        fi
      done

      if ! verify_needed_test shellcheck && [[ ${foundbats} = false ]]; then
        yetus_debug "hadoop: NO shell code change detected; disabling shelltest profile"
        extra="${extra} -P!shelltest"
      else
        extra="${extra} -Pshelltest"
      fi
    ;;
    *)
      extra="-DskipTests"
    ;;
  esac

  if [[ ${needflags} = true ]]; then
    flags="$(hadoop_native_flags) $(yarn_ui2_flag)"
    extra="${extra} ${flags}"
  fi

  if [[ "$IS_WINDOWS" && "$IS_WINDOWS" == 1 ]]; then
    extra="-Ptest-patch -Pdist -Dtar ${WINDOWS_FLAGS[*]} ${extra}"
  fi

  for module in $(hadoop_order ${ordering}); do
    # shellcheck disable=SC2086
    personality_enqueue_module ${module} ${extra}
  done
}

## @description Add tests based upon personality needs
## @audience private
## @stability evolving
## @param filename
function personality_file_tests
{
  declare filename=$1

  yetus_debug "Using Hadoop-specific personality_file_tests"

  if [[ ${filename} =~ src/main/webapp ]]; then
    yetus_debug "tests/webapp: ${filename}"
    add_test shadedclient
  elif [[ ${filename} =~ \.sh
       || ${filename} =~ \.cmd
       || ${filename} =~ src/scripts
       || ${filename} =~ src/test/scripts
       || ${filename} =~ src/main/bin
       || ${filename} =~ shellprofile\.d
       || ${filename} =~ src/main/conf
       ]]; then
    yetus_debug "tests/shell: ${filename}"
    add_test mvnsite
    add_test unit
  elif [[ ${filename} =~ \.md$
       || ${filename} =~ \.md\.vm$
       || ${filename} =~ src/site
       ]]; then
    yetus_debug "tests/site: ${filename}"
    add_test mvnsite
  elif [[ ${filename} =~ \.c$
       || ${filename} =~ \.cc$
       || ${filename} =~ \.h$
       || ${filename} =~ \.hh$
       || ${filename} =~ \.proto$
       || ${filename} =~ \.cmake$
       || ${filename} =~ CMakeLists.txt
       ]]; then
    yetus_debug "tests/units: ${filename}"
    add_test compile
    add_test cc
    add_test mvnsite
    add_test javac
    add_test unit
  elif [[ ${filename} =~ build.xml$
       || ${filename} =~ pom.xml$
       || ${filename} =~ \.java$
       || ${filename} =~ src/main
       ]]; then
    yetus_debug "tests/javadoc+units: ${filename}"
    add_test compile
    add_test javac
    add_test javadoc
    add_test mvninstall
    add_test mvnsite
    add_test unit
    add_test shadedclient
  fi

  # if we change anything in here, e.g. the test scripts
  # then run the client artifact tests
  if [[ ${filename} =~ hadoop-client-modules ]]; then
    add_test shadedclient
  fi

  if [[ ${filename} =~ src/test ]]; then
    yetus_debug "tests: src/test"
    add_test unit
  fi

  if [[ ${filename} =~ \.java$ ]]; then
    add_test spotbugs
  fi
}
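# e.g. a changed *.java file matches the build.xml$/pom.xml$/\.java$ branch
# above, queueing compile, javac, javadoc, mvninstall, mvnsite, unit and
# shadedclient, plus spotbugs from the final check.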

## @description Image to print on success
## @audience private
## @stability evolving
function hadoop_console_success
{
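  # base64-encoded so the art survives shell quoting; it decodes to a
  # cowsay-style elephant saying "Success!"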
  printf "IF9fX19fX19fX18gCjwgU3VjY2VzcyEgPgogLS0tLS0tLS0tLSAKIFwgICAg";
  printf "IC9cICBfX18gIC9cCiAgXCAgIC8vIFwvICAgXC8gXFwKICAgICAoKCAgICBP";
  printf "IE8gICAgKSkKICAgICAgXFwgLyAgICAgXCAvLwogICAgICAgXC8gIHwgfCAg";
  printf "XC8gCiAgICAgICAgfCAgfCB8ICB8ICAKICAgICAgICB8ICB8IHwgIHwgIAog";
  printf "ICAgICAgIHwgICBvICAgfCAgCiAgICAgICAgfCB8ICAgfCB8ICAKICAgICAg";
  printf "ICB8bXwgICB8bXwgIAo"
}

###################################################
# Hadoop project specific check of IT for shaded artifacts

add_test_type shadedclient

## @description check for test modules and add test/plugins as needed
## @audience private
## @stability evolving
function shadedclient_initialize
{
  maven_add_install shadedclient
}
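# (maven_add_install registers shadedclient as a test that needs the
# mvninstall step to have run first, so the artifacts it exercises exist in
# the local Maven repository.)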

## @description build client facing shaded and non-shaded artifacts and test them
## @audience private
## @stability evolving
## @param repostatus
function shadedclient_rebuild
{
  declare repostatus=$1
  declare logfile="${PATCH_DIR}/${repostatus}-shadedclient.txt"
  declare module
  declare -a modules=()

  yetus_debug "hadoop personality: seeing if we need to test the client artifacts."
  for module in hadoop-client-modules/hadoop-client-check-invariants \
      hadoop-client-modules/hadoop-client-check-test-invariants \
      hadoop-client-modules/hadoop-client-integration-tests; do
    if [ -d "${module}" ]; then
      yetus_debug "hadoop personality: test module '${module}' is present."
      modules+=(-pl "${module}")
    fi
  done
  if [ ${#modules[@]} -eq 0 ]; then
    echo "hadoop personality: no test modules present, skipping check of client artifacts."
    return 0
  fi

  big_console_header "Checking client artifacts on ${repostatus} with shaded clients"

  extra=(
    "-Dtest=NoUnitTests"
    "-Dmaven.javadoc.skip=true"
    "-Dcheckstyle.skip=true"
    "-Dspotbugs.skip=true"
  )

  if [[ "$IS_WINDOWS" && "$IS_WINDOWS" == 1 ]]; then
    # shellcheck disable=SC2206
    extra+=(${WINDOWS_FLAGS[*]})

    # The shaded client integration tests require the Hadoop jars that were
    # just built to be installed in the local maven repository.
    # shellcheck disable=SC2086
    echo_and_redirect "${logfile}" \
        "${MAVEN}" "${MAVEN_ARGS[@]}" install -fae --batch-mode \
        -DskipTests -DskipDocs -Pdist -Dtar ${extra[*]}

    # The shaded client integration tests spawn a MiniDFS and MiniYARN cluster
    # for testing. Both of them require winutils.exe to be found in the PATH
    # and HADOOP_HOME to be set.
    if load_hadoop_version; then
      export HADOOP_HOME="${SOURCEDIR}/hadoop-dist/target/hadoop-${HADOOP_VERSION}-SNAPSHOT"
      WIN_HADOOP_HOME=$(cygpath -w -a "${HADOOP_HOME}")
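      # the ';' separator and backslash below are deliberate Windows-style
      # PATH syntax (presumably because the spawned MiniDFS/MiniYARN JVMs
      # consume PATH as native Windows processes)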
      export PATH="${PATH};${WIN_HADOOP_HOME}\bin"
    else
      yetus_error "[WARNING] Unable to extract the Hadoop version and thus HADOOP_HOME is not set. Some tests may fail."
    fi
  fi

  # shellcheck disable=SC2086
  echo_and_redirect "${logfile}" \
      "${MAVEN}" "${MAVEN_ARGS[@]}" verify -fae --batch-mode -am "${modules[@]}" ${extra[*]}

  big_console_header "Checking client artifacts on ${repostatus} with non-shaded clients"

  # shellcheck disable=SC2086
  echo_and_redirect "${logfile}" \
      "${MAVEN}" "${MAVEN_ARGS[@]}" verify -fae --batch-mode -am \
      "${modules[@]}" \
      -DskipShade -Dtest=NoUnitTests -Dmaven.javadoc.skip=true -Dcheckstyle.skip=true \
      -Dspotbugs.skip=true ${extra[*]}

  count=$("${GREP}" -c '\[ERROR\]' "${logfile}")
  if [[ ${count} -gt 0 ]]; then
    add_vote_table -1 shadedclient "${repostatus} has errors when building and testing our client artifacts."
    return 1
  fi

  add_vote_table +1 shadedclient "${repostatus} has no errors when building and testing our client artifacts."
  return 0
}