#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SHELLDOC-IGNORE
#
# Override these to match Apache Hadoop's requirements
personality_plugins "all,-ant,-gradle,-scalac,-scaladoc"
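
# Note: a personality script such as this one is normally supplied to Apache
# Yetus precommit rather than executed directly; an illustrative invocation
# (the issue id and the path to this file are placeholders, not real values):
#
#   test-patch --personality=<path-to-this-file> HADOOP-12345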

# These flags are needed to run Yetus against Hadoop on Windows.
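# BASH_EXECUTABLE, VCPKG_INSTALLED_PACKAGES and CMAKE_TOOLCHAIN_FILE are
# assumed to be exported by the calling environment (for example, the
# Windows CI image) before this personality is loaded.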
WINDOWS_FLAGS=(
  "-Pnative-win"
  "-Dhttps.protocols=TLSv1.2"
  "-Drequire.openssl"
  "-Drequire.test.libhadoop"
  "-Dshell-executable=${BASH_EXECUTABLE}"
  "-Dopenssl.prefix=${VCPKG_INSTALLED_PACKAGES}"
  "-Dcmake.prefix.path=${VCPKG_INSTALLED_PACKAGES}"
  "-Dwindows.cmake.toolchain.file=${CMAKE_TOOLCHAIN_FILE}"
  "-Dwindows.cmake.build.type=RelWithDebInfo"
  "-Dwindows.build.hdfspp.dll=off"
  "-Dwindows.no.sasl=on"
  "-Duse.platformToolsetVersion=v142"
)

## @description Globals specific to this personality
## @audience private
## @stability evolving
function personality_globals
{
  # shellcheck disable=SC2034
  BUILDTOOL=maven
  # shellcheck disable=SC2034
  PATCH_BRANCH_DEFAULT=trunk
  # shellcheck disable=SC2034
  PATCH_NAMING_RULE="https://cwiki.apache.org/confluence/display/HADOOP/How+To+Contribute"
  # shellcheck disable=SC2034
  JIRA_ISSUE_RE='^(HADOOP|YARN|MAPREDUCE|HDFS)-[0-9]+$'
  # shellcheck disable=SC2034
  GITHUB_REPO_DEFAULT="apache/hadoop"

  HADOOP_HOMEBREW_DIR=${HADOOP_HOMEBREW_DIR:-$(brew --prefix 2>/dev/null)}
  if [[ -z "${HADOOP_HOMEBREW_DIR}" ]]; then
    HADOOP_HOMEBREW_DIR=/usr/local
  fi
}
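
## @description Parse personality-specific arguments that point at locally
## @description installed native library prefixes,
## @description e.g. --hadoop-openssl-prefix=/usr/local/openssl
## @description (the path above is illustrative only)
## @audience private
## @stability evolving
## @param args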
function personality_parse_args
{
  declare i

  for i in "$@"; do
    case ${i} in
      --hadoop-isal-prefix=*)
        delete_parameter "${i}"
        ISAL_HOME=${i#*=}
      ;;
      --hadoop-openssl-prefix=*)
        delete_parameter "${i}"
        OPENSSL_HOME=${i#*=}
      ;;
      --hadoop-snappy-prefix=*)
        delete_parameter "${i}"
        SNAPPY_HOME=${i#*=}
      ;;
    esac
  done
}

## @description Calculate the actual module ordering
## @audience private
## @stability evolving
## @param ordering
function hadoop_order
{
  declare ordering=$1
  declare hadoopm

  if [[ ${ordering} = normal ]]; then
    hadoopm="${CHANGED_MODULES[*]}"
  elif [[ ${ordering} = union ]]; then
    hadoopm="${CHANGED_UNION_MODULES}"
  elif [[ ${ordering} = mvnsrc ]]; then
    hadoopm="${MAVEN_SRC_MODULES[*]}"
  elif [[ ${ordering} = mvnsrctest ]]; then
    hadoopm="${MAVEN_SRCTEST_MODULES[*]}"
  else
    hadoopm="${ordering}"
  fi

  echo "${hadoopm}"
}

## @description Retrieves the Hadoop project version defined in the root pom.xml
## @audience private
## @stability evolving
## @returns 0 on success, 1 on failure
function load_hadoop_version
{
  if [[ -f "${BASEDIR}/pom.xml" ]]; then
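    # e.g. "<version>3.4.0-SNAPSHOT</version>" in the root pom.xml yields
    # HADOOP_VERSION=3.4.0 (the version shown is illustrative)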
    HADOOP_VERSION=$(grep '<version>' "${BASEDIR}/pom.xml" \
        | head -1 \
        | "${SED}" -e 's|^ *<version>||' -e 's|</version>.*$||' \
        | cut -f1 -d- )
    return 0
  else
    return 1
  fi
}

## @description Determine if it is safe to run parallel tests
## @audience private
## @stability evolving
## @param ordering
function hadoop_test_parallel
{
  if load_hadoop_version; then
    export HADOOP_VERSION
  else
    return 1
  fi

  hmajor=${HADOOP_VERSION%%\.*}
  hmajorminor=${HADOOP_VERSION%\.*}
  hminor=${hmajorminor##*\.}
  # ... and just for reference
  #hmicro=${HADOOP_VERSION##*\.}

  # Apache Hadoop v2.8.0 was the first release with reliably
  # working parallel unit tests
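  # e.g. HADOOP_VERSION=2.7.4 gives hmajor=2 and hminor=7, so parallel tests
  # stay disabled, while 3.4.0 gives hmajor=3 and passes the check below
  # (both version numbers are illustrative)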
  if [[ ${hmajor} -lt 3 && ${hminor} -lt 8 ]]; then
    return 1
  fi
  return 0
}

## @description Install extra modules for unit tests
## @audience private
## @stability evolving
## @param ordering
function hadoop_unittest_prereqs
{
  declare input=$1
  declare mods
  declare need_common=0
  declare building_common=0
  declare module
  declare flags
  declare fn

  # prior to running unit tests, hdfs needs libhadoop.so built
  # if we're building root, then this extra work is moot
  # shellcheck disable=SC2086
  mods=$(hadoop_order ${input})

  for module in ${mods}; do
    if [[ ${module} = hadoop-hdfs-project* ]]; then
      need_common=1
    elif [[ ${module} = hadoop-common-project/hadoop-common
        || ${module} = hadoop-common-project ]]; then
      building_common=1
    elif [[ ${module} = . ]]; then
      return
    fi
  done

  # Windows builds *ALWAYS* need hadoop-common compiled
  case ${OSTYPE} in
    Windows_NT|CYGWIN*|MINGW*|MSYS*)
      need_common=1
    ;;
  esac

  if [[ ${need_common} -eq 1
      && ${building_common} -eq 0 ]]; then
    echo "unit test pre-reqs:"
    module="hadoop-common-project/hadoop-common"
    fn=$(module_file_fragment "${module}")
    flags="$(hadoop_native_flags) $(yarn_ui2_flag)"
    pushd "${BASEDIR}/${module}" >/dev/null || return 1
    # shellcheck disable=SC2086
    echo_and_redirect "${PATCH_DIR}/maven-unit-prereq-${fn}-install.txt" \
      "${MAVEN}" "${MAVEN_ARGS[@]}" install -DskipTests ${flags}
    popd >/dev/null || return 1
  fi
}

## @description Calculate the flags/settings for yarn-ui v2 build
## @description based upon the OS
## @audience private
## @stability evolving
function yarn_ui2_flag
{
  if [[ ${BUILD_NATIVE} != true ]]; then
    return
  fi

  # For now this has only been verified on Linux and OS X; don't enable
  # the profile on Windows until it has been tested there.
  case ${OSTYPE} in
    Linux)
      # shellcheck disable=SC2086
      echo -Pyarn-ui
    ;;
    Darwin)
      echo -Pyarn-ui
    ;;
    *)
      # Do nothing
    ;;
  esac
}

## @description Calculate the flags/settings for native code
## @description based upon the OS
## @audience private
## @stability evolving
function hadoop_native_flags
{
  if [[ ${BUILD_NATIVE} != true ]]; then
    return
  fi

  declare -a args

  # Based upon HADOOP-11937
  #
  # Some notes:
  #
  # - getting fuse to compile on anything but Linux
  #   is always tricky.
  # - Darwin assumes homebrew is in use.
  # - HADOOP-12027 required for bzip2 on OS X.
  # - bzip2 is broken in lots of places
  #   (the shared library is considered experimental),
  #   e.g., HADOOP-12027 for OS X, so no -Drequire.bzip2
  #

  args=("-Drequire.test.libhadoop")

  if [[ -d "${ISAL_HOME}/include" ]]; then
    args=("${args[@]}" "-Disal.prefix=${ISAL_HOME}")
  fi

  if [[ -d "${OPENSSL_HOME}/include" ]]; then
    args=("${args[@]}" "-Dopenssl.prefix=${OPENSSL_HOME}")
  elif [[ -d "${HADOOP_HOMEBREW_DIR}/opt/openssl/" ]]; then
    args=("${args[@]}" "-Dopenssl.prefix=${HADOOP_HOMEBREW_DIR}/opt/openssl/")
  fi

  if [[ -d "${SNAPPY_HOME}/include" ]]; then
    args=("${args[@]}" "-Dsnappy.prefix=${SNAPPY_HOME}")
  elif [[ -f "${HADOOP_HOMEBREW_DIR}/include/snappy.h" ]]; then
    args=("${args[@]}" "-Dsnappy.prefix=${HADOOP_HOMEBREW_DIR}/opt/snappy")
  fi

  case ${OSTYPE} in
    Linux)
      # shellcheck disable=SC2086
      echo \
        -Pnative \
        -Drequire.fuse \
        -Drequire.openssl \
        -Drequire.snappy \
        -Drequire.valgrind \
        -Drequire.zstd \
        "${args[@]}"
    ;;
    Darwin)
      echo \
        "${args[@]}" \
        -Pnative \
        -Drequire.snappy \
        -Drequire.openssl
    ;;
    Windows_NT|CYGWIN*|MINGW*|MSYS*)
      echo \
        "${args[@]}" \
        -Drequire.snappy \
        -Pdist \
        -Dtar \
        "${WINDOWS_FLAGS[@]}"
    ;;
    *)
      echo \
        "${args[@]}"
    ;;
  esac
}

## @description Queue up modules for this personality
## @audience private
## @stability evolving
## @param repostatus
## @param testtype
function personality_modules
{
  declare repostatus=$1
  declare testtype=$2
  declare extra=""
  declare ordering="normal"
  declare needflags=false
  declare foundbats=false
  declare flags
  declare fn
  declare i
  declare hadoopm

  yetus_debug "Personality: ${repostatus} ${testtype}"

  clear_personality_queue

  case ${testtype} in
    asflicense)
      # this is very fast and provides the full path if we do it from
      # the root of the source
      personality_enqueue_module .
      return
    ;;
    checkstyle)
      ordering="union"
      extra="-DskipTests"
    ;;
    compile)
      ordering="union"
      extra="-DskipTests"
      needflags=true

      # if something in common changed, we build the whole world
      if [[ "${CHANGED_MODULES[*]}" =~ hadoop-common ]]; then
        yetus_debug "hadoop personality: javac + hadoop-common = ordering set to . "
        ordering="."
      fi
    ;;
    distclean)
      ordering="."
      extra="-DskipTests"
    ;;
    javadoc)
      if [[ "${CHANGED_MODULES[*]}" =~ \. ]]; then
        ordering=.
      fi

      if [[ "${repostatus}" = patch && "${BUILDMODE}" = patch ]]; then
        echo "javadoc pre-reqs:"
        for i in hadoop-project \
          hadoop-common-project/hadoop-annotations; do
          fn=$(module_file_fragment "${i}")
          pushd "${BASEDIR}/${i}" >/dev/null || return 1
          echo "cd ${i}"
          echo_and_redirect "${PATCH_DIR}/maven-${fn}-install.txt" \
            "${MAVEN}" "${MAVEN_ARGS[@]}" install
          popd >/dev/null || return 1
        done
      fi
      extra="-Pdocs -DskipTests"
    ;;
    mvneclipse)
      if [[ "${CHANGED_MODULES[*]}" =~ \. ]]; then
        ordering=.
      fi
    ;;
    mvninstall)
      extra="-DskipTests"
      if [[ "${repostatus}" = branch || "${BUILDMODE}" = full ]]; then
        ordering=.
      fi
    ;;
    mvnsite)
      if [[ "${CHANGED_MODULES[*]}" =~ \. ]]; then
        ordering=.
      fi
    ;;
    unit)
      if [[ "$IS_WINDOWS" && "$IS_WINDOWS" == 1 && (-z "$IS_NIGHTLY_BUILD" || "$IS_NIGHTLY_BUILD" == 0) ]]; then
        echo "Won't run unit tests for Windows in pre-commit CI"
        return
      fi

      extra="-Dsurefire.rerunFailingTestsCount=2"
      if [[ "${BUILDMODE}" = full ]]; then
        ordering=mvnsrc
      elif [[ "${CHANGED_MODULES[*]}" =~ \. ]]; then
        ordering=.
      fi

      if [[ ${TEST_PARALLEL} = "true" ]] ; then
        if hadoop_test_parallel; then
          extra="${extra} -Pparallel-tests"
          if [[ -n ${TEST_THREADS:-} ]]; then
            extra="${extra} -DtestsThreadCount=${TEST_THREADS}"
          fi
        fi
      fi
      needflags=true

      hadoop_unittest_prereqs "${ordering}"

      if ! verify_needed_test javac; then
        yetus_debug "hadoop: javac not requested"
        if ! verify_needed_test native; then
          yetus_debug "hadoop: native not requested"
          yetus_debug "hadoop: adding -DskipTests to unit test"
          extra="-DskipTests"
        fi
      fi

      for i in "${CHANGED_FILES[@]}"; do
        if [[ "${i}" =~ \.bats ]]; then
          foundbats=true
        fi
      done

      if ! verify_needed_test shellcheck && [[ ${foundbats} = false ]]; then
        yetus_debug "hadoop: NO shell code change detected; disabling shelltest profile"
        extra="${extra} -P!shelltest"
      else
        extra="${extra} -Pshelltest"
      fi
    ;;
    *)
      extra="-DskipTests"
    ;;
  esac

  if [[ ${needflags} = true ]]; then
    flags="$(hadoop_native_flags) $(yarn_ui2_flag)"
    extra="${extra} ${flags}"
  fi

  if [[ "$IS_WINDOWS" && "$IS_WINDOWS" == 1 ]]; then
    extra="-Ptest-patch -Pdist -Dtar ${WINDOWS_FLAGS[*]} ${extra}"
  fi

  for module in $(hadoop_order ${ordering}); do
    # shellcheck disable=SC2086
    personality_enqueue_module ${module} ${extra}
  done
}

## @description Add tests based upon personality needs
## @audience private
## @stability evolving
## @param filename
function personality_file_tests
{
  declare filename=$1

  yetus_debug "Using Hadoop-specific personality_file_tests"

  if [[ ${filename} =~ src/main/webapp ]]; then
    yetus_debug "tests/webapp: ${filename}"
    add_test shadedclient
  elif [[ ${filename} =~ \.sh
       || ${filename} =~ \.cmd
       || ${filename} =~ src/scripts
       || ${filename} =~ src/test/scripts
       || ${filename} =~ src/main/bin
       || ${filename} =~ shellprofile\.d
       || ${filename} =~ src/main/conf
      ]]; then
    yetus_debug "tests/shell: ${filename}"
    add_test mvnsite
    add_test unit
  elif [[ ${filename} =~ \.md$
       || ${filename} =~ \.md\.vm$
       || ${filename} =~ src/site
      ]]; then
    yetus_debug "tests/site: ${filename}"
    add_test mvnsite
  elif [[ ${filename} =~ \.c$
       || ${filename} =~ \.cc$
       || ${filename} =~ \.h$
       || ${filename} =~ \.hh$
       || ${filename} =~ \.proto$
       || ${filename} =~ \.cmake$
       || ${filename} =~ CMakeLists.txt
      ]]; then
    yetus_debug "tests/units: ${filename}"
    add_test compile
    add_test cc
    add_test mvnsite
    add_test javac
    add_test unit
  elif [[ ${filename} =~ build.xml$
       || ${filename} =~ pom.xml$
       || ${filename} =~ \.java$
       || ${filename} =~ src/main
      ]]; then
    yetus_debug "tests/javadoc+units: ${filename}"
    add_test compile
    add_test javac
    add_test javadoc
    add_test mvninstall
    add_test mvnsite
    add_test unit
    add_test shadedclient
  fi

  # if we change anything in here, e.g. the test scripts
  # then run the client artifact tests
  if [[ ${filename} =~ hadoop-client-modules ]]; then
    add_test shadedclient
  fi

  if [[ ${filename} =~ src/test ]]; then
    yetus_debug "tests: src/test"
    add_test unit
  fi

  if [[ ${filename} =~ \.java$ ]]; then
    add_test spotbugs
  fi
}

## @description Image to print on success
## @audience private
## @stability evolving
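# The printf calls below emit base64-encoded, cowsay-style ASCII art of a
# mascot saying "Success!"; to preview it locally, pipe the function's output
# through a base64 decoder (e.g. "base64 -d" with GNU coreutils).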
function hadoop_console_success
{
  printf "IF9fX19fX19fX18gCjwgU3VjY2VzcyEgPgogLS0tLS0tLS0tLSAKIFwgICAg";
  printf "IC9cICBfX18gIC9cCiAgXCAgIC8vIFwvICAgXC8gXFwKICAgICAoKCAgICBP";
  printf "IE8gICAgKSkKICAgICAgXFwgLyAgICAgXCAvLwogICAgICAgXC8gIHwgfCAg";
  printf "XC8gCiAgICAgICAgfCAgfCB8ICB8ICAKICAgICAgICB8ICB8IHwgIHwgIAog";
  printf "ICAgICAgIHwgICBvICAgfCAgCiAgICAgICAgfCB8ICAgfCB8ICAKICAgICAg";
  printf "ICB8bXwgICB8bXwgIAo"
}

###################################################
# Hadoop project specific check of IT for shaded artifacts

add_test_type shadedclient
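
# Registering the "shadedclient" test type above means Yetus will invoke the
# shadedclient_* hooks defined below (shadedclient_initialize and
# shadedclient_rebuild) when this check is scheduled.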

## @description check for test modules and add test/plugins as needed
## @audience private
## @stability evolving
function shadedclient_initialize
{
  maven_add_install shadedclient
}

## @description build client facing shaded and non-shaded artifacts and test them
## @audience private
## @stability evolving
## @param repostatus
function shadedclient_rebuild
{
  declare repostatus=$1
  declare logfile="${PATCH_DIR}/${repostatus}-shadedclient.txt"
  declare module
  declare -a modules=()

  yetus_debug "hadoop personality: seeing if we need the test of client artifacts."
  for module in hadoop-client-modules/hadoop-client-check-invariants \
      hadoop-client-modules/hadoop-client-check-test-invariants \
      hadoop-client-modules/hadoop-client-integration-tests; do
    if [ -d "${module}" ]; then
      yetus_debug "hadoop personality: test module '${module}' is present."
      modules+=(-pl "${module}")
    fi
  done
  if [ ${#modules[@]} -eq 0 ]; then
    echo "hadoop personality: no test modules present, skipping check of client artifacts."
    return 0
  fi

  big_console_header "Checking client artifacts on ${repostatus} with shaded clients"

  extra=(
    "-Dtest=NoUnitTests"
    "-Dmaven.javadoc.skip=true"
    "-Dcheckstyle.skip=true"
    "-Dspotbugs.skip=true"
  )

  if [[ "$IS_WINDOWS" && "$IS_WINDOWS" == 1 ]]; then
    # shellcheck disable=SC2206
    extra+=(${WINDOWS_FLAGS[*]})

    # The shaded client integration tests require the Hadoop jars that were just built to be
    # installed in the local maven repository.
    # shellcheck disable=SC2086
    echo_and_redirect "${logfile}" \
      "${MAVEN}" "${MAVEN_ARGS[@]}" install -fae --batch-mode \
      -DskipTests -DskipDocs -Pdist -Dtar ${extra[*]}

    # The shaded client integration tests spawn a MiniDFS and MiniYARN cluster for testing. Both of
    # them require winutils.exe to be found in the PATH and HADOOP_HOME to be set.
    if load_hadoop_version; then
      export HADOOP_HOME="${SOURCEDIR}/hadoop-dist/target/hadoop-${HADOOP_VERSION}-SNAPSHOT"
      WIN_HADOOP_HOME=$(cygpath -w -a "${HADOOP_HOME}")
      export PATH="${PATH};${WIN_HADOOP_HOME}\bin"
    else
      yetus_error "[WARNING] Unable to extract the Hadoop version and thus HADOOP_HOME is not set. Some tests may fail."
    fi
  fi

  # shellcheck disable=SC2086
  echo_and_redirect "${logfile}" \
    "${MAVEN}" "${MAVEN_ARGS[@]}" verify -fae --batch-mode -am "${modules[@]}" ${extra[*]}

  big_console_header "Checking client artifacts on ${repostatus} with non-shaded clients"

  # shellcheck disable=SC2086
  echo_and_redirect "${logfile}" \
    "${MAVEN}" "${MAVEN_ARGS[@]}" verify -fae --batch-mode -am \
      "${modules[@]}" \
      -DskipShade -Dtest=NoUnitTests -Dmaven.javadoc.skip=true -Dcheckstyle.skip=true \
      -Dspotbugs.skip=true ${extra[*]}

  count=$("${GREP}" -c '\[ERROR\]' "${logfile}")
  if [[ ${count} -gt 0 ]]; then
    add_vote_table -1 shadedclient "${repostatus} has errors when building and testing our client artifacts."
    return 1
  fi

  add_vote_table +1 shadedclient "${repostatus} has no errors when building and testing our client artifacts."
  return 0
}
|