#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

# This script is useful to perform basic sanity tests for a given
# Hadoop RC. It checks the checksums, the signatures, the Rat check,
# the build from source, and building the tarball from the source.

set -e -o pipefail

usage() {
  SCRIPT=$(basename "${BASH_SOURCE[0]}")

  cat << __EOF
hadoop-vote. A script for a standard release vote, which verifies the following items:
1. Checksum of sources and binaries
2. Signature of sources and binaries
3. Rat check
4. Built from source
5. Built tar from source

Usage: ${SCRIPT} -s | --source <url> [-k | --key <signature>] [-f | --keys-file-url <url>] [-o | --output-dir </path/to/use>] [-D property[=value]] [-P profiles]
       ${SCRIPT} -h | --help

  -h | --help                  Show this screen.
  -s | --source '<url>'        A URL pointing to the release candidate sources and binaries,
                               e.g. https://dist.apache.org/repos/dist/dev/hadoop/hadoop-<version>RC0/
  -k | --key '<signature>'     A signature of the public key, e.g. 9AD2AE49
  -f | --keys-file-url '<url>' The URL of the KEYS file; defaults to
                               https://downloads.apache.org/hadoop/common/KEYS
  -o | --output-dir '</path>'  Directory which will hold the stdout and stderr of each verification target
  -D |                         List of Maven properties to set for the mvn invocations,
                               e.g. <-D hbase.profile=2.0 -D skipTests>. Defaults to unset.
  -P |                         List of Maven profiles to set for the build from source, e.g. <-P native -P yarn-ui>
__EOF
}
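
# Example invocation (the RC URL, key ID, and output path below are illustrative only):
#   ./hadoop-vote.sh -s https://dist.apache.org/repos/dist/dev/hadoop/hadoop-3.3.6-RC0/ \
#     -k 9AD2AE49 -o /tmp/hadoop-rc-vote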

MVN_PROPERTIES=()
MVN_PROFILES=()

while ((${#})); do
  case "${1}" in
    -h | --help )
      usage; exit 0 ;;
    -s | --source )
      SOURCE_URL="${2}"; shift 2 ;;
    -k | --key )
      SIGNING_KEY="${2}"; shift 2 ;;
    -f | --keys-file-url )
      KEY_FILE_URL="${2}"; shift 2 ;;
    -o | --output-dir )
      OUTPUT_DIR="${2}"; shift 2 ;;
    -D )
      MVN_PROPERTIES+=("-D ${2}"); shift 2 ;;
    -P )
      MVN_PROFILES+=("-P ${2}"); shift 2 ;;
    * )
      usage >&2; exit 1 ;;
  esac
done
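
# With, e.g., `-D skipShade -P native` on the command line (example values, not defaults),
# the loop above collects MVN_PROPERTIES=("-D skipShade") and MVN_PROFILES=("-P native").
# MVN_PROPERTIES is passed to every mvn invocation below; MVN_PROFILES only to the build from source.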

# Source url must be provided
if [ -z "${SOURCE_URL}" ]; then
  usage
  exit 1
fi

cat << __EOF
Although this tool helps verify the Hadoop RC build and unit tests,
operators may still consider verifying the following manually:
1. Verify the API compatibility report
2. Integration/performance/benchmark tests
3. Object store specific integration tests against an endpoint
4. Verify overall unit test stability from Jenkins builds or locally
5. Other concerns if any
__EOF

[[ "${SOURCE_URL}" != */ ]] && SOURCE_URL="${SOURCE_URL}/"

HADOOP_RC_VERSION=$(tr "/" "\n" <<< "${SOURCE_URL}" | tail -n2)
HADOOP_VERSION=$(echo "${HADOOP_RC_VERSION}" | sed -e 's/-RC[0-9]//g' | sed -e 's/hadoop-//g')
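# Worked example (URL illustrative): for SOURCE_URL=https://dist.apache.org/repos/dist/dev/hadoop/hadoop-3.3.6-RC0/,
# HADOOP_RC_VERSION resolves to "hadoop-3.3.6-RC0" and HADOOP_VERSION to "3.3.6".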

JAVA_VERSION=$(java -version 2>&1 | cut -f3 -d' ' | head -n1 | sed -e 's/"//g')

OUTPUT_DIR="${OUTPUT_DIR:-$(pwd)}"
if [ ! -d "${OUTPUT_DIR}" ]; then
  echo "Output directory ${OUTPUT_DIR} does not exist, please create it before running this script."
  exit 1
fi
OUTPUT_PATH_PREFIX="${OUTPUT_DIR}"/"${HADOOP_RC_VERSION}"

# default value for verification targets, 0 = failed
SIGNATURE_PASSED=0
CHECKSUM_PASSED=0
RAT_CHECK_PASSED=0
BUILD_FROM_SOURCE_PASSED=0
BUILD_TAR_FROM_SOURCE_PASSED=0

function download_and_import_keys() {
  KEY_FILE_URL="${KEY_FILE_URL:-https://downloads.apache.org/hadoop/common/KEYS}"
  echo "Obtain and import the publisher key(s) from ${KEY_FILE_URL}"
  # download the keys file into file KEYS
  wget -O KEYS "${KEY_FILE_URL}"
  gpg --import KEYS
  if [ -n "${SIGNING_KEY}" ]; then
    gpg --list-keys "${SIGNING_KEY}"
  fi
}
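
# Optional manual step (a suggestion, not part of the scripted checks): compare the full
# fingerprint of the signer against the one announced in the vote email,
# e.g. `gpg --fingerprint "${SIGNING_KEY}"`.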

function download_release_candidate () {
  # Recursively fetch all files from the release candidate repo:
  # -r recursive, -np do not ascend to the parent directory, -N only download newer files,
  # -nH no hostname directory, --cut-dirs 4 drop the leading path components
  # (e.g. repos/dist/dev/hadoop) so the artifacts land directly under ${HADOOP_RC_VERSION}/.
  wget -r -np -N -nH --cut-dirs 4 "${SOURCE_URL}"
}

function verify_signatures() {
  rm -f "${OUTPUT_PATH_PREFIX}"_verify_signatures
  for file in *.tar.gz; do
    gpg --verify "${file}".asc "${file}" 2>&1 | tee -a "${OUTPUT_PATH_PREFIX}"_verify_signatures && SIGNATURE_PASSED=1 || SIGNATURE_PASSED=0
  done
}

function verify_checksums() {
  rm -f "${OUTPUT_PATH_PREFIX}"_verify_checksums
  SHA_EXT=$(find . -name "*.sha*" | awk -F '.' '{ print $NF }' | head -n 1)
  for file in *.tar.gz; do
    sha512sum --tag "${file}" > "${file}"."${SHA_EXT}".tmp
    diff "${file}"."${SHA_EXT}".tmp "${file}"."${SHA_EXT}" 2>&1 | tee -a "${OUTPUT_PATH_PREFIX}"_verify_checksums && CHECKSUM_PASSED=1 || CHECKSUM_PASSED=0
    rm -f "${file}"."${SHA_EXT}".tmp
  done
}
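
# Note: the diff-based comparison above assumes the published .sha512 files were generated in the
# same BSD-style format that `sha512sum --tag` emits; if a release published plain `sha512sum`
# output instead, the diff would report a mismatch even for identical digests.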

function unzip_from_source() {
  tar -zxvf hadoop-"${HADOOP_VERSION}"-src.tar.gz
  # The cd persists for the rest of the run, so the Rat check and the builds below
  # operate inside the extracted source tree.
  cd hadoop-"${HADOOP_VERSION}"-src
}

function rat_test() {
  rm -f "${OUTPUT_PATH_PREFIX}"_rat_test
  mvn clean apache-rat:check "${MVN_PROPERTIES[@]}" 2>&1 | tee "${OUTPUT_PATH_PREFIX}"_rat_test && RAT_CHECK_PASSED=1
}

function build_from_source() {
  rm -f "${OUTPUT_PATH_PREFIX}"_build_from_source
  # No unit test run.
  mvn clean install "${MVN_PROPERTIES[@]}" -DskipTests "${MVN_PROFILES[@]}" 2>&1 | tee "${OUTPUT_PATH_PREFIX}"_build_from_source && BUILD_FROM_SOURCE_PASSED=1
}

function build_tar_from_source() {
  rm -f "${OUTPUT_PATH_PREFIX}"_build_tar_from_source
  # No unit test run.
  mvn clean package "${MVN_PROPERTIES[@]}" -Pdist -DskipTests -Dtar -Dmaven.javadoc.skip=true 2>&1 | tee "${OUTPUT_PATH_PREFIX}"_build_tar_from_source && BUILD_TAR_FROM_SOURCE_PASSED=1
}
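
# On success, the distribution tarball is typically produced under hadoop-dist/target/
# (the exact layout may vary between release lines).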

# Run a single verification step; if it fails, print the summary so far and exit.
function execute() {
  ${1} || print_when_exit
}

function print_when_exit() {
  cat << __EOF
* Signature: $( ((SIGNATURE_PASSED)) && echo "ok" || echo "failed" )
* Checksum : $( ((CHECKSUM_PASSED)) && echo "ok" || echo "failed" )
* Rat check (${JAVA_VERSION}): $( ((RAT_CHECK_PASSED)) && echo "ok" || echo "failed" )
  - mvn clean apache-rat:check ${MVN_PROPERTIES[@]}
* Built from source (${JAVA_VERSION}): $( ((BUILD_FROM_SOURCE_PASSED)) && echo "ok" || echo "failed" )
  - mvn clean install ${MVN_PROPERTIES[@]} -DskipTests ${MVN_PROFILES[@]}
* Built tar from source (${JAVA_VERSION}): $( ((BUILD_TAR_FROM_SOURCE_PASSED)) && echo "ok" || echo "failed" )
  - mvn clean package ${MVN_PROPERTIES[@]} -Pdist -DskipTests -Dtar -Dmaven.javadoc.skip=true
__EOF
  if ((CHECKSUM_PASSED)) && ((SIGNATURE_PASSED)) && ((RAT_CHECK_PASSED)) && ((BUILD_FROM_SOURCE_PASSED)) && ((BUILD_TAR_FROM_SOURCE_PASSED)) ; then
    exit 0
  fi
  exit 1
}

# Main flow: fetch keys and artifacts, then run each verification step in order.
pushd "${OUTPUT_DIR}"

download_and_import_keys
download_release_candidate

pushd "${HADOOP_RC_VERSION}"

execute verify_signatures
execute verify_checksums
execute unzip_from_source
execute rat_test
execute build_from_source
execute build_tar_from_source

popd
popd

print_when_exit