#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

if [[ -z "${BASH_VERSINFO[0]}" ]] \
   || [[ "${BASH_VERSINFO[0]}" -lt 3 ]] \
   || [[ "${BASH_VERSINFO[0]}" -eq 3 && "${BASH_VERSINFO[1]}" -lt 2 ]]; then
  echo "bash v3.2+ is required. Sorry."
  exit 1
fi

function centered_text
{
  local text="$*"
  local spacing=$(( (75+${#text}) /2 ))
  printf "%*s\n" ${spacing} "${text}"
}

function big_console_header
{
  printf "\n\n"
  echo "****************************************************************************"
  centered_text "${@}"
  echo "****************************************************************************"
  printf "\n\n"
}

## @description  Given a filename or dir, return the absolute version of it
## @audience     public
## @stability    stable
## @param        directory
## @replaceable  no
## @return       0 success
## @return       1 failure
## @return       stdout abspath
function hadoop_abs
{
  declare obj=$1
  declare dir
  declare fn

  if [[ ! -e ${obj} ]]; then
    return 1
  elif [[ -d ${obj} ]]; then
    dir=${obj}
  else
    dir=$(dirname -- "${obj}")
    fn=$(basename -- "${obj}")
    fn="/${fn}"
  fi

  dir=$(cd -P -- "${dir}" >/dev/null 2>/dev/null && pwd -P)
  if [[ $? = 0 ]]; then
    echo "${dir}${fn}"
    return 0
  fi
  return 1
}
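
# Illustrative use of hadoop_abs (the path below is hypothetical):
#   hadoop_abs "dev-support/../pom.xml"   # prints e.g. /home/user/hadoop/pom.xml
# Symlinks are resolved by the `cd -P`/`pwd -P` pair above.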

## @description  Print a message to stderr
## @audience     public
## @stability    stable
## @replaceable  no
## @param        string
function hadoop_error
{
  echo "$*" 1>&2
}

function run_and_redirect
{
  declare logfile=$1
  shift
  declare res

  echo "\$ ${*} > ${logfile} 2>&1"
  # to the log
  {
    date
    echo "cd $(pwd)"
    echo "${*}"
  } > "${logfile}"

  # run the actual command
  "${@}" >> "${logfile}" 2>&1
  res=$?
  if [[ ${res} != 0 ]]; then
    echo
    echo "Failed!"
    echo
    exit "${res}"
  fi
}
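
# Illustrative call (the log path is hypothetical):
#   run_and_redirect /tmp/mvn_clean.log mvn clean
# writes a date/cwd/command header to the log, appends the command's
# combined stdout and stderr, and exits the script on a non-zero status.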

function hadoop_native_flags
{
  # modified version of the Yetus personality

  if [[ ${NATIVE} != true ]]; then
    return
  fi

  # Based upon HADOOP-11937
  #
  # Some notes:
  #
  # - getting fuse to compile on anything but Linux
  #   is always tricky.
  # - Darwin assumes homebrew is in use.
  # - HADOOP-12027 required for bzip2 on OS X.
  # - bzip2 is broken in lots of places,
  #   e.g., HADOOP-12027 for OS X, so no -Drequire.bzip2
  #
  case "${OSNAME}" in
    Linux)
      # shellcheck disable=SC2086
      echo -Pnative -Drequire.snappy -Drequire.openssl -Drequire.fuse
    ;;
    Darwin)
      echo \
        -Pnative -Drequire.snappy \
        -Drequire.openssl \
        -Dopenssl.prefix=/usr/local/opt/openssl/ \
        -Dopenssl.include=/usr/local/opt/openssl/include \
        -Dopenssl.lib=/usr/local/opt/openssl/lib
    ;;
    *)
      # shellcheck disable=SC2086
      echo \
        -Pnative \
        -Drequire.snappy -Drequire.openssl \
        -Drequire.test.libhadoop
    ;;
  esac
}
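
# Illustrative output: with NATIVE=true on a Linux host the function echoes
#   -Pnative -Drequire.snappy -Drequire.openssl -Drequire.fuse
# which makearelease later splices, deliberately unquoted, into the
# `mvn install` command line.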

# Probe the exit code of a command, and stop the script with a
# contextual error message in the case of failure.
function run()
{
  declare res

  echo "\$ ${*}"
  "${@}"
  res=$?
  if [[ ${res} != 0 ]]; then
    echo
    echo "Failed!"
    echo
    exit "${res}"
  fi
}

function domd5()
{
  # run() echoes the command to stdout, which the redirection would capture;
  # call the checksum tool directly so the .md5 file stays clean
  echo "\$ ${MD5SUM} ${1}"
  "${MD5SUM}" "${1}" > "${1}.md5" || exit 1
}
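
# Illustrative result (file name hypothetical): domd5 foo.tar.gz writes
# the checksum line for foo.tar.gz into foo.tar.gz.md5.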

function header()
{
  echo
  printf "\n\n"
  echo "============================================================================"
  echo "============================================================================"
  centered_text "Hadoop Release Creator"
  echo "============================================================================"
  echo "============================================================================"
  printf "\n\n"
  echo "Version to create      : ${HADOOP_VERSION}"
  echo "Release Candidate Label: ${RC_LABEL##-}"
  echo "Source Version         : ${DEFAULT_HADOOP_VERSION}"
  printf "\n\n"
}

function set_defaults
{
  BINDIR=$(dirname "${BIN}")
  BASEDIR=$(hadoop_abs "${BINDIR}/../..")

  ARTIFACTS_DIR="${BASEDIR}/target/artifacts"

  # Extract Hadoop version from ${BASEDIR}/pom.xml
  DEFAULT_HADOOP_VERSION=$(grep "<version>" "${BASEDIR}/pom.xml" \
    | head -1 \
    | sed -e 's|^ *<version>||' -e 's|</version>.*$||')

  DOCKER=false
  DOCKERCACHE=false
  DOCKERFILE="${BASEDIR}/dev-support/docker/Dockerfile"
  DOCKERRAN=false

  # Extract Java version from ${BASEDIR}/hadoop-project/pom.xml
  # doing this outside of maven means we can do this before
  # the docker container comes up...
  # the trailing 's|..||' strips the leading "1." (e.g. "1.8" -> "8")
  JVM_VERSION=$(grep "<javac.version>" "${BASEDIR}/hadoop-project/pom.xml" \
    | head -1 \
    | sed -e 's|^ *<javac.version>||' -e 's|</javac.version>.*$||' -e 's|..||')

  GIT=$(command -v git)
  GPG=$(command -v gpg)
  GPGAGENT=$(command -v gpg-agent)

  HADOOP_VERSION="${DEFAULT_HADOOP_VERSION}"

  INDOCKER=false
  LOGDIR="${BASEDIR}/patchprocess"

  if [[ -z "${MVN}" ]]; then
    if [[ -n "${MAVEN_HOME}" ]]; then
      MVN=${MAVEN_HOME}/bin/mvn
    else
      MVN=$(command -v mvn)
    fi
  fi

  MD5SUM=$(command -v md5sum)
  if [[ -z "${MD5SUM}" ]]; then
    MD5SUM=$(command -v md5)
  fi

  NATIVE=false
  OSNAME=$(uname -s)

  PUBKEYFILE="https://dist.apache.org/repos/dist/release/hadoop/common/KEYS"
}

function startgpgagent
{
  if [[ "${SIGN}" = true ]]; then
    if [[ -n "${GPGAGENT}" && -z "${GPG_AGENT_INFO}" ]]; then
      echo "starting gpg agent"
      touch "${LOGDIR}/gpgagent.conf"
      # shellcheck disable=SC2046
      eval $("${GPGAGENT}" --daemon \
        --options "${LOGDIR}/gpgagent.conf" \
        --log-file="${LOGDIR}/create-release-gpgagent.log")
      GPGAGENTPID=$(echo "${GPG_AGENT_INFO}" | cut -f 2 -d:)
    fi
  fi
}

function stopgpgagent
{
  if [[ -n "${GPGAGENTPID}" ]]; then
    kill "${GPGAGENTPID}"
  fi
}
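
# GPG_AGENT_INFO (gpg 1.x / 2.0 agents) has the form "<socket>:<pid>:<protocol>",
# e.g. a hypothetical /tmp/gpg-ab12cd/S.gpg-agent:12345:1, which is why
# startgpgagent extracts field 2 with cut to find the pid that stopgpgagent kills.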

function usage
{
  echo "--artifactsdir=[path]   Path to use to store release bits"
  echo "--asfrelease            Make an ASF release"
  echo "--docker                Use Hadoop's Dockerfile for guaranteed environment"
  echo "--dockercache           Use a Docker-private maven cache"
  echo "--logdir=[path]         Path to store logs"
  echo "--mvncache=[path]       Path to the maven cache to use"
  echo "--native                Also build the native components"
  echo "--rc-label=[label]      Add this label to the builds"
  echo "--sign                  Use .gnupg dir to sign the jars"
  echo "--version=[version]     Use an alternative version string"
}
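
# Illustrative invocation (flag values are examples only):
#   dev-support/bin/create-release --asfrelease --docker --dockercache --rc-label=RC0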

function option_parse
{
  declare i

  for i in "$@"; do
    case ${i} in
      --asfrelease)
        ASFRELEASE=true
        NATIVE=true
        SIGN=true
      ;;
      --artifactsdir=*)
        ARTIFACTS_DIR=${i#*=}
      ;;
      --docker)
        DOCKER=true
      ;;
      --dockercache)
        DOCKERCACHE=true
      ;;
      --help)
        usage
        exit
      ;;
      --indocker)
        INDOCKER=true
      ;;
      --logdir=*)
        LOGDIR=${i#*=}
      ;;
      --mvncache=*)
        MVNCACHE=${i#*=}
      ;;
      --native)
        NATIVE=true
      ;;
      --rc-label=*)
        RC_LABEL=${i#*=}
      ;;
      --sign)
        SIGN=true
      ;;
      --version=*)
        HADOOP_VERSION=${i#*=}
      ;;
    esac
  done

  if [[ ! -d "${HOME}/.gnupg" ]]; then
    hadoop_error "ERROR: No .gnupg dir. Disabling signing capability."
    SIGN=false
  fi

  DOCKERCMD=$(command -v docker)
  if [[ "${DOCKER}" = true && -z "${DOCKERCMD}" ]]; then
    hadoop_error "ERROR: docker binary not found. Disabling docker mode."
    DOCKER=false
  fi

  if [[ "${DOCKERCACHE}" = true && "${DOCKER}" = false ]]; then
    if [[ "${INDOCKER}" = false ]]; then
      hadoop_error "ERROR: docker mode not enabled. Disabling dockercache."
    fi
    DOCKERCACHE=false
  fi

  if [[ "${DOCKERCACHE}" = true && -n "${MVNCACHE}" ]]; then
    hadoop_error "ERROR: Cannot set --mvncache and --dockercache simultaneously."
    exit 1
  else
    MVNCACHE=${MVNCACHE:-"${HOME}/.m2"}
  fi

  if [[ "${ASFRELEASE}" = true ]]; then
    if [[ "${SIGN}" = false ]]; then
      hadoop_error "ERROR: --asfrelease requires --sign. Exiting."
      exit 1
    fi

    if [[ "${OSNAME}" = Linux ]]; then
      if [[ "${DOCKER}" = false && "${INDOCKER}" = false ]]; then
        hadoop_error "ERROR: --asfrelease requires --docker on Linux. Exiting."
        exit 1
      elif [[ "${DOCKERCACHE}" = false && "${INDOCKER}" = false ]]; then
        hadoop_error "ERROR: --asfrelease on Linux requires --dockercache. Exiting."
        exit 1
      fi
    fi
  fi

  if [[ -n "${MVNCACHE}" ]]; then
    mkdir -p "${MVNCACHE}"
    if [[ -d "${MVNCACHE}" ]]; then
      MVN_ARGS=("-Dmaven.repo.local=${MVNCACHE}")
    fi
  fi
}

function dockermode
{
  declare lines
  declare -a modp
  declare imgname
  declare -a extrad
  declare user_name
  declare user_id
  declare group_id

  if [[ "${DOCKER}" != true ]]; then
    return
  fi

  user_name=${SUDO_USER:=$USER}
  user_id=$(id -u "${user_name}")
  group_id=$(id -g "${user_name}")

  imgname="hadoop/createrelease:${HADOOP_VERSION}_${RANDOM}"

  if [[ -d "${HOME}/.gnupg" ]]; then
    extrad+=("-v" "${HOME}/.gnupg:/home/${user_name}/.gnupg")
  fi

  if [[ -n "${LOGDIR}" ]]; then
    if [[ ! -d "${LOGDIR}" ]]; then
      mkdir -p "${LOGDIR}"
    fi
    lines=$(hadoop_abs "${LOGDIR}")
    extrad+=("-v" "${lines}:${lines}")
  fi

  if [[ -n "${ARTIFACTS_DIR}" ]]; then
    if [[ ! -d "${ARTIFACTS_DIR}" ]]; then
      mkdir -p "${ARTIFACTS_DIR}"
    fi
    lines=$(hadoop_abs "${ARTIFACTS_DIR}")
    extrad+=("-v" "${lines}:${lines}")
  fi

  if [[ "${DOCKERCACHE}" = true ]]; then
    modp+=("--mvncache=/maven")
  else
    lines=$(hadoop_abs "${MVNCACHE}")
    extrad+=("-v" "${lines}:${lines}")
  fi

  for lines in "${PARAMS[@]}"; do
    if [[ "${lines}" != "--docker" ]]; then
      modp+=("$lines")
    fi
  done

  modp+=("--indocker")

  (
    lines=$(grep -n 'YETUS CUT HERE' "${DOCKERFILE}" | cut -f1 -d:)
    if [[ -z "${lines}" ]]; then
      cat "${DOCKERFILE}"
    else
      head -n "${lines}" "${DOCKERFILE}"
    fi
    # make sure we put some space between, just in case last
    # line isn't an empty line or whatever
    printf "\n\n"

    echo "RUN groupadd --non-unique -g ${group_id} ${user_name}"
    echo "RUN useradd -g ${group_id} -u ${user_id} -m ${user_name}"
    echo "RUN chown -R ${user_name} /home/${user_name}"
    echo "ENV HOME /home/${user_name}"
    echo "RUN mkdir -p /maven"
    echo "RUN chown -R ${user_name} /maven"

    # we always force build with the Oracle JDK
    # but with the correct version
    echo "ENV JAVA_HOME /usr/lib/jvm/java-${JVM_VERSION}-oracle"
    echo "USER ${user_name}"
    printf "\n\n"
  ) | docker build -t "${imgname}" -

  run docker run -i -t \
    --privileged \
    "${extrad[@]}" \
    -v "${BASEDIR}:/build/source" \
    -u "${user_name}" \
    -w "/build/source" \
    "${imgname}" \
    "/build/source/dev-support/bin/create-release" "${modp[@]}"

  DOCKERRAN=true
}
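
# For a hypothetical invoking user "jane" (uid/gid 1000) and JVM_VERSION=8,
# the subshell above appends to the Dockerfile something like:
#   RUN groupadd --non-unique -g 1000 jane
#   RUN useradd -g 1000 -u 1000 -m jane
#   ENV JAVA_HOME /usr/lib/jvm/java-8-oracle
# before re-invoking this script inside the container with --indocker.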

function makearelease
{
  # let's start at the root
  run cd "${BASEDIR}"

  big_console_header "Cleaning the Source Tree"

  # git clean to clear any remnants from previous build
  run "${GIT}" clean -xdf

  mkdir -p "${LOGDIR}"

  # mvn clean for sanity
  run_and_redirect "${LOGDIR}/mvn_clean.log" "${MVN}" "${MVN_ARGS[@]}" clean

  # Create staging dir for release artifacts
  run mkdir -p "${ARTIFACTS_DIR}"

  big_console_header "Apache RAT Check"

  # Create RAT report
  run_and_redirect "${LOGDIR}/mvn_apache_rat.log" "${MVN}" "${MVN_ARGS[@]}" apache-rat:check

  big_console_header "Maven Build and Install"

  # Create SRC and BIN tarballs for release,
  # using the 'install' goal instead of 'package' so artifacts are available
  # in the Maven local cache for the site generation
  #
  # shellcheck disable=SC2046
  run_and_redirect "${LOGDIR}/mvn_install.log" \
    "${MVN}" "${MVN_ARGS[@]}" install -Pdist,src \
      -DskipTests -Dtar $(hadoop_native_flags)

  big_console_header "Maven Site"

  # Create site for release
  run_and_redirect "${LOGDIR}/mvn_site.log" "${MVN}" "${MVN_ARGS[@]}" site site:stage -Pdist,src,releasedocs

  big_console_header "Staging the release"

  run mv "${BASEDIR}/target/staging/hadoop-project" "${BASEDIR}/target/r${HADOOP_VERSION}/"
  run cd "${BASEDIR}/target/"
  run tar czpf "hadoop-site-${HADOOP_VERSION}.tar.gz" "r${HADOOP_VERSION}"/*
  run cd "${BASEDIR}"

  # Stage RAT report
  # shellcheck disable=SC2038
  find . -name rat.txt | xargs -I% cat % > "${ARTIFACTS_DIR}/hadoop-${HADOOP_VERSION}${RC_LABEL}-rat.txt"

  # Stage CHANGES and RELEASENOTES files
  for i in CHANGES RELEASENOTES; do
    run cp -p \
      "${BASEDIR}/hadoop-common-project/hadoop-common/src/site/markdown/release/${HADOOP_VERSION}"/${i}*.md \
      "${ARTIFACTS_DIR}/${i}.md"
  done

  # Prepare and stage BIN tarball
  run cd "${BASEDIR}/hadoop-dist/target/"
  run tar -xzpf "hadoop-${HADOOP_VERSION}.tar.gz"
  run mkdir -p "hadoop-${HADOOP_VERSION}/share/doc/hadoop/"
  run cp -r "${BASEDIR}/target/r${HADOOP_VERSION}"/* "hadoop-${HADOOP_VERSION}/share/doc/hadoop/"
  run tar -czpf "hadoop-${HADOOP_VERSION}.tar.gz" "hadoop-${HADOOP_VERSION}"
  run cd "${BASEDIR}"
  run mv \
    "${BASEDIR}/hadoop-dist/target/hadoop-${HADOOP_VERSION}.tar.gz" \
    "${ARTIFACTS_DIR}/hadoop-${HADOOP_VERSION}${RC_LABEL}.tar.gz"

  # Stage SRC tarball
  run mv \
    "${BASEDIR}/hadoop-dist/target/hadoop-${HADOOP_VERSION}-src.tar.gz" \
    "${ARTIFACTS_DIR}/hadoop-${HADOOP_VERSION}${RC_LABEL}-src.tar.gz"

  # Stage SITE tarball
  run mv \
    "${BASEDIR}/target/hadoop-site-${HADOOP_VERSION}.tar.gz" \
    "${ARTIFACTS_DIR}/hadoop-${HADOOP_VERSION}${RC_LABEL}-site.tar.gz"
}

function signartifacts
{
  declare i

  if [[ "${SIGN}" = false ]]; then
    for i in ${ARTIFACTS_DIR}/*; do
      domd5 "${i}"
    done
    echo ""
    echo "Remember to sign the artifacts before staging them on the open"
    echo ""
    return
  fi

  big_console_header "Signing the release"

  for i in ${ARTIFACTS_DIR}/*; do
    gpg --use-agent --armor --output "${i}.asc" --detach-sig "${i}"
    gpg --print-mds "${i}" > "${i}.mds"
    domd5 "${i}"
  done

  if [[ "${ASFRELEASE}" = true ]]; then
    echo "Fetching the Apache Hadoop KEYS file..."
    curl -L "${PUBKEYFILE}" -o "${BASEDIR}/target/KEYS"
    gpg --import --trustdb "${BASEDIR}/target/testkeysdb" "${BASEDIR}/target/KEYS"
    gpg --verify --trustdb "${BASEDIR}/target/testkeysdb" \
      "${ARTIFACTS_DIR}/hadoop-${HADOOP_VERSION}${RC_LABEL}.tar.gz.asc" \
      "${ARTIFACTS_DIR}/hadoop-${HADOOP_VERSION}${RC_LABEL}.tar.gz"
    if [[ $? != 0 ]]; then
      hadoop_error "ERROR: GPG key is not present in ${PUBKEYFILE}."
      hadoop_error "ERROR: This MUST be fixed. Exiting."
      exit 1
    fi
  fi
}
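
# After signing, each artifact f in ${ARTIFACTS_DIR} is accompanied by
# f.asc (detached ASCII-armored signature), f.mds (gpg message digests),
# and f.md5 (from domd5 above).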

# find root of the source tree
BIN=$(hadoop_abs "${BASH_SOURCE:-$0}")
PARAMS=("$@")

set_defaults

option_parse "${PARAMS[@]}"

dockermode

header

if [[ -n ${RC_LABEL} ]]; then
  RC_LABEL="-${RC_LABEL}"
fi

if [[ "${INDOCKER}" = true || "${DOCKERRAN}" = false ]]; then
  startgpgagent

  makearelease

  signartifacts

  stopgpgagent
fi

if [[ "${INDOCKER}" = true ]]; then
  exit $?
fi

if [[ $? == 0 ]]; then
  echo
  echo "Congratulations, you have successfully built the release"
  echo "artifacts for Apache Hadoop ${HADOOP_VERSION}${RC_LABEL}"
  echo
  echo "The artifacts for this run are available at ${ARTIFACTS_DIR}:"
  run ls -1 "${ARTIFACTS_DIR}"
  echo
fi