#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

if [[ -z "${BASH_VERSINFO[0]}" ]] \
   || [[ "${BASH_VERSINFO[0]}" -lt 3 ]] \
   || [[ "${BASH_VERSINFO[0]}" -eq 3 && "${BASH_VERSINFO[1]}" -lt 2 ]]; then
  echo "bash v3.2+ is required. Sorry."
  exit 1
fi

function centered_text
{
  local text="$*"
  # pad so the text lands centered within the 76-column banner
  local spacing=$(( (75+${#text}) /2 ))
  printf "%*s\n" ${spacing} "${text}"
}

function big_console_header
{
  printf "\n\n"
  echo "****************************************************************************"
  centered_text "${@}"
  echo "****************************************************************************"
  printf "\n\n"
}

## @description  Given a filename or dir, return the absolute version of it
## @audience     public
## @stability    stable
## @param        directory
## @replaceable  no
## @return       0 success
## @return       1 failure
## @return       stdout abspath
function hadoop_abs
{
  declare obj=$1
  declare dir
  declare fn
  declare ret

  if [[ ! -e ${obj} ]]; then
    return 1
  elif [[ -d ${obj} ]]; then
    dir=${obj}
  else
    dir=$(dirname -- "${obj}")
    fn=$(basename -- "${obj}")
    fn="/${fn}"
  fi

  dir=$(cd -P -- "${dir}" >/dev/null 2>/dev/null && pwd -P)
  ret=$?
  if [[ ${ret} = 0 ]]; then
    echo "${dir}${fn}"
    return 0
  fi
  return 1
}
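
# Illustrative example (not executed), assuming the current directory is
# /home/user and ../src/pom.xml exists:
#   hadoop_abs ../src/pom.xml   -> prints /home/src/pom.xml, returns 0
#   hadoop_abs /no/such/path    -> prints nothing, returns 1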

## @description  Print a message to stderr
## @audience     public
## @stability    stable
## @replaceable  no
## @param        string
function hadoop_error
{
  echo "$*" 1>&2
}

function run_and_redirect
{
  declare logfile=$1
  shift
  declare res

  echo "\$ ${*} > ${logfile} 2>&1"
  # to the log
  {
    date
    echo "cd $(pwd)"
    echo "${*}"
  } > "${logfile}"
  # run the actual command
  "${@}" >> "${logfile}" 2>&1
  res=$?
  if [[ ${res} != 0 ]]; then
    echo
    echo "Failed!"
    echo
    exit "${res}"
  fi
}
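
# Illustrative example (not executed), with a hypothetical log path:
#   run_and_redirect /tmp/build.log mvn clean
# prints "$ mvn clean > /tmp/build.log 2>&1" on the console; the log file
# starts with the date, the working directory, and the command line,
# followed by the command's combined stdout/stderr.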

function hadoop_native_flags
{
  # modified version of the Yetus personality
  if [[ ${NATIVE} != true ]]; then
    return
  fi

  # Based upon HADOOP-11937
  #
  # Some notes:
  #
  # - getting fuse to compile on anything but Linux
  #   is always tricky.
  # - Darwin assumes homebrew is in use.
  # - HADOOP-12027 required for bzip2 on OS X.
  # - bzip2 is broken in lots of places
  #   (e.g., HADOOP-12027 for OS X), so no -Drequire.bzip2.
  #

  case "${OSNAME}" in
    Linux)
      # shellcheck disable=SC2086
      echo -Pnative -Drequire.snappy -Drequire.openssl -Drequire.fuse
    ;;
    Darwin)
      echo \
        -Pnative -Drequire.snappy \
        -Drequire.openssl \
          -Dopenssl.prefix=/usr/local/opt/openssl/ \
          -Dopenssl.include=/usr/local/opt/openssl/include \
          -Dopenssl.lib=/usr/local/opt/openssl/lib
    ;;
    *)
      # shellcheck disable=SC2086
      echo \
        -Pnative \
        -Drequire.snappy -Drequire.openssl \
        -Drequire.test.libhadoop
    ;;
  esac
}
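
# Illustrative example (not executed): on Linux with --native, the unquoted
# $(hadoop_native_flags) substitution in makearelease adds
#   -Pnative -Drequire.snappy -Drequire.openssl -Drequire.fuse
# to the maven command line.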

# Probe the exit code of a command and stop the script,
# with a contextual error message, in the case of failure.
function run()
{
  declare res

  echo "\$ ${*}"
  "${@}"
  res=$?
  if [[ ${res} != 0 ]]; then
    echo
    echo "Failed!"
    echo
    exit "${res}"
  fi
}
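
# Illustrative example (not executed), with a hypothetical path:
#   run mkdir -p /tmp/scratch
# echoes "$ mkdir -p /tmp/scratch", runs it, and aborts the whole script
# if mkdir returns non-zero.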

function header()
{
  echo
  printf "\n\n"
  echo "============================================================================"
  echo "============================================================================"
  centered_text "Hadoop Release Creator"
  echo "============================================================================"
  echo "============================================================================"
  printf "\n\n"
  echo "Version to create      : ${HADOOP_VERSION}"
  echo "Release Candidate Label: ${RC_LABEL##-}"
  echo "Source Version         : ${DEFAULT_HADOOP_VERSION}"
  printf "\n\n"
}

function set_defaults
{
  BINDIR=$(dirname "${BIN}")
  BASEDIR=$(hadoop_abs "${BINDIR}/../..")
  ARTIFACTS_DIR="${BASEDIR}/target/artifacts"

  # Extract the Hadoop version from ${BASEDIR}/pom.xml
  DEFAULT_HADOOP_VERSION=$(grep "<version>" "${BASEDIR}/pom.xml" \
      | head -1 \
      | sed -e 's|^ *<version>||' -e 's|</version>.*$||')
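
  # Illustrative example (not executed): given a hypothetical top-level
  # pom.xml line "<version>3.5.0-SNAPSHOT</version>", the pipeline above
  # yields "3.5.0-SNAPSHOT".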

  DEPLOY=false

  DOCKER=false
  DOCKERCACHE=false
  DOCKERFILE="${BASEDIR}/dev-support/docker/Dockerfile"
  DOCKERRAN=false

  CPU_ARCH=$(echo "$MACHTYPE" | cut -d- -f1)
  if [[ "$CPU_ARCH" = "aarch64" || "$CPU_ARCH" = "arm64" ]]; then
    echo "Using aarch64 docker file"
    DOCKERFILE="${BASEDIR}/dev-support/docker/Dockerfile_aarch64"
  fi

  # Extract the Java version from ${BASEDIR}/hadoop-project/pom.xml.
  # Doing this outside of maven means we can do it before
  # the docker container comes up...
  JVM_VERSION=$(grep "<javac.version>" "${BASEDIR}/hadoop-project/pom.xml" \
      | head -1 \
      | sed -e 's|^ *<javac.version>||' -e 's|</javac.version>.*$||' -e 's|..||')
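
  # Illustrative example (not executed): a hypothetical
  # "<javac.version>1.8</javac.version>" yields "8"; the final 's|..||'
  # strips the leading two characters ("1.").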

  GIT=$(command -v git)
  GPG=$(command -v gpg)
  GPGAGENT=$(command -v gpg-agent)

  HADOOP_VERSION="${DEFAULT_HADOOP_VERSION}"

  INDOCKER=false
  LOGDIR="${BASEDIR}/patchprocess"

  if [[ -z "${MVN}" ]]; then
    if [[ -n "${MAVEN_HOME}" ]]; then
      MVN=${MAVEN_HOME}/bin/mvn
    else
      MVN=$(command -v mvn)
    fi
  fi

  NATIVE=false
  OSNAME=$(uname -s)

  PUBKEYFILE="https://dist.apache.org/repos/dist/release/hadoop/common/KEYS"

  SIGN=false
}

function startgpgagent
{
  if [[ "${SIGN}" = true ]]; then
    if [[ -n "${GPGAGENT}" && -z "${GPG_AGENT_INFO}" ]]; then
      echo "starting gpg agent"
      echo "default-cache-ttl 36000" > "${LOGDIR}/gpgagent.conf"
      echo "max-cache-ttl 36000" >> "${LOGDIR}/gpgagent.conf"
      # shellcheck disable=2046
      eval $("${GPGAGENT}" --daemon \
        --options "${LOGDIR}/gpgagent.conf" \
        --log-file="${LOGDIR}/create-release-gpgagent.log")
      GPGAGENTPID=$(pgrep "${GPGAGENT}")
      GPG_AGENT_INFO="$HOME/.gnupg/S.gpg-agent:$GPGAGENTPID:1"
      export GPG_AGENT_INFO
    fi

    if [[ -n "${GPG_AGENT_INFO}" ]]; then
      echo "Warming the gpg-agent cache prior to calling maven"
      # warm the agent's cache:
      touch "${LOGDIR}/warm"
      ${GPG} --use-agent --armor --output "${LOGDIR}/warm.asc" --detach-sig "${LOGDIR}/warm"
      rm "${LOGDIR}/warm.asc" "${LOGDIR}/warm"
    else
      SIGN=false
      hadoop_error "ERROR: Unable to launch or acquire gpg-agent. Disabling signing."
    fi
  fi
}

function stopgpgagent
{
  if [[ -n "${GPGAGENTPID}" ]]; then
    kill "${GPGAGENTPID}"
  fi
}

function usage
{
  echo "--artifactsdir=[path]   Path to use to store release bits"
  echo "--asfrelease            Make an ASF release"
  echo "--deploy                Deploy Maven artifacts using ~/.m2/settings.xml"
  echo "--docker                Use Hadoop's Dockerfile for guaranteed environment"
  echo "--dockercache           Use a Docker-private maven cache"
  echo "--logdir=[path]         Path to store logs"
  echo "--mvncache=[path]       Path to the maven cache to use"
  echo "--native                Also build the native components"
  echo "--rc-label=[label]      Add this label to the builds"
  echo "--security              Emergency security release"
  echo "--sign                  Use .gnupg dir to sign the artifacts and jars"
  echo "--version=[version]     Use an alternative version string"
  echo "--mvnargs=[args]        Extra Maven args to be provided when running mvn commands"
}
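
# Illustrative example (not executed): a typical signed, native, in-Docker
# build with a hypothetical RC label might be invoked as
#   dev-support/bin/create-release --docker --dockercache --native --sign --rc-label=RC0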

function option_parse
{
  declare i

  for i in "$@"; do
    case ${i} in
      --asfrelease)
        ASFRELEASE=true
        NATIVE=true
        SIGN=true
        DEPLOY=true
      ;;
      --artifactsdir=*)
        ARTIFACTS_DIR=${i#*=}
      ;;
      --deploy)
        DEPLOY=true
      ;;
      --docker)
        DOCKER=true
      ;;
      --dockercache)
        DOCKERCACHE=true
      ;;
      --help)
        usage
        exit
      ;;
      --indocker)
        INDOCKER=true
      ;;
      --logdir=*)
        LOGDIR=${i#*=}
      ;;
      --mvncache=*)
        MVNCACHE=${i#*=}
      ;;
      --native)
        NATIVE=true
      ;;
      --rc-label=*)
        RC_LABEL=${i#*=}
      ;;
      --security)
        SECURITYRELEASE=true
      ;;
      --sign)
        SIGN=true
      ;;
      --version=*)
        HADOOP_VERSION=${i#*=}
      ;;
      --mvnargs=*)
        MVNEXTRAARGS=${i#*=}
      ;;
    esac
  done

  if [[ ! -d "${HOME}/.gnupg" ]]; then
    hadoop_error "ERROR: No .gnupg dir. Disabling signing capability."
    SIGN=false
  fi

  if [[ "${SIGN}" = true ]]; then
    if [[ -n "${GPG_AGENT_INFO}" ]]; then
      echo "NOTE: Using existing gpg-agent. If the default-cache-ttl"
      echo "is set to less than ~20 mins, maven commands will fail."
    elif [[ -z "${GPGAGENT}" ]]; then
      hadoop_error "ERROR: No gpg-agent. Disabling signing capability."
      SIGN=false
    fi
  fi

  if [[ "${DEPLOY}" = true && ! -f "${HOME}/.m2/settings.xml" ]]; then
    hadoop_error "ERROR: No ~/.m2/settings.xml file, cannot deploy Maven artifacts."
    exit 1
  fi

  DOCKERCMD=$(command -v docker)
  if [[ "${DOCKER}" = true && -z "${DOCKERCMD}" ]]; then
    hadoop_error "ERROR: docker binary not found. Disabling docker mode."
    DOCKER=false
  fi

  if [[ "${DOCKERCACHE}" = true && "${DOCKER}" = false ]]; then
    if [[ "${INDOCKER}" = false ]]; then
      hadoop_error "ERROR: docker mode not enabled. Disabling dockercache."
    fi
    DOCKERCACHE=false
  fi

  if [[ "${DOCKERCACHE}" = true && -n "${MVNCACHE}" ]]; then
    hadoop_error "ERROR: Cannot set --mvncache and --dockercache simultaneously."
    exit 1
  else
    MVNCACHE=${MVNCACHE:-"${HOME}/.m2/repository"}
  fi

  if [[ "${ASFRELEASE}" = true ]]; then
    if [[ "${SIGN}" = false ]]; then
      hadoop_error "ERROR: --asfrelease requires --sign. Exiting."
      exit 1
    fi

    if [[ "${OSNAME}" = Linux ]]; then
      if [[ "${DOCKER}" = false && "${INDOCKER}" = false ]]; then
        hadoop_error "ERROR: --asfrelease requires --docker on Linux. Exiting."
        exit 1
      elif [[ "${DOCKERCACHE}" = false && "${INDOCKER}" = false ]]; then
        hadoop_error "ERROR: --asfrelease on Linux requires --dockercache. Exiting."
        exit 1
      fi
    fi
  fi

  if [[ -n "${MVNCACHE}" ]]; then
    mkdir -p "${MVNCACHE}"
    if [[ -d "${MVNCACHE}" ]]; then
      MVN_ARGS=("-Dmaven.repo.local=${MVNCACHE}")
    fi
  fi

  if [[ -n "${MVNEXTRAARGS}" ]]; then
    MVN_ARGS+=("${MVNEXTRAARGS}")
  fi

  if [[ "${SECURITYRELEASE}" = true ]]; then
    if [[ ! -d "${BASEDIR}/hadoop-common-project/hadoop-common/src/site/markdown/release/${HADOOP_VERSION}" ]]; then
      hadoop_error "ERROR: ${BASEDIR}/hadoop-common-project/hadoop-common/src/site/markdown/release/${HADOOP_VERSION} does not exist."
      hadoop_error "ERROR: This directory and its contents are required to be manually created for a security release."
      exit 1
    fi
  fi
}

function dockermode
{
  declare lines
  declare -a modp
  declare imgname
  declare -a extrad
  declare user_name
  declare user_id
  declare group_id

  if [[ "${DOCKER}" != true ]]; then
    return
  fi

  user_name=${SUDO_USER:=$USER}
  user_id=$(id -u "${user_name}")
  group_id=$(id -g "${user_name}")

  imgname="hadoop/createrelease:${HADOOP_VERSION}_${RANDOM}"

  if [[ -d "${HOME}/.gnupg" ]]; then
    extrad+=("-v" "${HOME}/.gnupg:/home/${user_name}/.gnupg")
  fi

  if [[ -n "${LOGDIR}" ]]; then
    if [[ ! -d "${LOGDIR}" ]]; then
      mkdir -p "${LOGDIR}"
    fi
    lines=$(hadoop_abs "${LOGDIR}")
    extrad+=("-v" "${lines}:${lines}")
  fi

  if [[ -n "${ARTIFACTS_DIR}" ]]; then
    if [[ ! -d "${ARTIFACTS_DIR}" ]]; then
      mkdir -p "${ARTIFACTS_DIR}"
    fi
    lines=$(hadoop_abs "${ARTIFACTS_DIR}")
    extrad+=("-v" "${lines}:${lines}")
  fi

  if [[ "${DEPLOY}" = true ]]; then
    modp+=("--deploy")
    extrad+=("-v" "${HOME}/.m2/settings.xml:/home/${user_name}/.m2/settings.xml")
  fi

  if [[ "${DOCKERCACHE}" = true ]]; then
    modp+=("--mvncache=/maven")
  else
    lines=$(hadoop_abs "${MVNCACHE}")
    extrad+=("-v" "${lines}:${lines}")
  fi

  for lines in "${PARAMS[@]}"; do
    if [[ "${lines}" != "--docker" ]]; then
      modp+=("$lines")
    fi
  done

  modp+=("--indocker")

  (
    lines=$(grep -n 'YETUS CUT HERE' "${DOCKERFILE}" | cut -f1 -d:)
    if [[ -z "${lines}" ]]; then
      cat "${DOCKERFILE}"
    else
      head -n "${lines}" "${DOCKERFILE}"
    fi
    # make sure we put some space between, just in case the last
    # line isn't an empty line or whatever
    printf "\n\n"

    # force a new image for every run to make it easier to remove later
    echo "LABEL org.apache.hadoop.create-release=\"cr-${RANDOM}\""

    # set up ownerships, etc.
    echo "RUN groupadd --non-unique -g ${group_id} ${user_name}"
    echo "RUN useradd -g ${group_id} -u ${user_id} -m ${user_name}"
    echo "RUN chown -R ${user_name} /home/${user_name}"
    echo "ENV HOME /home/${user_name}"
    echo "RUN mkdir -p /maven"
    echo "RUN chown -R ${user_name} /maven"

    # we always force the build to use the OpenJDK JDK,
    # but with the correct version
    if [[ "$CPU_ARCH" = "aarch64" || "$CPU_ARCH" = "arm64" ]]; then
      echo "ENV JAVA_HOME /usr/lib/jvm/java-${JVM_VERSION}-openjdk-arm64"
    else
      echo "ENV JAVA_HOME /usr/lib/jvm/java-${JVM_VERSION}-openjdk-amd64"
    fi
    echo "USER ${user_name}"
    printf "\n\n"
  ) | docker build -t "${imgname}" -

  run docker run -i -t \
    --privileged \
    "${extrad[@]}" \
    -v "${BASEDIR}:/build/source" \
    -u "${user_name}" \
    -w "/build/source" \
    "${imgname}" \
    "/build/source/dev-support/bin/create-release" "${modp[@]}"

  DOCKERRAN=true
}
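
# Illustrative example (not executed): for a hypothetical user "jane"
# (uid/gid 1000) with JVM_VERSION=8 on x86_64, the generated Dockerfile
# tail looks roughly like:
#   LABEL org.apache.hadoop.create-release="cr-12345"
#   RUN groupadd --non-unique -g 1000 jane
#   RUN useradd -g 1000 -u 1000 -m jane
#   ...
#   ENV JAVA_HOME /usr/lib/jvm/java-8-openjdk-amd64
#   USER jane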

function makearelease
{
  # let's start at the root
  run cd "${BASEDIR}"

  big_console_header "Cleaning the Source Tree"

  # Since CVE-2022-24765 in April 2022, git refuses to work in directories
  # whose owner != the current user, unless explicitly told to trust it.
  git config --global --add safe.directory /build/source

  # git clean to clear any remnants from a previous build
  run "${GIT}" clean -xdf -e /patchprocess

  mkdir -p "${LOGDIR}"

  # Install the Hadoop maven plugins first
  run_and_redirect "${LOGDIR}/mvn_install_maven_plugins.log" "${MVN}" "${MVN_ARGS[@]}" -pl hadoop-maven-plugins -am clean install

  # mvn clean for sanity
  run_and_redirect "${LOGDIR}/mvn_clean.log" "${MVN}" "${MVN_ARGS[@]}" clean

  # Create staging dir for release artifacts
  run mkdir -p "${ARTIFACTS_DIR}"

  big_console_header "Apache RAT Check"

  # Create RAT report
  run_and_redirect "${LOGDIR}/mvn_apache_rat.log" "${MVN}" "${MVN_ARGS[@]}" apache-rat:check

  big_console_header "Maven Build and Install"

  if [[ "${SIGN}" = true ]]; then
    signflags=("-Psign" "-Dgpg.useagent=true" "-Dgpg.executable=${GPG}")
  fi

  local target="install"
  if [[ "${DEPLOY}" = true ]]; then
    target="deploy"
  fi

  # Create SRC and BIN tarballs for release
  # shellcheck disable=SC2046
  run_and_redirect "${LOGDIR}/mvn_${target}.log" \
    "${MVN}" "${MVN_ARGS[@]}" ${target} \
      -Pdist,src,yarn-ui \
      "${signflags[@]}" \
      -DskipTests -Dtar $(hadoop_native_flags)

  # Stage BIN tarball
  run cd "${BASEDIR}"
  run mv \
    "${BASEDIR}/hadoop-dist/target/hadoop-${HADOOP_VERSION}.tar.gz" \
    "${ARTIFACTS_DIR}/hadoop-${HADOOP_VERSION}${RC_LABEL}.tar.gz"

  # Stage SRC tarball
  run mv \
    "${BASEDIR}/hadoop-dist/target/hadoop-${HADOOP_VERSION}-src.tar.gz" \
    "${ARTIFACTS_DIR}/hadoop-${HADOOP_VERSION}${RC_LABEL}-src.tar.gz"

  big_console_header "Maven Site"

  if [[ "${SECURITYRELEASE}" = true ]]; then
    DOCFLAGS="-Pdocs"
    hadoop_error "WARNING: Skipping automatic changelog and release notes generation due to --security"
  else
    DOCFLAGS="-Preleasedocs,docs"
  fi

  # Create the site for the release.
  # We need to do install again so that jdiff and
  # a few other things get registered in the maven
  # universe correctly.
  run_and_redirect "${LOGDIR}/mvn_site.log" \
    "${MVN}" "${MVN_ARGS[@]}" install \
      site site:stage \
      -DskipTests \
      -DskipShade \
      -Pdist,src \
      "${DOCFLAGS}"

  # Create the site tarball
  run mv "${BASEDIR}/target/staging/hadoop-project" "${BASEDIR}/target/r${HADOOP_VERSION}/"
  run cd "${BASEDIR}/target/"
  run tar czpf "hadoop-site-${HADOOP_VERSION}.tar.gz" "r${HADOOP_VERSION}"/*
  run cd "${BASEDIR}"

  # Stage SITE tarball
  run mv \
    "${BASEDIR}/target/hadoop-site-${HADOOP_VERSION}.tar.gz" \
    "${ARTIFACTS_DIR}/hadoop-${HADOOP_VERSION}${RC_LABEL}-site.tar.gz"

  # Stage RAT report
  # shellcheck disable=SC2038
  find . -name rat.txt | xargs -I% cat % > "${ARTIFACTS_DIR}/hadoop-${HADOOP_VERSION}${RC_LABEL}-rat.txt"

  # Stage CHANGELOG and RELEASENOTES files
  for i in CHANGELOG RELEASENOTES; do
    run cp -p \
      "${BASEDIR}/hadoop-common-project/hadoop-common/src/site/markdown/release/${HADOOP_VERSION}"/${i}*.md \
      "${ARTIFACTS_DIR}/${i}.md"
  done

  # We need to fix up the BIN tarball at the end to contain the site docs.
  run cd "${ARTIFACTS_DIR}"
  run tar -xzpf "hadoop-${HADOOP_VERSION}${RC_LABEL}.tar.gz"
  run mkdir -p "hadoop-${HADOOP_VERSION}/share/doc/hadoop/"
  run cp -r "${BASEDIR}/target/r${HADOOP_VERSION}"/* "hadoop-${HADOOP_VERSION}/share/doc/hadoop/"
  run tar -czpf "hadoop-${HADOOP_VERSION}${RC_LABEL}.tar.gz" "hadoop-${HADOOP_VERSION}"
  run rm -rf "hadoop-${HADOOP_VERSION}"
}
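
# Illustrative example (not executed): for a hypothetical 3.5.0 build with
# --rc-label=RC0, ARTIFACTS_DIR ends up holding
#   hadoop-3.5.0-RC0.tar.gz        hadoop-3.5.0-RC0-src.tar.gz
#   hadoop-3.5.0-RC0-site.tar.gz   hadoop-3.5.0-RC0-rat.txt
#   CHANGELOG.md                   RELEASENOTES.md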

function signartifacts
{
  declare i
  declare ret

  if [[ "${SIGN}" = false ]]; then
    echo ""
    echo "Remember to sign the artifacts before staging them publicly."
    echo ""
    return
  fi

  big_console_header "Signing the release"

  run cd "${ARTIFACTS_DIR}"

  for i in *; do
    ${GPG} --use-agent --armor --output "${i}.asc" --detach-sig "${i}"
    sha512sum --tag "${i}" > "${i}.sha512"
  done

  run cd "${BASEDIR}"

  if [[ "${ASFRELEASE}" = true ]]; then
    echo "Fetching the Apache Hadoop KEYS file..."
    curl -L "${PUBKEYFILE}" -o "${BASEDIR}/target/KEYS"
    ${GPG} --import --trustdb "${BASEDIR}/target/testkeysdb" "${BASEDIR}/target/KEYS"
    ${GPG} --verify --trustdb "${BASEDIR}/target/testkeysdb" \
      "${ARTIFACTS_DIR}/hadoop-${HADOOP_VERSION}${RC_LABEL}.tar.gz.asc" \
      "${ARTIFACTS_DIR}/hadoop-${HADOOP_VERSION}${RC_LABEL}.tar.gz"
    ret=$?
    if [[ ${ret} != 0 ]]; then
      hadoop_error "ERROR: GPG key is not present in ${PUBKEYFILE}."
      hadoop_error "ERROR: This MUST be fixed. Exiting."
      exit 1
    fi
  fi
}
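
# Illustrative example (not executed): each artifact gains a detached
# ASCII-armored signature plus a BSD-style checksum, e.g. for a hypothetical
# hadoop-3.5.0-RC0.tar.gz:
#   hadoop-3.5.0-RC0.tar.gz.asc
#   hadoop-3.5.0-RC0.tar.gz.sha512 containing
#     SHA512 (hadoop-3.5.0-RC0.tar.gz) = <hash>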

# find root of the source tree
BIN=$(hadoop_abs "${BASH_SOURCE:-$0}")
PARAMS=("$@")

set_defaults

option_parse "${PARAMS[@]}"

dockermode

header

if [[ -n ${RC_LABEL} ]]; then
  RC_LABEL="-${RC_LABEL}"
fi
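
# Illustrative example (not executed): --rc-label=RC0 becomes "-RC0" here,
# so staged artifacts get names like hadoop-${HADOOP_VERSION}-RC0.tar.gz.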

if [[ "${INDOCKER}" = true || "${DOCKERRAN}" = false ]]; then
  startgpgagent

  makearelease
  releaseret=$?

  signartifacts

  stopgpgagent
fi

if [[ "${INDOCKER}" = true ]]; then
  exit "${releaseret}"
fi

if [[ ${releaseret} == 0 ]]; then
  echo
  echo "Congratulations, you have successfully built the release"
  echo "artifacts for Apache Hadoop ${HADOOP_VERSION}${RC_LABEL}"
  echo
  echo "The artifacts for this run are available at ${ARTIFACTS_DIR}:"
  run ls -1 "${ARTIFACTS_DIR}"
  echo
fi