hadoop-functions.sh 68 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007100810091010101110121013101410151016101710181019102010211022102310241025102610271028102910301031103210331034103510361037103810391040104110421043104410451046104710481049105010511052105310541055105610571058105910601061106210631064106510661067106810691070107110721073107410751076107710781079108010811082108310841085108610871088108910901091109210931094109510961097109810991100110111021103110411051106110711081109111011111112111311141115111611171118111911201121112211231124112511261127112811291130113111321133113411351136113711381139114011411142114311441145114611471148114911501151115211531154115511561157115811591160116111621163116411651166116711681169117011711172117311741175117611771178117911801181118211831184118511861187118811891190119111921193119411951196119711981199120012011202120312041205120612071208120912101211121212131214121512161217121812191220122112221223122412251226122712281229123012311232123312341235123612371238123912401241124212431244124512461247124812491250125112521253125412551256125712581259126012611262126312641265126612671268126912701271127212731274127512761277127812791280128112821283128412851286128712881289129012911292129312941295129612971298129913001301130213031304130513061307130813091310131113121313131413151316131713181319132013211322132313241325132613271328132913301331133213331334133513361337133813391340134113421343134413451346134713481349135013511352135313541355135613571358135913601361136213631364136513661367136813691370137113721373137413751376137713781379138013811382138313841385138613871388138913901391139213931394139513961397139813991400140114021403140414051406140714081409141014111412141314141415141614171418141914201421142214231424142514261427142814291430143114321433143414351436143714381439144014411442144314441445144614471448144914501451145214531454145514561457145814591460146114621463146414651466146714681469147014711472147314741475147614771478147914801481148214831484148514861487148814891490149114921493149414951496149714981499150015011502150315041505150615071508150915101511151215131514151515161517151815191520152115221523152415251526152715281529153015311532153315341535153615371538153915401541154215431544154515461547154815491550155115521553155415551556155715581559156015611562156315641565156615671568156915701571157215731574157515761577157815791580158115821583158415851586158715881589159015911592159315941595159615971598159916001601160216031604160516061607160816091610161116121613161416151616161716181619162016211622162316241625162616271628162916301631163216331634163516361637163816391640164116421643164416451646164716481649165016511652165316541655165616571658165916601661166216631664166516661667166816691670167116721673167416751676167716781679168016811682168316841685168616871688168916901691169216931694169516961697169816991700170117021703170417051706170717081709171017111712171317141715171617171718171917201721172217231724172517261727172817291730173117321733173417351736173717381739174017411742174317441745174617471748174917501751175217531754175517561757175817591760176117621763176417651766176717681769177017711772177317741775177617771778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227722782279228022812282228322842285228622872288228922902291229222932294229522962297229822992300230123022303230423052306230723082309231023112312231323142315231623172318231923202321232223232324232523262327232823292330233123322333233423352336233723382339234023412342234323442345234623472348234923502351235223532354235523562357235823592360236123622363236423652366236723682369237023712372237323742375237623772378237923802381238223832384238523862387238823892390239123922393239423952396239723982399240024012402240324042405240624072408240924102411241224132414241524162417241824192420242124222423242424252426242724282429243024312432243324342435243624372438243924402441244224432444244524462447244824492450245124522453245424552456245724582459246024612462
  1. #!/usr/bin/env bash
  2. # Licensed to the Apache Software Foundation (ASF) under one or more
  3. # contributor license agreements. See the NOTICE file distributed with
  4. # this work for additional information regarding copyright ownership.
  5. # The ASF licenses this file to You under the Apache License, Version 2.0
  6. # (the "License"); you may not use this file except in compliance with
  7. # the License. You may obtain a copy of the License at
  8. #
  9. # http://www.apache.org/licenses/LICENSE-2.0
  10. #
  11. # Unless required by applicable law or agreed to in writing, software
  12. # distributed under the License is distributed on an "AS IS" BASIS,
  13. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14. # See the License for the specific language governing permissions and
  15. # limitations under the License.
  16. # we need to declare this globally as an array, which can only
  17. # be done outside of a function
  18. declare -a HADOOP_SUBCMD_USAGE
  19. declare -a HADOOP_OPTION_USAGE
  20. ## @description Print a message to stderr
  21. ## @audience public
  22. ## @stability stable
  23. ## @replaceable no
  24. ## @param string
  25. function hadoop_error
  26. {
  27. echo "$*" 1>&2
  28. }
  29. ## @description Print a message to stderr if --debug is turned on
  30. ## @audience public
  31. ## @stability stable
  32. ## @replaceable no
  33. ## @param string
  34. function hadoop_debug
  35. {
  36. if [[ -n "${HADOOP_SHELL_SCRIPT_DEBUG}" ]]; then
  37. echo "DEBUG: $*" 1>&2
  38. fi
  39. }
  40. ## @description Given a filename or dir, return the absolute version of it
  41. ## @description This works as an alternative to readlink, which isn't
  42. ## @description portable.
  43. ## @audience public
  44. ## @stability stable
  45. ## @param fsobj
  46. ## @replaceable no
  47. ## @return 0 success
  48. ## @return 1 failure
  49. ## @return stdout abspath
  50. function hadoop_abs
  51. {
  52. declare obj=$1
  53. declare dir
  54. declare fn
  55. declare dirret
  56. if [[ ! -e ${obj} ]]; then
  57. return 1
  58. elif [[ -d ${obj} ]]; then
  59. dir=${obj}
  60. else
  61. dir=$(dirname -- "${obj}")
  62. fn=$(basename -- "${obj}")
  63. fn="/${fn}"
  64. fi
  65. dir=$(cd -P -- "${dir}" >/dev/null 2>/dev/null && pwd -P)
  66. dirret=$?
  67. if [[ ${dirret} = 0 ]]; then
  68. echo "${dir}${fn}"
  69. return 0
  70. fi
  71. return 1
  72. }
  73. ## @description Given variable $1 delete $2 from it
  74. ## @audience public
  75. ## @stability stable
  76. ## @replaceable no
  77. function hadoop_delete_entry
  78. {
  79. if [[ ${!1} =~ \ ${2}\ ]] ; then
  80. hadoop_debug "Removing ${2} from ${1}"
  81. eval "${1}"=\""${!1// ${2} }"\"
  82. fi
  83. }
  84. ## @description Given variable $1 add $2 to it
  85. ## @audience public
  86. ## @stability stable
  87. ## @replaceable no
  88. function hadoop_add_entry
  89. {
  90. if [[ ! ${!1} =~ \ ${2}\ ]] ; then
  91. hadoop_debug "Adding ${2} to ${1}"
  92. #shellcheck disable=SC2140
  93. eval "${1}"=\""${!1} ${2} "\"
  94. fi
  95. }
  96. ## @description Given variable $1 determine if $2 is in it
  97. ## @audience public
  98. ## @stability stable
  99. ## @replaceable no
  100. ## @return 0 = yes, 1 = no
  101. function hadoop_verify_entry
  102. {
  103. # this unfortunately can't really be tested by bats. :(
  104. # so if this changes, be aware that unit tests effectively
  105. # do this function in them
  106. [[ ${!1} =~ \ ${2}\ ]]
  107. }
  108. ## @description Check if we are running with privilege
  109. ## @description by default, this implementation looks for
  110. ## @description EUID=0. For OSes that have true privilege
  111. ## @description separation, this should be something more complex
  112. ## @audience private
  113. ## @stability evolving
  114. ## @replaceable yes
  115. ## @return 1 = no priv
  116. ## @return 0 = priv
  117. function hadoop_privilege_check
  118. {
  119. [[ "${EUID}" = 0 ]]
  120. }
  121. ## @description Execute a command via su when running as root
  122. ## @description if the given user is found or exit with
  123. ## @description failure if not.
  124. ## @description otherwise just run it. (This is intended to
  125. ## @description be used by the start-*/stop-* scripts.)
  126. ## @audience private
  127. ## @stability evolving
  128. ## @replaceable yes
  129. ## @param user
  130. ## @param commandstring
  131. ## @return exitstatus
  132. function hadoop_su
  133. {
  134. declare user=$1
  135. shift
  136. declare idret
  137. if hadoop_privilege_check; then
  138. id -u "${user}" >/dev/null 2>&1
  139. idret=$?
  140. if [[ ${idret} != 0 ]]; then
  141. hadoop_error "ERROR: Refusing to run as root: ${user} account is not found. Aborting."
  142. return 1
  143. else
  144. su -l "${user}" -- "$@"
  145. fi
  146. else
  147. "$@"
  148. fi
  149. }
  150. ## @description Execute a command via su when running as root
  151. ## @description with extra support for commands that might
  152. ## @description legitimately start as root (e.g., datanode)
  153. ## @description (This is intended to
  154. ## @description be used by the start-*/stop-* scripts.)
  155. ## @audience private
  156. ## @stability evolving
  157. ## @replaceable no
  158. ## @param user
  159. ## @param commandstring
  160. ## @return exitstatus
  161. function hadoop_uservar_su
  162. {
  163. ## startup matrix:
  164. #
  165. # if $EUID != 0, then exec
  166. # if $EUID =0 then
  167. # if hdfs_subcmd_user is defined, call hadoop_su to exec
  168. # if hdfs_subcmd_user is not defined, error
  169. #
  170. # For secure daemons, this means both the secure and insecure env vars need to be
  171. # defined. e.g., HDFS_DATANODE_USER=root HDFS_DATANODE_SECURE_USER=hdfs
  172. # This function will pick up the "normal" var, switch to that user, then
  173. # execute the command which will then pick up the "secure" version.
  174. #
  175. declare program=$1
  176. declare command=$2
  177. shift 2
  178. declare uprogram
  179. declare ucommand
  180. declare uvar
  181. if hadoop_privilege_check; then
  182. uvar=$(hadoop_get_verify_uservar "${program}" "${command}")
  183. if [[ -n "${!uvar}" ]]; then
  184. hadoop_su "${!uvar}" "$@"
  185. else
  186. hadoop_error "ERROR: Attempting to launch ${program} ${command} as root"
  187. hadoop_error "ERROR: but there is no ${uvar} defined. Aborting launch."
  188. return 1
  189. fi
  190. else
  191. "$@"
  192. fi
  193. }
  194. ## @description Add a subcommand to the usage output
  195. ## @audience private
  196. ## @stability evolving
  197. ## @replaceable no
  198. ## @param subcommand
  199. ## @param subcommanddesc
  200. function hadoop_add_subcommand
  201. {
  202. local subcmd=$1
  203. local text=$2
  204. HADOOP_SUBCMD_USAGE[${HADOOP_SUBCMD_USAGE_COUNTER}]="${subcmd}@${text}"
  205. ((HADOOP_SUBCMD_USAGE_COUNTER=HADOOP_SUBCMD_USAGE_COUNTER+1))
  206. }
  207. ## @description Add an option to the usage output
  208. ## @audience private
  209. ## @stability evolving
  210. ## @replaceable no
  211. ## @param subcommand
  212. ## @param subcommanddesc
  213. function hadoop_add_option
  214. {
  215. local option=$1
  216. local text=$2
  217. HADOOP_OPTION_USAGE[${HADOOP_OPTION_USAGE_COUNTER}]="${option}@${text}"
  218. ((HADOOP_OPTION_USAGE_COUNTER=HADOOP_OPTION_USAGE_COUNTER+1))
  219. }
  220. ## @description Reset the usage information to blank
  221. ## @audience private
  222. ## @stability evolving
  223. ## @replaceable no
  224. function hadoop_reset_usage
  225. {
  226. HADOOP_SUBCMD_USAGE=()
  227. HADOOP_OPTION_USAGE=()
  228. HADOOP_SUBCMD_USAGE_COUNTER=0
  229. HADOOP_OPTION_USAGE_COUNTER=0
  230. }
  231. ## @description Print a screen-size aware two-column output
  232. ## @audience private
  233. ## @stability evolving
  234. ## @replaceable no
  235. ## @param array
  236. function hadoop_generic_columnprinter
  237. {
  238. declare -a input=("$@")
  239. declare -i i=0
  240. declare -i counter=0
  241. declare line
  242. declare text
  243. declare option
  244. declare giventext
  245. declare -i maxoptsize
  246. declare -i foldsize
  247. declare -a tmpa
  248. declare numcols
  249. if [[ -n "${COLUMNS}" ]]; then
  250. numcols=${COLUMNS}
  251. else
  252. numcols=$(tput cols) 2>/dev/null
  253. fi
  254. if [[ -z "${numcols}"
  255. || ! "${numcols}" =~ ^[0-9]+$ ]]; then
  256. numcols=75
  257. else
  258. ((numcols=numcols-5))
  259. fi
  260. while read -r line; do
  261. tmpa[${counter}]=${line}
  262. ((counter=counter+1))
  263. option=$(echo "${line}" | cut -f1 -d'@')
  264. if [[ ${#option} -gt ${maxoptsize} ]]; then
  265. maxoptsize=${#option}
  266. fi
  267. done < <(for text in "${input[@]}"; do
  268. echo "${text}"
  269. done | sort)
  270. i=0
  271. ((foldsize=numcols-maxoptsize))
  272. until [[ $i -eq ${#tmpa[@]} ]]; do
  273. option=$(echo "${tmpa[$i]}" | cut -f1 -d'@')
  274. giventext=$(echo "${tmpa[$i]}" | cut -f2 -d'@')
  275. while read -r line; do
  276. printf "%-${maxoptsize}s %-s\n" "${option}" "${line}"
  277. option=" "
  278. done < <(echo "${giventext}"| fold -s -w ${foldsize})
  279. ((i=i+1))
  280. done
  281. }
  282. ## @description generate standard usage output
  283. ## @description and optionally takes a class
  284. ## @audience private
  285. ## @stability evolving
  286. ## @replaceable no
  287. ## @param execname
  288. ## @param true|false
  289. ## @param [text to use in place of SUBCOMMAND]
  290. function hadoop_generate_usage
  291. {
  292. local cmd=$1
  293. local takesclass=$2
  294. local subcmdtext=${3:-"SUBCOMMAND"}
  295. local haveoptions
  296. local optstring
  297. local havesubs
  298. local subcmdstring
  299. cmd=${cmd##*/}
  300. if [[ -n "${HADOOP_OPTION_USAGE_COUNTER}"
  301. && "${HADOOP_OPTION_USAGE_COUNTER}" -gt 0 ]]; then
  302. haveoptions=true
  303. optstring=" [OPTIONS]"
  304. fi
  305. if [[ -n "${HADOOP_SUBCMD_USAGE_COUNTER}"
  306. && "${HADOOP_SUBCMD_USAGE_COUNTER}" -gt 0 ]]; then
  307. havesubs=true
  308. subcmdstring=" ${subcmdtext} [${subcmdtext} OPTIONS]"
  309. fi
  310. echo "Usage: ${cmd}${optstring}${subcmdstring}"
  311. if [[ ${takesclass} = true ]]; then
  312. echo " or ${cmd}${optstring} CLASSNAME [CLASSNAME OPTIONS]"
  313. echo " where CLASSNAME is a user-provided Java class"
  314. fi
  315. if [[ "${haveoptions}" = true ]]; then
  316. echo ""
  317. echo " OPTIONS is none or any of:"
  318. echo ""
  319. hadoop_generic_columnprinter "${HADOOP_OPTION_USAGE[@]}"
  320. fi
  321. if [[ "${havesubs}" = true ]]; then
  322. echo ""
  323. echo " ${subcmdtext} is one of:"
  324. echo ""
  325. hadoop_generic_columnprinter "${HADOOP_SUBCMD_USAGE[@]}"
  326. echo ""
  327. echo "${subcmdtext} may print help when invoked w/o parameters or with -h."
  328. fi
  329. }
  330. ## @description Replace `oldvar` with `newvar` if `oldvar` exists.
  331. ## @audience public
  332. ## @stability stable
  333. ## @replaceable yes
  334. ## @param oldvar
  335. ## @param newvar
  336. function hadoop_deprecate_envvar
  337. {
  338. local oldvar=$1
  339. local newvar=$2
  340. local oldval=${!oldvar}
  341. local newval=${!newvar}
  342. if [[ -n "${oldval}" ]]; then
  343. hadoop_error "WARNING: ${oldvar} has been replaced by ${newvar}. Using value of ${oldvar}."
  344. # shellcheck disable=SC2086
  345. eval ${newvar}=\"${oldval}\"
  346. # shellcheck disable=SC2086
  347. newval=${oldval}
  348. # shellcheck disable=SC2086
  349. eval ${newvar}=\"${newval}\"
  350. fi
  351. }
  352. ## @description Declare `var` being used and print its value.
  353. ## @audience public
  354. ## @stability stable
  355. ## @replaceable yes
  356. ## @param var
  357. function hadoop_using_envvar
  358. {
  359. local var=$1
  360. local val=${!var}
  361. if [[ -n "${val}" ]]; then
  362. hadoop_debug "${var} = ${val}"
  363. fi
  364. }
  365. ## @description Create the directory 'dir'.
  366. ## @audience public
  367. ## @stability stable
  368. ## @replaceable yes
  369. ## @param dir
  370. function hadoop_mkdir
  371. {
  372. local dir=$1
  373. if [[ ! -w "${dir}" ]] && [[ ! -d "${dir}" ]]; then
  374. hadoop_error "WARNING: ${dir} does not exist. Creating."
  375. if ! mkdir -p "${dir}"; then
  376. hadoop_error "ERROR: Unable to create ${dir}. Aborting."
  377. exit 1
  378. fi
  379. fi
  380. }
  381. ## @description Bootstraps the Hadoop shell environment
  382. ## @audience private
  383. ## @stability evolving
  384. ## @replaceable no
  385. function hadoop_bootstrap
  386. {
  387. # the root of the Hadoop installation
  388. # See HADOOP-6255 for the expected directory structure layout
  389. if [[ -n "${DEFAULT_LIBEXEC_DIR}" ]]; then
  390. hadoop_error "WARNING: DEFAULT_LIBEXEC_DIR ignored. It has been replaced by HADOOP_DEFAULT_LIBEXEC_DIR."
  391. fi
  392. # By now, HADOOP_LIBEXEC_DIR should have been defined upstream
  393. # We can piggyback off of that to figure out where the default
  394. # HADOOP_FREFIX should be. This allows us to run without
  395. # HADOOP_HOME ever being defined by a human! As a consequence
  396. # HADOOP_LIBEXEC_DIR now becomes perhaps the single most powerful
  397. # env var within Hadoop.
  398. if [[ -z "${HADOOP_LIBEXEC_DIR}" ]]; then
  399. hadoop_error "HADOOP_LIBEXEC_DIR is not defined. Exiting."
  400. exit 1
  401. fi
  402. HADOOP_DEFAULT_PREFIX=$(cd -P -- "${HADOOP_LIBEXEC_DIR}/.." >/dev/null && pwd -P)
  403. HADOOP_HOME=${HADOOP_HOME:-$HADOOP_DEFAULT_PREFIX}
  404. export HADOOP_HOME
  405. #
  406. # short-cuts. vendors may redefine these as well, preferably
  407. # in hadoop-layouts.sh
  408. #
  409. HADOOP_COMMON_DIR=${HADOOP_COMMON_DIR:-"share/hadoop/common"}
  410. HADOOP_COMMON_LIB_JARS_DIR=${HADOOP_COMMON_LIB_JARS_DIR:-"share/hadoop/common/lib"}
  411. HADOOP_COMMON_LIB_NATIVE_DIR=${HADOOP_COMMON_LIB_NATIVE_DIR:-"lib/native"}
  412. HDFS_DIR=${HDFS_DIR:-"share/hadoop/hdfs"}
  413. HDFS_LIB_JARS_DIR=${HDFS_LIB_JARS_DIR:-"share/hadoop/hdfs/lib"}
  414. YARN_DIR=${YARN_DIR:-"share/hadoop/yarn"}
  415. YARN_LIB_JARS_DIR=${YARN_LIB_JARS_DIR:-"share/hadoop/yarn/lib"}
  416. MAPRED_DIR=${MAPRED_DIR:-"share/hadoop/mapreduce"}
  417. MAPRED_LIB_JARS_DIR=${MAPRED_LIB_JARS_DIR:-"share/hadoop/mapreduce/lib"}
  418. HADOOP_TOOLS_HOME=${HADOOP_TOOLS_HOME:-${HADOOP_HOME}}
  419. HADOOP_TOOLS_DIR=${HADOOP_TOOLS_DIR:-"share/hadoop/tools"}
  420. HADOOP_TOOLS_LIB_JARS_DIR=${HADOOP_TOOLS_LIB_JARS_DIR:-"${HADOOP_TOOLS_DIR}/lib"}
  421. # by default, whatever we are about to run doesn't support
  422. # daemonization
  423. HADOOP_SUBCMD_SUPPORTDAEMONIZATION=false
  424. # by default, we have not been self-re-execed
  425. HADOOP_REEXECED_CMD=false
  426. # shellcheck disable=SC2034
  427. HADOOP_SUBCMD_SECURESERVICE=false
  428. # usage output set to zero
  429. hadoop_reset_usage
  430. export HADOOP_OS_TYPE=${HADOOP_OS_TYPE:-$(uname -s)}
  431. # defaults
  432. export HADOOP_OPTS=${HADOOP_OPTS:-"-Djava.net.preferIPv4Stack=true"}
  433. hadoop_debug "Initial HADOOP_OPTS=${HADOOP_OPTS}"
  434. }
  435. ## @description Locate Hadoop's configuration directory
  436. ## @audience private
  437. ## @stability evolving
  438. ## @replaceable no
  439. function hadoop_find_confdir
  440. {
  441. local conf_dir
  442. # An attempt at compatibility with some Hadoop 1.x
  443. # installs.
  444. if [[ -e "${HADOOP_HOME}/conf/hadoop-env.sh" ]]; then
  445. conf_dir="conf"
  446. else
  447. conf_dir="etc/hadoop"
  448. fi
  449. export HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-${HADOOP_HOME}/${conf_dir}}"
  450. hadoop_debug "HADOOP_CONF_DIR=${HADOOP_CONF_DIR}"
  451. }
  452. ## @description Validate ${HADOOP_CONF_DIR}
  453. ## @audience public
  454. ## @stability stable
  455. ## @replaceable yes
  456. ## @return will exit on failure conditions
  457. function hadoop_verify_confdir
  458. {
  459. # Check only log4j.properties by default.
  460. # --loglevel does not work without logger settings in log4j.log4j.properties.
  461. if [[ ! -f "${HADOOP_CONF_DIR}/log4j.properties" ]]; then
  462. hadoop_error "WARNING: log4j.properties is not found. HADOOP_CONF_DIR may be incomplete."
  463. fi
  464. }
  465. ## @description Import the hadoop-env.sh settings
  466. ## @audience private
  467. ## @stability evolving
  468. ## @replaceable no
  469. function hadoop_exec_hadoopenv
  470. {
  471. if [[ -z "${HADOOP_ENV_PROCESSED}" ]]; then
  472. if [[ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]]; then
  473. export HADOOP_ENV_PROCESSED=true
  474. # shellcheck disable=SC1090
  475. . "${HADOOP_CONF_DIR}/hadoop-env.sh"
  476. fi
  477. fi
  478. }
  479. ## @description Import the replaced functions
  480. ## @audience private
  481. ## @stability evolving
  482. ## @replaceable no
  483. function hadoop_exec_userfuncs
  484. {
  485. if [[ -e "${HADOOP_CONF_DIR}/hadoop-user-functions.sh" ]]; then
  486. # shellcheck disable=SC1090
  487. . "${HADOOP_CONF_DIR}/hadoop-user-functions.sh"
  488. fi
  489. }
  490. ## @description Read the user's settings. This provides for users to
  491. ## @description override and/or append hadoop-env.sh. It is not meant
  492. ## @description as a complete system override.
  493. ## @audience private
  494. ## @stability evolving
  495. ## @replaceable yes
  496. function hadoop_exec_user_hadoopenv
  497. {
  498. if [[ -f "${HOME}/.hadoop-env" ]]; then
  499. hadoop_debug "Applying the user's .hadoop-env"
  500. # shellcheck disable=SC1090
  501. . "${HOME}/.hadoop-env"
  502. fi
  503. }
  504. ## @description Read the user's settings. This provides for users to
  505. ## @description run Hadoop Shell API after system bootstrap
  506. ## @audience private
  507. ## @stability evolving
  508. ## @replaceable yes
  509. function hadoop_exec_hadooprc
  510. {
  511. if [[ -f "${HOME}/.hadooprc" ]]; then
  512. hadoop_debug "Applying the user's .hadooprc"
  513. # shellcheck disable=SC1090
  514. . "${HOME}/.hadooprc"
  515. fi
  516. }
  517. ## @description Import shellprofile.d content
  518. ## @audience private
  519. ## @stability evolving
  520. ## @replaceable yes
  521. function hadoop_import_shellprofiles
  522. {
  523. local i
  524. local files1
  525. local files2
  526. if [[ -d "${HADOOP_LIBEXEC_DIR}/shellprofile.d" ]]; then
  527. files1=(${HADOOP_LIBEXEC_DIR}/shellprofile.d/*.sh)
  528. hadoop_debug "shellprofiles: ${files1[*]}"
  529. else
  530. hadoop_error "WARNING: ${HADOOP_LIBEXEC_DIR}/shellprofile.d doesn't exist. Functionality may not work."
  531. fi
  532. if [[ -d "${HADOOP_CONF_DIR}/shellprofile.d" ]]; then
  533. files2=(${HADOOP_CONF_DIR}/shellprofile.d/*.sh)
  534. fi
  535. # enable bundled shellprofiles that come
  536. # from hadoop-tools. This converts the user-facing HADOOP_OPTIONAL_TOOLS
  537. # to the HADOOP_TOOLS_OPTIONS that the shell profiles expect.
  538. # See dist-tools-hooks-maker for how the example HADOOP_OPTIONAL_TOOLS
  539. # gets populated into hadoop-env.sh
  540. for i in ${HADOOP_OPTIONAL_TOOLS//,/ }; do
  541. hadoop_add_entry HADOOP_TOOLS_OPTIONS "${i}"
  542. done
  543. for i in "${files1[@]}" "${files2[@]}"
  544. do
  545. if [[ -n "${i}"
  546. && -f "${i}" ]]; then
  547. hadoop_debug "Profiles: importing ${i}"
  548. # shellcheck disable=SC1090
  549. . "${i}"
  550. fi
  551. done
  552. }
  553. ## @description Initialize the registered shell profiles
  554. ## @audience private
  555. ## @stability evolving
  556. ## @replaceable yes
  557. function hadoop_shellprofiles_init
  558. {
  559. local i
  560. for i in ${HADOOP_SHELL_PROFILES}
  561. do
  562. if declare -F _${i}_hadoop_init >/dev/null ; then
  563. hadoop_debug "Profiles: ${i} init"
  564. # shellcheck disable=SC2086
  565. _${i}_hadoop_init
  566. fi
  567. done
  568. }
  569. ## @description Apply the shell profile classpath additions
  570. ## @audience private
  571. ## @stability evolving
  572. ## @replaceable yes
  573. function hadoop_shellprofiles_classpath
  574. {
  575. local i
  576. for i in ${HADOOP_SHELL_PROFILES}
  577. do
  578. if declare -F _${i}_hadoop_classpath >/dev/null ; then
  579. hadoop_debug "Profiles: ${i} classpath"
  580. # shellcheck disable=SC2086
  581. _${i}_hadoop_classpath
  582. fi
  583. done
  584. }
  585. ## @description Apply the shell profile native library additions
  586. ## @audience private
  587. ## @stability evolving
  588. ## @replaceable yes
  589. function hadoop_shellprofiles_nativelib
  590. {
  591. local i
  592. for i in ${HADOOP_SHELL_PROFILES}
  593. do
  594. if declare -F _${i}_hadoop_nativelib >/dev/null ; then
  595. hadoop_debug "Profiles: ${i} nativelib"
  596. # shellcheck disable=SC2086
  597. _${i}_hadoop_nativelib
  598. fi
  599. done
  600. }
  601. ## @description Apply the shell profile final configuration
  602. ## @audience private
  603. ## @stability evolving
  604. ## @replaceable yes
  605. function hadoop_shellprofiles_finalize
  606. {
  607. local i
  608. for i in ${HADOOP_SHELL_PROFILES}
  609. do
  610. if declare -F _${i}_hadoop_finalize >/dev/null ; then
  611. hadoop_debug "Profiles: ${i} finalize"
  612. # shellcheck disable=SC2086
  613. _${i}_hadoop_finalize
  614. fi
  615. done
  616. }
  617. ## @description Initialize the Hadoop shell environment, now that
  618. ## @description user settings have been imported
  619. ## @audience private
  620. ## @stability evolving
  621. ## @replaceable no
  622. function hadoop_basic_init
  623. {
  624. # Some of these are also set in hadoop-env.sh.
  625. # we still set them here just in case hadoop-env.sh is
  626. # broken in some way, set up defaults, etc.
  627. #
  628. # but it is important to note that if you update these
  629. # you also need to update hadoop-env.sh as well!!!
  630. CLASSPATH=""
  631. hadoop_debug "Initialize CLASSPATH"
  632. if [[ -z "${HADOOP_COMMON_HOME}" ]] &&
  633. [[ -d "${HADOOP_HOME}/${HADOOP_COMMON_DIR}" ]]; then
  634. export HADOOP_COMMON_HOME="${HADOOP_HOME}"
  635. fi
  636. # default policy file for service-level authorization
  637. HADOOP_POLICYFILE=${HADOOP_POLICYFILE:-"hadoop-policy.xml"}
  638. # define HADOOP_HDFS_HOME
  639. if [[ -z "${HADOOP_HDFS_HOME}" ]] &&
  640. [[ -d "${HADOOP_HOME}/${HDFS_DIR}" ]]; then
  641. export HADOOP_HDFS_HOME="${HADOOP_HOME}"
  642. fi
  643. # define HADOOP_YARN_HOME
  644. if [[ -z "${HADOOP_YARN_HOME}" ]] &&
  645. [[ -d "${HADOOP_HOME}/${YARN_DIR}" ]]; then
  646. export HADOOP_YARN_HOME="${HADOOP_HOME}"
  647. fi
  648. # define HADOOP_MAPRED_HOME
  649. if [[ -z "${HADOOP_MAPRED_HOME}" ]] &&
  650. [[ -d "${HADOOP_HOME}/${MAPRED_DIR}" ]]; then
  651. export HADOOP_MAPRED_HOME="${HADOOP_HOME}"
  652. fi
  653. if [[ ! -d "${HADOOP_COMMON_HOME}" ]]; then
  654. hadoop_error "ERROR: Invalid HADOOP_COMMON_HOME"
  655. exit 1
  656. fi
  657. if [[ ! -d "${HADOOP_HDFS_HOME}" ]]; then
  658. hadoop_error "ERROR: Invalid HADOOP_HDFS_HOME"
  659. exit 1
  660. fi
  661. if [[ ! -d "${HADOOP_YARN_HOME}" ]]; then
  662. hadoop_error "ERROR: Invalid HADOOP_YARN_HOME"
  663. exit 1
  664. fi
  665. if [[ ! -d "${HADOOP_MAPRED_HOME}" ]]; then
  666. hadoop_error "ERROR: Invalid HADOOP_MAPRED_HOME"
  667. exit 1
  668. fi
  669. # if for some reason the shell doesn't have $USER defined
  670. # (e.g., ssh'd in to execute a command)
  671. # let's get the effective username and use that
  672. USER=${USER:-$(id -nu)}
  673. HADOOP_IDENT_STRING=${HADOOP_IDENT_STRING:-$USER}
  674. HADOOP_LOG_DIR=${HADOOP_LOG_DIR:-"${HADOOP_HOME}/logs"}
  675. HADOOP_LOGFILE=${HADOOP_LOGFILE:-hadoop.log}
  676. HADOOP_LOGLEVEL=${HADOOP_LOGLEVEL:-INFO}
  677. HADOOP_NICENESS=${HADOOP_NICENESS:-0}
  678. HADOOP_STOP_TIMEOUT=${HADOOP_STOP_TIMEOUT:-5}
  679. HADOOP_PID_DIR=${HADOOP_PID_DIR:-/tmp}
  680. HADOOP_ROOT_LOGGER=${HADOOP_ROOT_LOGGER:-${HADOOP_LOGLEVEL},console}
  681. HADOOP_DAEMON_ROOT_LOGGER=${HADOOP_DAEMON_ROOT_LOGGER:-${HADOOP_LOGLEVEL},RFA}
  682. HADOOP_SECURITY_LOGGER=${HADOOP_SECURITY_LOGGER:-INFO,NullAppender}
  683. HADOOP_SSH_OPTS=${HADOOP_SSH_OPTS-"-o BatchMode=yes -o StrictHostKeyChecking=no -o ConnectTimeout=10s"}
  684. HADOOP_SECURE_LOG_DIR=${HADOOP_SECURE_LOG_DIR:-${HADOOP_LOG_DIR}}
  685. HADOOP_SECURE_PID_DIR=${HADOOP_SECURE_PID_DIR:-${HADOOP_PID_DIR}}
  686. HADOOP_SSH_PARALLEL=${HADOOP_SSH_PARALLEL:-10}
  687. }
  688. ## @description Set the worker support information to the contents
  689. ## @description of `filename`
  690. ## @audience public
  691. ## @stability stable
  692. ## @replaceable no
  693. ## @param filename
  694. ## @return will exit if file does not exist
  695. function hadoop_populate_workers_file
  696. {
  697. local workersfile=$1
  698. shift
  699. if [[ -f "${workersfile}" ]]; then
  700. # shellcheck disable=2034
  701. HADOOP_WORKERS="${workersfile}"
  702. elif [[ -f "${HADOOP_CONF_DIR}/${workersfile}" ]]; then
  703. # shellcheck disable=2034
  704. HADOOP_WORKERS="${HADOOP_CONF_DIR}/${workersfile}"
  705. else
  706. hadoop_error "ERROR: Cannot find hosts file \"${workersfile}\""
  707. hadoop_exit_with_usage 1
  708. fi
  709. }
  710. ## @description Rotates the given `file` until `number` of
  711. ## @description files exist.
  712. ## @audience public
  713. ## @stability stable
  714. ## @replaceable no
  715. ## @param filename
  716. ## @param [number]
  717. ## @return $? will contain last mv's return value
  718. function hadoop_rotate_log
  719. {
  720. #
  721. # Users are likely to replace this one for something
  722. # that gzips or uses dates or who knows what.
  723. #
  724. # be aware that &1 and &2 might go through here
  725. # so don't do anything too crazy...
  726. #
  727. local log=$1;
  728. local num=${2:-5};
  729. if [[ -f "${log}" ]]; then # rotate logs
  730. while [[ ${num} -gt 1 ]]; do
  731. #shellcheck disable=SC2086
  732. let prev=${num}-1
  733. if [[ -f "${log}.${prev}" ]]; then
  734. mv "${log}.${prev}" "${log}.${num}"
  735. fi
  736. num=${prev}
  737. done
  738. mv "${log}" "${log}.${num}"
  739. fi
  740. }
  741. ## @description Via ssh, log into `hostname` and run `command`
  742. ## @audience private
  743. ## @stability evolving
  744. ## @replaceable yes
  745. ## @param hostname
  746. ## @param command
  747. ## @param [...]
  748. function hadoop_actual_ssh
  749. {
  750. # we are passing this function to xargs
  751. # should get hostname followed by rest of command line
  752. local worker=$1
  753. shift
  754. # shellcheck disable=SC2086
  755. ssh ${HADOOP_SSH_OPTS} ${worker} $"${@// /\\ }" 2>&1 | sed "s/^/$worker: /"
  756. }
  757. ## @description Connect to ${HADOOP_WORKERS} or ${HADOOP_WORKER_NAMES}
  758. ## @description and execute command.
  759. ## @audience private
  760. ## @stability evolving
  761. ## @replaceable yes
  762. ## @param command
  763. ## @param [...]
  764. function hadoop_connect_to_hosts
  765. {
  766. # shellcheck disable=SC2124
  767. local params="$@"
  768. local worker_file
  769. local tmpslvnames
  770. #
  771. # ssh (or whatever) to a host
  772. #
  773. # User can specify hostnames or a file where the hostnames are (not both)
  774. if [[ -n "${HADOOP_WORKERS}" && -n "${HADOOP_WORKER_NAMES}" ]] ; then
  775. hadoop_error "ERROR: Both HADOOP_WORKERS and HADOOP_WORKER_NAMES were defined. Aborting."
  776. exit 1
  777. elif [[ -z "${HADOOP_WORKER_NAMES}" ]]; then
  778. if [[ -n "${HADOOP_WORKERS}" ]]; then
  779. worker_file=${HADOOP_WORKERS}
  780. elif [[ -f "${HADOOP_CONF_DIR}/workers" ]]; then
  781. worker_file=${HADOOP_CONF_DIR}/workers
  782. elif [[ -f "${HADOOP_CONF_DIR}/slaves" ]]; then
  783. hadoop_error "WARNING: 'slaves' file has been deprecated. Please use 'workers' file instead."
  784. worker_file=${HADOOP_CONF_DIR}/slaves
  785. fi
  786. fi
  787. # if pdsh is available, let's use it. otherwise default
  788. # to a loop around ssh. (ugh)
  789. if [[ -e '/usr/bin/pdsh' ]]; then
  790. if [[ -z "${HADOOP_WORKER_NAMES}" ]] ; then
  791. # if we were given a file, just let pdsh deal with it.
  792. # shellcheck disable=SC2086
  793. PDSH_SSH_ARGS_APPEND="${HADOOP_SSH_OPTS}" pdsh \
  794. -f "${HADOOP_SSH_PARALLEL}" -w ^"${worker_file}" $"${@// /\\ }" 2>&1
  795. else
  796. # no spaces allowed in the pdsh arg host list
  797. # shellcheck disable=SC2086
  798. tmpslvnames=$(echo ${HADOOP_WORKER_NAMES} | tr -s ' ' ,)
  799. PDSH_SSH_ARGS_APPEND="${HADOOP_SSH_OPTS}" pdsh \
  800. -f "${HADOOP_SSH_PARALLEL}" \
  801. -w "${tmpslvnames}" $"${@// /\\ }" 2>&1
  802. fi
  803. else
  804. if [[ -z "${HADOOP_WORKER_NAMES}" ]]; then
  805. HADOOP_WORKER_NAMES=$(sed 's/#.*$//;/^$/d' "${worker_file}")
  806. fi
  807. hadoop_connect_to_hosts_without_pdsh "${params}"
  808. fi
  809. }
  810. ## @description Connect to ${HADOOP_WORKER_NAMES} and execute command
  811. ## @description under the environment which does not support pdsh.
  812. ## @audience private
  813. ## @stability evolving
  814. ## @replaceable yes
  815. ## @param command
  816. ## @param [...]
  817. function hadoop_connect_to_hosts_without_pdsh
  818. {
  819. # shellcheck disable=SC2124
  820. local params="$@"
  821. local workers=(${HADOOP_WORKER_NAMES})
  822. for (( i = 0; i < ${#workers[@]}; i++ ))
  823. do
  824. if (( i != 0 && i % HADOOP_SSH_PARALLEL == 0 )); then
  825. wait
  826. fi
  827. # shellcheck disable=SC2086
  828. hadoop_actual_ssh "${workers[$i]}" ${params} &
  829. done
  830. wait
  831. }
  832. ## @description Utility routine to handle --workers mode
  833. ## @audience private
  834. ## @stability evolving
  835. ## @replaceable yes
  836. ## @param commandarray
  837. function hadoop_common_worker_mode_execute
  838. {
  839. #
  840. # input should be the command line as given by the user
  841. # in the form of an array
  842. #
  843. local argv=("$@")
  844. # if --workers is still on the command line, remove it
  845. # to prevent loops
  846. # Also remove --hostnames and --hosts along with arg values
  847. local argsSize=${#argv[@]};
  848. for (( i = 0; i < argsSize; i++ ))
  849. do
  850. if [[ "${argv[$i]}" =~ ^--workers$ ]]; then
  851. unset argv[$i]
  852. elif [[ "${argv[$i]}" =~ ^--hostnames$ ]] ||
  853. [[ "${argv[$i]}" =~ ^--hosts$ ]]; then
  854. unset argv[$i];
  855. let i++;
  856. unset argv[$i];
  857. fi
  858. done
  859. if [[ ${QATESTMODE} = true ]]; then
  860. echo "${argv[@]}"
  861. return
  862. fi
  863. hadoop_connect_to_hosts -- "${argv[@]}"
  864. }
  865. ## @description Verify that a shell command was passed a valid
  866. ## @description class name
  867. ## @audience public
  868. ## @stability stable
  869. ## @replaceable yes
  870. ## @param classname
  871. ## @return 0 = success
  872. ## @return 1 = failure w/user message
  873. function hadoop_validate_classname
  874. {
  875. local class=$1
  876. shift 1
  877. if [[ ! ${class} =~ \. ]]; then
  878. # assuming the arg is typo of command if it does not conatain ".".
  879. # class belonging to no package is not allowed as a result.
  880. hadoop_error "ERROR: ${class} is not COMMAND nor fully qualified CLASSNAME."
  881. return 1
  882. fi
  883. return 0
  884. }
  885. ## @description Append the `appendstring` if `checkstring` is not
  886. ## @description present in the given `envvar`
  887. ## @audience public
  888. ## @stability stable
  889. ## @replaceable yes
  890. ## @param envvar
  891. ## @param checkstring
  892. ## @param appendstring
  893. function hadoop_add_param
  894. {
  895. #
  896. # general param dedupe..
  897. # $1 is what we are adding to
  898. # $2 is the name of what we want to add (key)
  899. # $3 is the key+value of what we're adding
  900. #
  901. # doing it this way allows us to support all sorts of
  902. # different syntaxes, just so long as they are space
  903. # delimited
  904. #
  905. if [[ ! ${!1} =~ $2 ]] ; then
  906. #shellcheck disable=SC2140
  907. eval "$1"="'${!1} $3'"
  908. if [[ ${!1:0:1} = ' ' ]]; then
  909. #shellcheck disable=SC2140
  910. eval "$1"="'${!1# }'"
  911. fi
  912. hadoop_debug "$1 accepted $3"
  913. else
  914. hadoop_debug "$1 declined $3"
  915. fi
  916. }
  917. ## @description Register the given `shellprofile` to the Hadoop
  918. ## @description shell subsystem
  919. ## @audience public
  920. ## @stability stable
  921. ## @replaceable yes
  922. ## @param shellprofile
  923. function hadoop_add_profile
  924. {
  925. # shellcheck disable=SC2086
  926. hadoop_add_param HADOOP_SHELL_PROFILES $1 $1
  927. }
  928. ## @description Add a file system object (directory, file,
  929. ## @description wildcard, ...) to the classpath. Optionally provide
  930. ## @description a hint as to where in the classpath it should go.
  931. ## @audience public
  932. ## @stability stable
  933. ## @replaceable yes
  934. ## @param object
  935. ## @param [before|after]
  936. ## @return 0 = success (added or duplicate)
  937. ## @return 1 = failure (doesn't exist or some other reason)
  938. function hadoop_add_classpath
  939. {
  940. # However, with classpath (& JLP), we can do dedupe
  941. # along with some sanity checking (e.g., missing directories)
  942. # since we have a better idea of what is legal
  943. #
  944. # for wildcard at end, we can
  945. # at least check the dir exists
  946. if [[ $1 =~ ^.*\*$ ]]; then
  947. local mp
  948. mp=$(dirname "$1")
  949. if [[ ! -d "${mp}" ]]; then
  950. hadoop_debug "Rejected CLASSPATH: $1 (not a dir)"
  951. return 1
  952. fi
  953. # no wildcard in the middle, so check existence
  954. # (doesn't matter *what* it is)
  955. elif [[ ! $1 =~ ^.*\*.*$ ]] && [[ ! -e "$1" ]]; then
  956. hadoop_debug "Rejected CLASSPATH: $1 (does not exist)"
  957. return 1
  958. fi
  959. if [[ -z "${CLASSPATH}" ]]; then
  960. CLASSPATH=$1
  961. hadoop_debug "Initial CLASSPATH=$1"
  962. elif [[ ":${CLASSPATH}:" != *":$1:"* ]]; then
  963. if [[ "$2" = "before" ]]; then
  964. CLASSPATH="$1:${CLASSPATH}"
  965. hadoop_debug "Prepend CLASSPATH: $1"
  966. else
  967. CLASSPATH+=:$1
  968. hadoop_debug "Append CLASSPATH: $1"
  969. fi
  970. else
  971. hadoop_debug "Dupe CLASSPATH: $1"
  972. fi
  973. return 0
  974. }
  975. ## @description Add a file system object (directory, file,
  976. ## @description wildcard, ...) to the colonpath. Optionally provide
  977. ## @description a hint as to where in the colonpath it should go.
  978. ## @description Prior to adding, objects are checked for duplication
  979. ## @description and check for existence. Many other functions use
  980. ## @description this function as their base implementation
  981. ## @description including `hadoop_add_javalibpath` and `hadoop_add_ldlibpath`.
  982. ## @audience public
  983. ## @stability stable
  984. ## @replaceable yes
  985. ## @param envvar
  986. ## @param object
  987. ## @param [before|after]
  988. ## @return 0 = success (added or duplicate)
  989. ## @return 1 = failure (doesn't exist or some other reason)
  990. function hadoop_add_colonpath
  991. {
  992. # this is CLASSPATH, JLP, etc but with dedupe but no
  993. # other checking
  994. if [[ -d "${2}" ]] && [[ ":${!1}:" != *":$2:"* ]]; then
  995. if [[ -z "${!1}" ]]; then
  996. # shellcheck disable=SC2086
  997. eval $1="'$2'"
  998. hadoop_debug "Initial colonpath($1): $2"
  999. elif [[ "$3" = "before" ]]; then
  1000. # shellcheck disable=SC2086
  1001. eval $1="'$2:${!1}'"
  1002. hadoop_debug "Prepend colonpath($1): $2"
  1003. else
  1004. # shellcheck disable=SC2086
  1005. eval $1+=":'$2'"
  1006. hadoop_debug "Append colonpath($1): $2"
  1007. fi
  1008. return 0
  1009. fi
  1010. hadoop_debug "Rejected colonpath($1): $2"
  1011. return 1
  1012. }
  1013. ## @description Add a file system object (directory, file,
  1014. ## @description wildcard, ...) to the Java JNI path. Optionally
  1015. ## @description provide a hint as to where in the Java JNI path
  1016. ## @description it should go.
  1017. ## @audience public
  1018. ## @stability stable
  1019. ## @replaceable yes
  1020. ## @param object
  1021. ## @param [before|after]
  1022. ## @return 0 = success (added or duplicate)
  1023. ## @return 1 = failure (doesn't exist or some other reason)
  1024. function hadoop_add_javalibpath
  1025. {
  1026. # specialized function for a common use case
  1027. hadoop_add_colonpath JAVA_LIBRARY_PATH "$1" "$2"
  1028. }
  1029. ## @description Add a file system object (directory, file,
  1030. ## @description wildcard, ...) to the LD_LIBRARY_PATH. Optionally
  1031. ## @description provide a hint as to where in the LD_LIBRARY_PATH
  1032. ## @description it should go.
  1033. ## @audience public
  1034. ## @stability stable
  1035. ## @replaceable yes
  1036. ## @param object
  1037. ## @param [before|after]
  1038. ## @return 0 = success (added or duplicate)
  1039. ## @return 1 = failure (doesn't exist or some other reason)
  1040. function hadoop_add_ldlibpath
  1041. {
  1042. local status
  1043. # specialized function for a common use case
  1044. hadoop_add_colonpath LD_LIBRARY_PATH "$1" "$2"
  1045. status=$?
  1046. # note that we export this
  1047. export LD_LIBRARY_PATH
  1048. return ${status}
  1049. }
  1050. ## @description Add the common/core Hadoop components to the
  1051. ## @description environment
  1052. ## @audience private
  1053. ## @stability evolving
  1054. ## @replaceable yes
  1055. ## @returns 1 on failure, may exit
  1056. ## @returns 0 on success
  1057. function hadoop_add_common_to_classpath
  1058. {
  1059. #
  1060. # get all of the common jars+config in the path
  1061. #
  1062. if [[ -z "${HADOOP_COMMON_HOME}"
  1063. || -z "${HADOOP_COMMON_DIR}"
  1064. || -z "${HADOOP_COMMON_LIB_JARS_DIR}" ]]; then
  1065. hadoop_debug "COMMON_HOME=${HADOOP_COMMON_HOME}"
  1066. hadoop_debug "COMMON_DIR=${HADOOP_COMMON_DIR}"
  1067. hadoop_debug "COMMON_LIB_JARS_DIR=${HADOOP_COMMON_LIB_JARS_DIR}"
  1068. hadoop_error "ERROR: HADOOP_COMMON_HOME or related vars are not configured."
  1069. exit 1
  1070. fi
  1071. # developers
  1072. if [[ -n "${HADOOP_ENABLE_BUILD_PATHS}" ]]; then
  1073. hadoop_add_classpath "${HADOOP_COMMON_HOME}/hadoop-common/target/classes"
  1074. fi
  1075. hadoop_add_classpath "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_LIB_JARS_DIR}"'/*'
  1076. hadoop_add_classpath "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_DIR}"'/*'
  1077. }
  1078. ## @description Run libexec/tools/module.sh to add to the classpath
  1079. ## @description environment
  1080. ## @audience private
  1081. ## @stability evolving
  1082. ## @replaceable yes
  1083. ## @param module
  1084. function hadoop_add_to_classpath_tools
  1085. {
  1086. declare module=$1
  1087. if [[ -f "${HADOOP_LIBEXEC_DIR}/tools/${module}.sh" ]]; then
  1088. # shellcheck disable=SC1090
  1089. . "${HADOOP_LIBEXEC_DIR}/tools/${module}.sh"
  1090. else
  1091. hadoop_error "ERROR: Tools helper ${HADOOP_LIBEXEC_DIR}/tools/${module}.sh was not found."
  1092. fi
  1093. if declare -f hadoop_classpath_tools_${module} >/dev/null 2>&1; then
  1094. "hadoop_classpath_tools_${module}"
  1095. fi
  1096. }
  1097. ## @description Add the user's custom classpath settings to the
  1098. ## @description environment
  1099. ## @audience private
  1100. ## @stability evolving
  1101. ## @replaceable yes
  1102. function hadoop_add_to_classpath_userpath
  1103. {
  1104. # Add the user-specified HADOOP_CLASSPATH to the
  1105. # official CLASSPATH env var if HADOOP_USE_CLIENT_CLASSLOADER
  1106. # is not set.
  1107. # Add it first or last depending on if user has
  1108. # set env-var HADOOP_USER_CLASSPATH_FIRST
  1109. # we'll also dedupe it, because we're cool like that.
  1110. #
  1111. declare -a array
  1112. declare -i c=0
  1113. declare -i j
  1114. declare -i i
  1115. declare idx
  1116. if [[ -n "${HADOOP_CLASSPATH}" ]]; then
  1117. # I wonder if Java runs on VMS.
  1118. for idx in $(echo "${HADOOP_CLASSPATH}" | tr : '\n'); do
  1119. array[${c}]=${idx}
  1120. ((c=c+1))
  1121. done
  1122. # bats gets confused by j getting set to 0
  1123. ((j=c-1)) || ${QATESTMODE}
  1124. if [[ -z "${HADOOP_USE_CLIENT_CLASSLOADER}" ]]; then
  1125. if [[ -z "${HADOOP_USER_CLASSPATH_FIRST}" ]]; then
  1126. for ((i=0; i<=j; i++)); do
  1127. hadoop_add_classpath "${array[$i]}" after
  1128. done
  1129. else
  1130. for ((i=j; i>=0; i--)); do
  1131. hadoop_add_classpath "${array[$i]}" before
  1132. done
  1133. fi
  1134. fi
  1135. fi
  1136. }
  1137. ## @description Routine to configure any OS-specific settings.
  1138. ## @audience public
  1139. ## @stability stable
  1140. ## @replaceable yes
  1141. ## @return may exit on failure conditions
  1142. function hadoop_os_tricks
  1143. {
  1144. local bindv6only
  1145. HADOOP_IS_CYGWIN=false
  1146. case ${HADOOP_OS_TYPE} in
  1147. Darwin)
  1148. if [[ -z "${JAVA_HOME}" ]]; then
  1149. if [[ -x /usr/libexec/java_home ]]; then
  1150. JAVA_HOME="$(/usr/libexec/java_home)"
  1151. export JAVA_HOME
  1152. else
  1153. JAVA_HOME=/Library/Java/Home
  1154. export JAVA_HOME
  1155. fi
  1156. fi
  1157. ;;
  1158. Linux)
  1159. # Newer versions of glibc use an arena memory allocator that
  1160. # causes virtual # memory usage to explode. This interacts badly
  1161. # with the many threads that we use in Hadoop. Tune the variable
  1162. # down to prevent vmem explosion.
  1163. export MALLOC_ARENA_MAX=${MALLOC_ARENA_MAX:-4}
  1164. # we put this in QA test mode off so that non-Linux can test
  1165. if [[ "${QATESTMODE}" = true ]]; then
  1166. return
  1167. fi
  1168. # NOTE! HADOOP_ALLOW_IPV6 is a developer hook. We leave it
  1169. # undocumented in hadoop-env.sh because we don't want users to
  1170. # shoot themselves in the foot while devs make IPv6 work.
  1171. bindv6only=$(/sbin/sysctl -n net.ipv6.bindv6only 2> /dev/null)
  1172. if [[ -n "${bindv6only}" ]] &&
  1173. [[ "${bindv6only}" -eq "1" ]] &&
  1174. [[ "${HADOOP_ALLOW_IPV6}" != "yes" ]]; then
  1175. hadoop_error "ERROR: \"net.ipv6.bindv6only\" is set to 1 "
  1176. hadoop_error "ERROR: Hadoop networking could be broken. Aborting."
  1177. hadoop_error "ERROR: For more info: http://wiki.apache.org/hadoop/HadoopIPv6"
  1178. exit 1
  1179. fi
  1180. ;;
  1181. CYGWIN*)
  1182. # Flag that we're running on Cygwin to trigger path translation later.
  1183. HADOOP_IS_CYGWIN=true
  1184. ;;
  1185. esac
  1186. }
  1187. ## @description Configure/verify ${JAVA_HOME}
  1188. ## @audience public
  1189. ## @stability stable
  1190. ## @replaceable yes
  1191. ## @return may exit on failure conditions
  1192. function hadoop_java_setup
  1193. {
  1194. # Bail if we did not detect it
  1195. if [[ -z "${JAVA_HOME}" ]]; then
  1196. hadoop_error "ERROR: JAVA_HOME is not set and could not be found."
  1197. exit 1
  1198. fi
  1199. if [[ ! -d "${JAVA_HOME}" ]]; then
  1200. hadoop_error "ERROR: JAVA_HOME ${JAVA_HOME} does not exist."
  1201. exit 1
  1202. fi
  1203. JAVA="${JAVA_HOME}/bin/java"
  1204. if [[ ! -x "$JAVA" ]]; then
  1205. hadoop_error "ERROR: $JAVA is not executable."
  1206. exit 1
  1207. fi
  1208. }
  1209. ## @description Finish Java JNI paths prior to execution
  1210. ## @audience private
  1211. ## @stability evolving
  1212. ## @replaceable yes
  1213. function hadoop_finalize_libpaths
  1214. {
  1215. if [[ -n "${JAVA_LIBRARY_PATH}" ]]; then
  1216. hadoop_translate_cygwin_path JAVA_LIBRARY_PATH
  1217. hadoop_add_param HADOOP_OPTS java.library.path \
  1218. "-Djava.library.path=${JAVA_LIBRARY_PATH}"
  1219. export LD_LIBRARY_PATH
  1220. fi
  1221. }
  1222. ## @description Finish Java heap parameters prior to execution
  1223. ## @audience private
  1224. ## @stability evolving
  1225. ## @replaceable yes
  1226. function hadoop_finalize_hadoop_heap
  1227. {
  1228. if [[ -n "${HADOOP_HEAPSIZE_MAX}" ]]; then
  1229. if [[ "${HADOOP_HEAPSIZE_MAX}" =~ ^[0-9]+$ ]]; then
  1230. HADOOP_HEAPSIZE_MAX="${HADOOP_HEAPSIZE_MAX}m"
  1231. fi
  1232. hadoop_add_param HADOOP_OPTS Xmx "-Xmx${HADOOP_HEAPSIZE_MAX}"
  1233. fi
  1234. # backwards compatibility
  1235. if [[ -n "${HADOOP_HEAPSIZE}" ]]; then
  1236. if [[ "${HADOOP_HEAPSIZE}" =~ ^[0-9]+$ ]]; then
  1237. HADOOP_HEAPSIZE="${HADOOP_HEAPSIZE}m"
  1238. fi
  1239. hadoop_add_param HADOOP_OPTS Xmx "-Xmx${HADOOP_HEAPSIZE}"
  1240. fi
  1241. if [[ -n "${HADOOP_HEAPSIZE_MIN}" ]]; then
  1242. if [[ "${HADOOP_HEAPSIZE_MIN}" =~ ^[0-9]+$ ]]; then
  1243. HADOOP_HEAPSIZE_MIN="${HADOOP_HEAPSIZE_MIN}m"
  1244. fi
  1245. hadoop_add_param HADOOP_OPTS Xms "-Xms${HADOOP_HEAPSIZE_MIN}"
  1246. fi
  1247. }
  1248. ## @description Converts the contents of the variable name
  1249. ## @description `varnameref` into the equivalent Windows path.
  1250. ## @description If the second parameter is true, then `varnameref`
  1251. ## @description is treated as though it was a path list.
  1252. ## @audience public
  1253. ## @stability stable
  1254. ## @replaceable yes
  1255. ## @param varnameref
  1256. ## @param [true]
  1257. function hadoop_translate_cygwin_path
  1258. {
  1259. if [[ "${HADOOP_IS_CYGWIN}" = "true" ]]; then
  1260. if [[ "$2" = "true" ]]; then
  1261. #shellcheck disable=SC2016
  1262. eval "$1"='$(cygpath -p -w "${!1}" 2>/dev/null)'
  1263. else
  1264. #shellcheck disable=SC2016
  1265. eval "$1"='$(cygpath -w "${!1}" 2>/dev/null)'
  1266. fi
  1267. fi
  1268. }
  1269. ## @description Adds the HADOOP_CLIENT_OPTS variable to
  1270. ## @description HADOOP_OPTS if HADOOP_SUBCMD_SUPPORTDAEMONIZATION is false
  1271. ## @audience public
  1272. ## @stability stable
  1273. ## @replaceable yes
  1274. function hadoop_add_client_opts
  1275. {
  1276. if [[ "${HADOOP_SUBCMD_SUPPORTDAEMONIZATION}" = false
  1277. || -z "${HADOOP_SUBCMD_SUPPORTDAEMONIZATION}" ]]; then
  1278. hadoop_debug "Appending HADOOP_CLIENT_OPTS onto HADOOP_OPTS"
  1279. HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
  1280. fi
  1281. }
  1282. ## @description Finish configuring Hadoop specific system properties
  1283. ## @description prior to executing Java
  1284. ## @audience private
  1285. ## @stability evolving
  1286. ## @replaceable yes
  1287. function hadoop_finalize_hadoop_opts
  1288. {
  1289. hadoop_translate_cygwin_path HADOOP_LOG_DIR
  1290. hadoop_add_param HADOOP_OPTS hadoop.log.dir "-Dhadoop.log.dir=${HADOOP_LOG_DIR}"
  1291. hadoop_add_param HADOOP_OPTS hadoop.log.file "-Dhadoop.log.file=${HADOOP_LOGFILE}"
  1292. hadoop_translate_cygwin_path HADOOP_HOME
  1293. export HADOOP_HOME
  1294. hadoop_add_param HADOOP_OPTS hadoop.home.dir "-Dhadoop.home.dir=${HADOOP_HOME}"
  1295. hadoop_add_param HADOOP_OPTS hadoop.id.str "-Dhadoop.id.str=${HADOOP_IDENT_STRING}"
  1296. hadoop_add_param HADOOP_OPTS hadoop.root.logger "-Dhadoop.root.logger=${HADOOP_ROOT_LOGGER}"
  1297. hadoop_add_param HADOOP_OPTS hadoop.policy.file "-Dhadoop.policy.file=${HADOOP_POLICYFILE}"
  1298. hadoop_add_param HADOOP_OPTS hadoop.security.logger "-Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER}"
  1299. }
  1300. ## @description Finish Java classpath prior to execution
  1301. ## @audience private
  1302. ## @stability evolving
  1303. ## @replaceable yes
  1304. function hadoop_finalize_classpath
  1305. {
  1306. hadoop_add_classpath "${HADOOP_CONF_DIR}" before
  1307. # user classpath gets added at the last minute. this allows
  1308. # override of CONF dirs and more
  1309. hadoop_add_to_classpath_userpath
  1310. hadoop_translate_cygwin_path CLASSPATH true
  1311. }
  1312. ## @description Finish Catalina configuration prior to execution
  1313. ## @audience private
  1314. ## @stability evolving
  1315. ## @replaceable yes
  1316. function hadoop_finalize_catalina_opts
  1317. {
  1318. local prefix=${HADOOP_CATALINA_PREFIX}
  1319. hadoop_add_param CATALINA_OPTS hadoop.home.dir "-Dhadoop.home.dir=${HADOOP_HOME}"
  1320. if [[ -n "${JAVA_LIBRARY_PATH}" ]]; then
  1321. hadoop_add_param CATALINA_OPTS java.library.path "-Djava.library.path=${JAVA_LIBRARY_PATH}"
  1322. fi
  1323. hadoop_add_param CATALINA_OPTS "${prefix}.home.dir" "-D${prefix}.home.dir=${HADOOP_HOME}"
  1324. hadoop_add_param CATALINA_OPTS "${prefix}.config.dir" "-D${prefix}.config.dir=${HADOOP_CATALINA_CONFIG}"
  1325. hadoop_add_param CATALINA_OPTS "${prefix}.log.dir" "-D${prefix}.log.dir=${HADOOP_CATALINA_LOG}"
  1326. hadoop_add_param CATALINA_OPTS "${prefix}.temp.dir" "-D${prefix}.temp.dir=${HADOOP_CATALINA_TEMP}"
  1327. hadoop_add_param CATALINA_OPTS "${prefix}.admin.port" "-D${prefix}.admin.port=${HADOOP_CATALINA_ADMIN_PORT}"
  1328. hadoop_add_param CATALINA_OPTS "${prefix}.http.port" "-D${prefix}.http.port=${HADOOP_CATALINA_HTTP_PORT}"
  1329. hadoop_add_param CATALINA_OPTS "${prefix}.max.threads" "-D${prefix}.max.threads=${HADOOP_CATALINA_MAX_THREADS}"
  1330. hadoop_add_param CATALINA_OPTS "${prefix}.max.http.header.size" "-D${prefix}.max.http.header.size=${HADOOP_CATALINA_MAX_HTTP_HEADER_SIZE}"
  1331. hadoop_add_param CATALINA_OPTS "${prefix}.ssl.keystore.file" "-D${prefix}.ssl.keystore.file=${HADOOP_CATALINA_SSL_KEYSTORE_FILE}"
  1332. }
  1333. ## @description Finish all the remaining environment settings prior
  1334. ## @description to executing Java. This is a wrapper that calls
  1335. ## @description the other `finalize` routines.
  1336. ## @audience private
  1337. ## @stability evolving
  1338. ## @replaceable yes
  1339. function hadoop_finalize
  1340. {
  1341. hadoop_shellprofiles_finalize
  1342. hadoop_finalize_classpath
  1343. hadoop_finalize_libpaths
  1344. hadoop_finalize_hadoop_heap
  1345. hadoop_finalize_hadoop_opts
  1346. hadoop_translate_cygwin_path HADOOP_HOME
  1347. hadoop_translate_cygwin_path HADOOP_CONF_DIR
  1348. hadoop_translate_cygwin_path HADOOP_COMMON_HOME
  1349. hadoop_translate_cygwin_path HADOOP_HDFS_HOME
  1350. hadoop_translate_cygwin_path HADOOP_YARN_HOME
  1351. hadoop_translate_cygwin_path HADOOP_MAPRED_HOME
  1352. }
  1353. ## @description Print usage information and exit with the passed
  1354. ## @description `exitcode`
  1355. ## @audience public
  1356. ## @stability stable
  1357. ## @replaceable no
  1358. ## @param exitcode
  1359. ## @return This function will always exit.
  1360. function hadoop_exit_with_usage
  1361. {
  1362. local exitcode=$1
  1363. if [[ -z $exitcode ]]; then
  1364. exitcode=1
  1365. fi
  1366. # shellcheck disable=SC2034
  1367. if declare -F hadoop_usage >/dev/null ; then
  1368. hadoop_usage
  1369. elif [[ -x /usr/bin/cowsay ]]; then
  1370. /usr/bin/cowsay -f elephant "Sorry, no help available."
  1371. else
  1372. hadoop_error "Sorry, no help available."
  1373. fi
  1374. exit $exitcode
  1375. }
  1376. ## @description Verify that prerequisites have been met prior to
  1377. ## @description excuting a privileged program.
  1378. ## @audience private
  1379. ## @stability evolving
  1380. ## @replaceable yes
  1381. ## @return This routine may exit.
  1382. function hadoop_verify_secure_prereq
  1383. {
  1384. # if you are on an OS like Illumos that has functional roles
  1385. # and you are using pfexec, you'll probably want to change
  1386. # this.
  1387. if ! hadoop_privilege_check && [[ -z "${HADOOP_SECURE_COMMAND}" ]]; then
  1388. hadoop_error "ERROR: You must be a privileged user in order to run a secure service."
  1389. exit 1
  1390. else
  1391. return 0
  1392. fi
  1393. }
  1394. ## @audience private
  1395. ## @stability evolving
  1396. ## @replaceable yes
  1397. function hadoop_setup_secure_service
  1398. {
  1399. # need a more complicated setup? replace me!
  1400. HADOOP_PID_DIR=${HADOOP_SECURE_PID_DIR}
  1401. HADOOP_LOG_DIR=${HADOOP_SECURE_LOG_DIR}
  1402. }
  1403. ## @audience private
  1404. ## @stability evolving
  1405. ## @replaceable yes
  1406. function hadoop_verify_piddir
  1407. {
  1408. if [[ -z "${HADOOP_PID_DIR}" ]]; then
  1409. hadoop_error "No pid directory defined."
  1410. exit 1
  1411. fi
  1412. hadoop_mkdir "${HADOOP_PID_DIR}"
  1413. touch "${HADOOP_PID_DIR}/$$" >/dev/null 2>&1
  1414. if [[ $? -gt 0 ]]; then
  1415. hadoop_error "ERROR: Unable to write in ${HADOOP_PID_DIR}. Aborting."
  1416. exit 1
  1417. fi
  1418. rm "${HADOOP_PID_DIR}/$$" >/dev/null 2>&1
  1419. }
  1420. ## @audience private
  1421. ## @stability evolving
  1422. ## @replaceable yes
  1423. function hadoop_verify_logdir
  1424. {
  1425. if [[ -z "${HADOOP_LOG_DIR}" ]]; then
  1426. hadoop_error "No log directory defined."
  1427. exit 1
  1428. fi
  1429. hadoop_mkdir "${HADOOP_LOG_DIR}"
  1430. touch "${HADOOP_LOG_DIR}/$$" >/dev/null 2>&1
  1431. if [[ $? -gt 0 ]]; then
  1432. hadoop_error "ERROR: Unable to write in ${HADOOP_LOG_DIR}. Aborting."
  1433. exit 1
  1434. fi
  1435. rm "${HADOOP_LOG_DIR}/$$" >/dev/null 2>&1
  1436. }
  1437. ## @description Determine the status of the daemon referenced
  1438. ## @description by `pidfile`
  1439. ## @audience public
  1440. ## @stability stable
  1441. ## @replaceable yes
  1442. ## @param pidfile
  1443. ## @return (mostly) LSB 4.1.0 compatible status
  1444. function hadoop_status_daemon
  1445. {
  1446. #
  1447. # LSB 4.1.0 compatible status command (1)
  1448. #
  1449. # 0 = program is running
  1450. # 1 = dead, but still a pid (2)
  1451. # 2 = (not used by us)
  1452. # 3 = not running
  1453. #
  1454. # 1 - this is not an endorsement of the LSB
  1455. #
  1456. # 2 - technically, the specification says /var/run/pid, so
  1457. # we should never return this value, but we're giving
  1458. # them the benefit of a doubt and returning 1 even if
  1459. # our pid is not in in /var/run .
  1460. #
  1461. local pidfile=$1
  1462. shift
  1463. local pid
  1464. if [[ -f "${pidfile}" ]]; then
  1465. pid=$(cat "${pidfile}")
  1466. if ps -p "${pid}" > /dev/null 2>&1; then
  1467. return 0
  1468. fi
  1469. return 1
  1470. fi
  1471. return 3
  1472. }
  1473. ## @description Execute the Java `class`, passing along any `options`.
  1474. ## @description Additionally, set the Java property -Dproc_`command`.
  1475. ## @audience public
  1476. ## @stability stable
  1477. ## @replaceable yes
  1478. ## @param command
  1479. ## @param class
  1480. ## @param [options]
  1481. function hadoop_java_exec
  1482. {
  1483. # run a java command. this is used for
  1484. # non-daemons
  1485. local command=$1
  1486. local class=$2
  1487. shift 2
  1488. hadoop_debug "Final CLASSPATH: ${CLASSPATH}"
  1489. hadoop_debug "Final HADOOP_OPTS: ${HADOOP_OPTS}"
  1490. hadoop_debug "Final JAVA_HOME: ${JAVA_HOME}"
  1491. hadoop_debug "java: ${JAVA}"
  1492. hadoop_debug "Class name: ${class}"
  1493. hadoop_debug "Command line options: $*"
  1494. export CLASSPATH
  1495. #shellcheck disable=SC2086
  1496. exec "${JAVA}" "-Dproc_${command}" ${HADOOP_OPTS} "${class}" "$@"
  1497. }
  1498. ## @description Start a non-privileged daemon in the foreground.
  1499. ## @audience private
  1500. ## @stability evolving
  1501. ## @replaceable yes
  1502. ## @param command
  1503. ## @param class
  1504. ## @param pidfile
  1505. ## @param [options]
  1506. function hadoop_start_daemon
  1507. {
  1508. # this is our non-privileged daemon starter
  1509. # that fires up a daemon in the *foreground*
  1510. # so complex! so wow! much java!
  1511. local command=$1
  1512. local class=$2
  1513. local pidfile=$3
  1514. shift 3
  1515. hadoop_debug "Final CLASSPATH: ${CLASSPATH}"
  1516. hadoop_debug "Final HADOOP_OPTS: ${HADOOP_OPTS}"
  1517. hadoop_debug "Final JAVA_HOME: ${JAVA_HOME}"
  1518. hadoop_debug "java: ${JAVA}"
  1519. hadoop_debug "Class name: ${class}"
  1520. hadoop_debug "Command line options: $*"
  1521. # this is for the non-daemon pid creation
  1522. #shellcheck disable=SC2086
  1523. echo $$ > "${pidfile}" 2>/dev/null
  1524. if [[ $? -gt 0 ]]; then
  1525. hadoop_error "ERROR: Cannot write ${command} pid ${pidfile}."
  1526. fi
  1527. export CLASSPATH
  1528. #shellcheck disable=SC2086
  1529. exec "${JAVA}" "-Dproc_${command}" ${HADOOP_OPTS} "${class}" "$@"
  1530. }
  1531. ## @description Start a non-privileged daemon in the background.
  1532. ## @audience private
  1533. ## @stability evolving
  1534. ## @replaceable yes
  1535. ## @param command
  1536. ## @param class
  1537. ## @param pidfile
  1538. ## @param outfile
  1539. ## @param [options]
  1540. function hadoop_start_daemon_wrapper
  1541. {
  1542. local daemonname=$1
  1543. local class=$2
  1544. local pidfile=$3
  1545. local outfile=$4
  1546. shift 4
  1547. local counter
  1548. hadoop_rotate_log "${outfile}"
  1549. hadoop_start_daemon "${daemonname}" \
  1550. "$class" \
  1551. "${pidfile}" \
  1552. "$@" >> "${outfile}" 2>&1 < /dev/null &
  1553. # we need to avoid a race condition here
  1554. # so let's wait for the fork to finish
  1555. # before overriding with the daemonized pid
  1556. (( counter=0 ))
  1557. while [[ ! -f ${pidfile} && ${counter} -le 5 ]]; do
  1558. sleep 1
  1559. (( counter++ ))
  1560. done
  1561. # this is for daemon pid creation
  1562. #shellcheck disable=SC2086
  1563. echo $! > "${pidfile}" 2>/dev/null
  1564. if [[ $? -gt 0 ]]; then
  1565. hadoop_error "ERROR: Cannot write ${daemonname} pid ${pidfile}."
  1566. fi
  1567. # shellcheck disable=SC2086
  1568. renice "${HADOOP_NICENESS}" $! >/dev/null 2>&1
  1569. if [[ $? -gt 0 ]]; then
  1570. hadoop_error "ERROR: Cannot set priority of ${daemonname} process $!"
  1571. fi
  1572. # shellcheck disable=SC2086
  1573. disown %+ >/dev/null 2>&1
  1574. if [[ $? -gt 0 ]]; then
  1575. hadoop_error "ERROR: Cannot disconnect ${daemonname} process $!"
  1576. fi
  1577. sleep 1
  1578. # capture the ulimit output
  1579. ulimit -a >> "${outfile}" 2>&1
  1580. # shellcheck disable=SC2086
  1581. if ! ps -p $! >/dev/null 2>&1; then
  1582. return 1
  1583. fi
  1584. return 0
  1585. }
  1586. ## @description Start a privileged daemon in the foreground.
  1587. ## @audience private
  1588. ## @stability evolving
  1589. ## @replaceable yes
  1590. ## @param command
  1591. ## @param class
  1592. ## @param daemonpidfile
  1593. ## @param daemonoutfile
  1594. ## @param daemonerrfile
  1595. ## @param wrapperpidfile
  1596. ## @param [options]
  1597. function hadoop_start_secure_daemon
  1598. {
  1599. # this is used to launch a secure daemon in the *foreground*
  1600. #
  1601. local daemonname=$1
  1602. local class=$2
  1603. # pid file to create for our daemon
  1604. local daemonpidfile=$3
  1605. # where to send stdout. jsvc has bad habits so this *may* be &1
  1606. # which means you send it to stdout!
  1607. local daemonoutfile=$4
  1608. # where to send stderr. same thing, except &2 = stderr
  1609. local daemonerrfile=$5
  1610. local privpidfile=$6
  1611. shift 6
  1612. hadoop_rotate_log "${daemonoutfile}"
  1613. hadoop_rotate_log "${daemonerrfile}"
  1614. # shellcheck disable=SC2153
  1615. jsvc="${JSVC_HOME}/jsvc"
  1616. if [[ ! -f "${jsvc}" ]]; then
  1617. hadoop_error "JSVC_HOME is not set or set incorrectly. jsvc is required to run secure"
  1618. hadoop_error "or privileged daemons. Please download and install jsvc from "
  1619. hadoop_error "http://archive.apache.org/dist/commons/daemon/binaries/ "
  1620. hadoop_error "and set JSVC_HOME to the directory containing the jsvc binary."
  1621. exit 1
  1622. fi
  1623. # note that shellcheck will throw a
  1624. # bogus for-our-use-case 2086 here.
  1625. # it doesn't properly support multi-line situations
  1626. hadoop_debug "Final CLASSPATH: ${CLASSPATH}"
  1627. hadoop_debug "Final HADOOP_OPTS: ${HADOOP_OPTS}"
  1628. hadoop_debug "Final JSVC_HOME: ${JSVC_HOME}"
  1629. hadoop_debug "jsvc: ${jsvc}"
  1630. hadoop_debug "Class name: ${class}"
  1631. hadoop_debug "Command line options: $*"
  1632. #shellcheck disable=SC2086
  1633. echo $$ > "${privpidfile}" 2>/dev/null
  1634. if [[ $? -gt 0 ]]; then
  1635. hadoop_error "ERROR: Cannot write ${daemonname} pid ${privpidfile}."
  1636. fi
  1637. # shellcheck disable=SC2086
  1638. exec "${jsvc}" \
  1639. "-Dproc_${daemonname}" \
  1640. -outfile "${daemonoutfile}" \
  1641. -errfile "${daemonerrfile}" \
  1642. -pidfile "${daemonpidfile}" \
  1643. -nodetach \
  1644. -user "${HADOOP_SECURE_USER}" \
  1645. -cp "${CLASSPATH}" \
  1646. ${HADOOP_OPTS} \
  1647. "${class}" "$@"
  1648. }
  1649. ## @description Start a privileged daemon in the background.
  1650. ## @audience private
  1651. ## @stability evolving
  1652. ## @replaceable yes
  1653. ## @param command
  1654. ## @param class
  1655. ## @param daemonpidfile
  1656. ## @param daemonoutfile
  1657. ## @param wrapperpidfile
  1658. ## @param warpperoutfile
  1659. ## @param daemonerrfile
  1660. ## @param [options]
  1661. function hadoop_start_secure_daemon_wrapper
  1662. {
  1663. # this wraps hadoop_start_secure_daemon to take care
  1664. # of the dirty work to launch a daemon in the background!
  1665. local daemonname=$1
  1666. local class=$2
  1667. # same rules as hadoop_start_secure_daemon except we
  1668. # have some additional parameters
  1669. local daemonpidfile=$3
  1670. local daemonoutfile=$4
  1671. # the pid file of the subprocess that spawned our
  1672. # secure launcher
  1673. local jsvcpidfile=$5
  1674. # the output of the subprocess that spawned our secure
  1675. # launcher
  1676. local jsvcoutfile=$6
  1677. local daemonerrfile=$7
  1678. shift 7
  1679. local counter
  1680. hadoop_rotate_log "${jsvcoutfile}"
  1681. hadoop_start_secure_daemon \
  1682. "${daemonname}" \
  1683. "${class}" \
  1684. "${daemonpidfile}" \
  1685. "${daemonoutfile}" \
  1686. "${daemonerrfile}" \
  1687. "${jsvcpidfile}" "$@" >> "${jsvcoutfile}" 2>&1 < /dev/null &
  1688. # we need to avoid a race condition here
  1689. # so let's wait for the fork to finish
  1690. # before overriding with the daemonized pid
  1691. (( counter=0 ))
  1692. while [[ ! -f ${daemonpidfile} && ${counter} -le 5 ]]; do
  1693. sleep 1
  1694. (( counter++ ))
  1695. done
  1696. # this is for the daemon pid creation
  1697. #shellcheck disable=SC2086
  1698. echo $! > "${jsvcpidfile}" 2>/dev/null
  1699. if [[ $? -gt 0 ]]; then
  1700. hadoop_error "ERROR: Cannot write ${daemonname} pid ${daemonpidfile}."
  1701. fi
  1702. sleep 1
  1703. #shellcheck disable=SC2086
  1704. renice "${HADOOP_NICENESS}" $! >/dev/null 2>&1
  1705. if [[ $? -gt 0 ]]; then
  1706. hadoop_error "ERROR: Cannot set priority of ${daemonname} process $!"
  1707. fi
  1708. if [[ -f "${daemonpidfile}" ]]; then
  1709. #shellcheck disable=SC2046
  1710. renice "${HADOOP_NICENESS}" $(cat "${daemonpidfile}" 2>/dev/null) >/dev/null 2>&1
  1711. if [[ $? -gt 0 ]]; then
  1712. hadoop_error "ERROR: Cannot set priority of ${daemonname} process $(cat "${daemonpidfile}" 2>/dev/null)"
  1713. fi
  1714. fi
  1715. #shellcheck disable=SC2046
  1716. disown %+ >/dev/null 2>&1
  1717. if [[ $? -gt 0 ]]; then
  1718. hadoop_error "ERROR: Cannot disconnect ${daemonname} process $!"
  1719. fi
  1720. # capture the ulimit output
  1721. su "${HADOOP_SECURE_USER}" -c 'bash -c "ulimit -a"' >> "${jsvcoutfile}" 2>&1
  1722. #shellcheck disable=SC2086
  1723. if ! ps -p $! >/dev/null 2>&1; then
  1724. return 1
  1725. fi
  1726. return 0
  1727. }
  1728. ## @description Stop the non-privileged `command` daemon with that
  1729. ## @description that is running at `pidfile`.
  1730. ## @audience public
  1731. ## @stability stable
  1732. ## @replaceable yes
  1733. ## @param command
  1734. ## @param pidfile
  1735. function hadoop_stop_daemon
  1736. {
  1737. local cmd=$1
  1738. local pidfile=$2
  1739. shift 2
  1740. local pid
  1741. local cur_pid
  1742. if [[ -f "${pidfile}" ]]; then
  1743. pid=$(cat "$pidfile")
  1744. kill "${pid}" >/dev/null 2>&1
  1745. sleep "${HADOOP_STOP_TIMEOUT}"
  1746. if kill -0 "${pid}" > /dev/null 2>&1; then
  1747. hadoop_error "WARNING: ${cmd} did not stop gracefully after ${HADOOP_STOP_TIMEOUT} seconds: Trying to kill with kill -9"
  1748. kill -9 "${pid}" >/dev/null 2>&1
  1749. fi
  1750. if ps -p "${pid}" > /dev/null 2>&1; then
  1751. hadoop_error "ERROR: Unable to kill ${pid}"
  1752. else
  1753. cur_pid=$(cat "$pidfile")
  1754. if [[ "${pid}" = "${cur_pid}" ]]; then
  1755. rm -f "${pidfile}" >/dev/null 2>&1
  1756. else
  1757. hadoop_error "WARNING: pid has changed for ${cmd}, skip deleting pid file"
  1758. fi
  1759. fi
  1760. fi
  1761. }
  1762. ## @description Stop the privileged `command` daemon with that
  1763. ## @description that is running at `daemonpidfile` and launched with
  1764. ## @description the wrapper at `wrapperpidfile`.
  1765. ## @audience public
  1766. ## @stability stable
  1767. ## @replaceable yes
  1768. ## @param command
  1769. ## @param daemonpidfile
  1770. ## @param wrapperpidfile
  1771. function hadoop_stop_secure_daemon
  1772. {
  1773. local command=$1
  1774. local daemonpidfile=$2
  1775. local privpidfile=$3
  1776. shift 3
  1777. local ret
  1778. local daemon_pid
  1779. local priv_pid
  1780. local cur_daemon_pid
  1781. local cur_priv_pid
  1782. daemon_pid=$(cat "$daemonpidfile")
  1783. priv_pid=$(cat "$privpidfile")
  1784. hadoop_stop_daemon "${command}" "${daemonpidfile}"
  1785. ret=$?
  1786. cur_daemon_pid=$(cat "$daemonpidfile")
  1787. cur_priv_pid=$(cat "$privpidfile")
  1788. if [[ "${daemon_pid}" = "${cur_daemon_pid}" ]]; then
  1789. rm -f "${daemonpidfile}" >/dev/null 2>&1
  1790. else
  1791. hadoop_error "WARNING: daemon pid has changed for ${command}, skip deleting daemon pid file"
  1792. fi
  1793. if [[ "${priv_pid}" = "${cur_priv_pid}" ]]; then
  1794. rm -f "${privpidfile}" >/dev/null 2>&1
  1795. else
  1796. hadoop_error "WARNING: priv pid has changed for ${command}, skip deleting priv pid file"
  1797. fi
  1798. return ${ret}
  1799. }
  1800. ## @description Manage a non-privileged daemon.
  1801. ## @audience private
  1802. ## @stability evolving
  1803. ## @replaceable yes
  1804. ## @param [start|stop|status|default]
  1805. ## @param command
  1806. ## @param class
  1807. ## @param daemonpidfile
  1808. ## @param daemonoutfile
  1809. ## @param [options]
  1810. function hadoop_daemon_handler
  1811. {
  1812. local daemonmode=$1
  1813. local daemonname=$2
  1814. local class=$3
  1815. local daemon_pidfile=$4
  1816. local daemon_outfile=$5
  1817. shift 5
  1818. case ${daemonmode} in
  1819. status)
  1820. hadoop_status_daemon "${daemon_pidfile}"
  1821. exit $?
  1822. ;;
  1823. stop)
  1824. hadoop_stop_daemon "${daemonname}" "${daemon_pidfile}"
  1825. exit $?
  1826. ;;
  1827. ##COMPAT -- older hadoops would also start daemons by default
  1828. start|default)
  1829. hadoop_verify_piddir
  1830. hadoop_verify_logdir
  1831. hadoop_status_daemon "${daemon_pidfile}"
  1832. if [[ $? == 0 ]]; then
  1833. hadoop_error "${daemonname} is running as process $(cat "${daemon_pidfile}"). Stop it first."
  1834. exit 1
  1835. else
  1836. # stale pid file, so just remove it and continue on
  1837. rm -f "${daemon_pidfile}" >/dev/null 2>&1
  1838. fi
  1839. ##COMPAT - differenticate between --daemon start and nothing
  1840. # "nothing" shouldn't detach
  1841. if [[ "$daemonmode" = "default" ]]; then
  1842. hadoop_start_daemon "${daemonname}" "${class}" "${daemon_pidfile}" "$@"
  1843. else
  1844. hadoop_start_daemon_wrapper "${daemonname}" \
  1845. "${class}" "${daemon_pidfile}" "${daemon_outfile}" "$@"
  1846. fi
  1847. ;;
  1848. esac
  1849. }
  1850. ## @description Manage a privileged daemon.
  1851. ## @audience private
  1852. ## @stability evolving
  1853. ## @replaceable yes
  1854. ## @param [start|stop|status|default]
  1855. ## @param command
  1856. ## @param class
  1857. ## @param daemonpidfile
  1858. ## @param daemonoutfile
  1859. ## @param wrapperpidfile
  1860. ## @param wrapperoutfile
  1861. ## @param wrappererrfile
  1862. ## @param [options]
  1863. function hadoop_secure_daemon_handler
  1864. {
  1865. local daemonmode=$1
  1866. local daemonname=$2
  1867. local classname=$3
  1868. local daemon_pidfile=$4
  1869. local daemon_outfile=$5
  1870. local priv_pidfile=$6
  1871. local priv_outfile=$7
  1872. local priv_errfile=$8
  1873. shift 8
  1874. case ${daemonmode} in
  1875. status)
  1876. hadoop_status_daemon "${daemon_pidfile}"
  1877. exit $?
  1878. ;;
  1879. stop)
  1880. hadoop_stop_secure_daemon "${daemonname}" \
  1881. "${daemon_pidfile}" "${priv_pidfile}"
  1882. exit $?
  1883. ;;
  1884. ##COMPAT -- older hadoops would also start daemons by default
  1885. start|default)
  1886. hadoop_verify_piddir
  1887. hadoop_verify_logdir
  1888. hadoop_status_daemon "${daemon_pidfile}"
  1889. if [[ $? == 0 ]]; then
  1890. hadoop_error "${daemonname} is running as process $(cat "${daemon_pidfile}"). Stop it first."
  1891. exit 1
  1892. else
  1893. # stale pid file, so just remove it and continue on
  1894. rm -f "${daemon_pidfile}" >/dev/null 2>&1
  1895. fi
  1896. ##COMPAT - differenticate between --daemon start and nothing
  1897. # "nothing" shouldn't detach
  1898. if [[ "${daemonmode}" = "default" ]]; then
  1899. hadoop_start_secure_daemon "${daemonname}" "${classname}" \
  1900. "${daemon_pidfile}" "${daemon_outfile}" \
  1901. "${priv_errfile}" "${priv_pidfile}" "$@"
  1902. else
  1903. hadoop_start_secure_daemon_wrapper "${daemonname}" "${classname}" \
  1904. "${daemon_pidfile}" "${daemon_outfile}" \
  1905. "${priv_pidfile}" "${priv_outfile}" "${priv_errfile}" "$@"
  1906. fi
  1907. ;;
  1908. esac
  1909. }
  1910. ## @description Get the environment variable used to validate users
  1911. ## @audience public
  1912. ## @stability stable
  1913. ## @replaceable yes
  1914. ## @param subcommand
  1915. ## @return string
  1916. function hadoop_get_verify_uservar
  1917. {
  1918. declare program=$1
  1919. declare command=$2
  1920. declare uprogram
  1921. declare ucommand
  1922. if [[ -z "${BASH_VERSINFO[0]}" ]] \
  1923. || [[ "${BASH_VERSINFO[0]}" -lt 4 ]]; then
  1924. uprogram=$(echo "${program}" | tr '[:lower:]' '[:upper:]')
  1925. ucommand=$(echo "${command}" | tr '[:lower:]' '[:upper:]')
  1926. else
  1927. uprogram=${program^^}
  1928. ucommand=${command^^}
  1929. fi
  1930. echo "${uprogram}_${ucommand}_USER"
  1931. }
  1932. ## @description Verify that ${USER} is allowed to execute the
  1933. ## @description given subcommand.
  1934. ## @audience public
  1935. ## @stability stable
  1936. ## @replaceable yes
  1937. ## @param command
  1938. ## @param subcommand
  1939. ## @return return 0 on success
  1940. ## @return exit 1 on failure
  1941. function hadoop_verify_user
  1942. {
  1943. declare program=$1
  1944. declare command=$2
  1945. declare uvar
  1946. uvar=$(hadoop_get_verify_uservar "${program}" "${command}")
  1947. if [[ -n ${!uvar} ]]; then
  1948. if [[ ${!uvar} != "${USER}" ]]; then
  1949. hadoop_error "ERROR: ${command} can only be executed by ${!uvar}."
  1950. exit 1
  1951. fi
  1952. fi
  1953. return 0
  1954. }
  1955. ## @description Verify that ${USER} is allowed to execute the
  1956. ## @description given subcommand.
  1957. ## @audience public
  1958. ## @stability stable
  1959. ## @replaceable yes
  1960. ## @param subcommand
  1961. ## @return 1 on no re-exec needed
  1962. ## @return 0 on need to re-exec
  1963. function hadoop_need_reexec
  1964. {
  1965. declare program=$1
  1966. declare command=$2
  1967. declare uvar
  1968. # we've already been re-execed, bail
  1969. if [[ "${HADOOP_REEXECED_CMD}" = true ]]; then
  1970. return 1
  1971. fi
  1972. # if we have privilege, and the _USER is defined, and _USER is
  1973. # set to someone who isn't us, then yes, we should re-exec.
  1974. # otherwise no, don't re-exec and let the system deal with it.
  1975. if hadoop_privilege_check; then
  1976. uvar=$(hadoop_get_verify_uservar "${program}" "${command}")
  1977. if [[ -n ${!uvar} ]]; then
  1978. if [[ ${!uvar} != "${USER}" ]]; then
  1979. return 0
  1980. fi
  1981. fi
  1982. fi
  1983. return 1
  1984. }
  1985. ## @description Add custom (program)_(command)_OPTS to HADOOP_OPTS.
  1986. ## @description Also handles the deprecated cases from pre-3.x.
  1987. ## @audience public
  1988. ## @stability stable
  1989. ## @replaceable yes
  1990. ## @param program
  1991. ## @param subcommand
  1992. ## @return will exit on failure conditions
  1993. function hadoop_subcommand_opts
  1994. {
  1995. declare program=$1
  1996. declare command=$2
  1997. declare uvar
  1998. declare depvar
  1999. declare uprogram
  2000. declare ucommand
  2001. if [[ -z "${program}" || -z "${command}" ]]; then
  2002. return 1
  2003. fi
  2004. # bash 4 and up have built-in ways to upper and lower
  2005. # case the contents of vars. This is faster than
  2006. # calling tr.
  2007. if [[ -z "${BASH_VERSINFO[0]}" ]] \
  2008. || [[ "${BASH_VERSINFO[0]}" -lt 4 ]]; then
  2009. uprogram=$(echo "${program}" | tr '[:lower:]' '[:upper:]')
  2010. ucommand=$(echo "${command}" | tr '[:lower:]' '[:upper:]')
  2011. else
  2012. uprogram=${program^^}
  2013. ucommand=${command^^}
  2014. fi
  2015. uvar="${uprogram}_${ucommand}_OPTS"
  2016. # Let's handle all of the deprecation cases early
  2017. # HADOOP_NAMENODE_OPTS -> HDFS_NAMENODE_OPTS
  2018. depvar="HADOOP_${ucommand}_OPTS"
  2019. if [[ "${depvar}" != "${uvar}" ]]; then
  2020. if [[ -n "${!depvar}" ]]; then
  2021. hadoop_deprecate_envvar "${depvar}" "${uvar}"
  2022. fi
  2023. fi
  2024. if [[ -n ${!uvar} ]]; then
  2025. hadoop_debug "Appending ${uvar} onto HADOOP_OPTS"
  2026. HADOOP_OPTS="${HADOOP_OPTS} ${!uvar}"
  2027. return 0
  2028. fi
  2029. }
  2030. ## @description Add custom (program)_(command)_SECURE_EXTRA_OPTS to HADOOP_OPTS.
  2031. ## @description This *does not* handle the pre-3.x deprecated cases
  2032. ## @audience public
  2033. ## @stability stable
  2034. ## @replaceable yes
  2035. ## @param program
  2036. ## @param subcommand
  2037. ## @return will exit on failure conditions
  2038. function hadoop_subcommand_secure_opts
  2039. {
  2040. declare program=$1
  2041. declare command=$2
  2042. declare uvar
  2043. declare uprogram
  2044. declare ucommand
  2045. if [[ -z "${program}" || -z "${command}" ]]; then
  2046. return 1
  2047. fi
  2048. # bash 4 and up have built-in ways to upper and lower
  2049. # case the contents of vars. This is faster than
  2050. # calling tr.
  2051. if [[ -z "${BASH_VERSINFO[0]}" ]] \
  2052. || [[ "${BASH_VERSINFO[0]}" -lt 4 ]]; then
  2053. uprogram=$(echo "${program}" | tr '[:lower:]' '[:upper:]')
  2054. ucommand=$(echo "${command}" | tr '[:lower:]' '[:upper:]')
  2055. else
  2056. uprogram=${program^^}
  2057. ucommand=${command^^}
  2058. fi
  2059. # HDFS_DATANODE_SECURE_EXTRA_OPTS
  2060. # HDFS_NFS3_SECURE_EXTRA_OPTS
  2061. # ...
  2062. uvar="${uprogram}_${ucommand}_SECURE_EXTRA_OPTS"
  2063. if [[ -n ${!uvar} ]]; then
  2064. hadoop_debug "Appending ${uvar} onto HADOOP_OPTS"
  2065. HADOOP_OPTS="${HADOOP_OPTS} ${!uvar}"
  2066. return 0
  2067. fi
  2068. }
  2069. ## @description Perform the 'hadoop classpath', etc subcommand with the given
  2070. ## @description parameters
  2071. ## @audience private
  2072. ## @stability evolving
  2073. ## @replaceable yes
  2074. ## @param [parameters]
  2075. ## @return will print & exit with no params
  2076. function hadoop_do_classpath_subcommand
  2077. {
  2078. if [[ "$#" -gt 1 ]]; then
  2079. eval "$1"=org.apache.hadoop.util.Classpath
  2080. else
  2081. hadoop_finalize
  2082. echo "${CLASSPATH}"
  2083. exit 0
  2084. fi
  2085. }
  2086. ## @description generic shell script opton parser. sets
  2087. ## @description HADOOP_PARSE_COUNTER to set number the
  2088. ## @description caller should shift
  2089. ## @audience private
  2090. ## @stability evolving
  2091. ## @replaceable yes
  2092. ## @param [parameters, typically "$@"]
  2093. function hadoop_parse_args
  2094. {
  2095. HADOOP_DAEMON_MODE="default"
  2096. HADOOP_PARSE_COUNTER=0
  2097. # not all of the options supported here are supported by all commands
  2098. # however these are:
  2099. hadoop_add_option "--config dir" "Hadoop config directory"
  2100. hadoop_add_option "--debug" "turn on shell script debug mode"
  2101. hadoop_add_option "--help" "usage information"
  2102. while true; do
  2103. hadoop_debug "hadoop_parse_args: processing $1"
  2104. case $1 in
  2105. --buildpaths)
  2106. # shellcheck disable=SC2034
  2107. HADOOP_ENABLE_BUILD_PATHS=true
  2108. shift
  2109. ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+1))
  2110. ;;
  2111. --config)
  2112. shift
  2113. confdir=$1
  2114. shift
  2115. ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
  2116. if [[ -d "${confdir}" ]]; then
  2117. # shellcheck disable=SC2034
  2118. HADOOP_CONF_DIR="${confdir}"
  2119. elif [[ -z "${confdir}" ]]; then
  2120. hadoop_error "ERROR: No parameter provided for --config "
  2121. hadoop_exit_with_usage 1
  2122. else
  2123. hadoop_error "ERROR: Cannot find configuration directory \"${confdir}\""
  2124. hadoop_exit_with_usage 1
  2125. fi
  2126. ;;
  2127. --daemon)
  2128. shift
  2129. HADOOP_DAEMON_MODE=$1
  2130. shift
  2131. ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
  2132. if [[ -z "${HADOOP_DAEMON_MODE}" || \
  2133. ! "${HADOOP_DAEMON_MODE}" =~ ^st(art|op|atus)$ ]]; then
  2134. hadoop_error "ERROR: --daemon must be followed by either \"start\", \"stop\", or \"status\"."
  2135. hadoop_exit_with_usage 1
  2136. fi
  2137. ;;
  2138. --debug)
  2139. shift
  2140. # shellcheck disable=SC2034
  2141. HADOOP_SHELL_SCRIPT_DEBUG=true
  2142. ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+1))
  2143. ;;
  2144. --help|-help|-h|help|--h|--\?|-\?|\?)
  2145. hadoop_exit_with_usage 0
  2146. ;;
  2147. --hostnames)
  2148. shift
  2149. # shellcheck disable=SC2034
  2150. HADOOP_WORKER_NAMES="$1"
  2151. shift
  2152. ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
  2153. ;;
  2154. --hosts)
  2155. shift
  2156. hadoop_populate_workers_file "$1"
  2157. shift
  2158. ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
  2159. ;;
  2160. --loglevel)
  2161. shift
  2162. # shellcheck disable=SC2034
  2163. HADOOP_LOGLEVEL="$1"
  2164. shift
  2165. ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
  2166. ;;
  2167. --reexec)
  2168. shift
  2169. if [[ "${HADOOP_REEXECED_CMD}" = true ]]; then
  2170. hadoop_error "ERROR: re-exec fork bomb prevention: --reexec already called"
  2171. exit 1
  2172. fi
  2173. HADOOP_REEXECED_CMD=true
  2174. ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+1))
  2175. ;;
  2176. --workers)
  2177. shift
  2178. # shellcheck disable=SC2034
  2179. HADOOP_WORKER_MODE=true
  2180. ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+1))
  2181. ;;
  2182. *)
  2183. break
  2184. ;;
  2185. esac
  2186. done
  2187. hadoop_debug "hadoop_parse: asking caller to skip ${HADOOP_PARSE_COUNTER}"
  2188. }
  2189. ## @description XML-escapes the characters (&'"<>) in the given parameter.
  2190. ## @audience private
  2191. ## @stability evolving
  2192. ## @replaceable yes
  2193. ## @param string
  2194. ## @return XML-escaped string
  2195. function hadoop_xml_escape
  2196. {
  2197. sed -e 's/&/\&amp;/g' -e 's/"/\\\&quot;/g' \
  2198. -e "s/'/\\\\\&apos;/g" -e 's/</\\\&lt;/g' -e 's/>/\\\&gt;/g' <<< "$1"
  2199. }
  2200. ## @description sed-escapes the characters (\/&) in the given parameter.
  2201. ## @audience private
  2202. ## @stability evolving
  2203. ## @replaceable yes
  2204. ## @param string
  2205. ## @return sed-escaped string
  2206. function hadoop_sed_escape
  2207. {
  2208. sed -e 's/[\/&]/\\&/g' <<< "$1"
  2209. }