#!/usr/bin/env bash

# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# we need to declare this globally as an array, which can only
# be done outside of a function
declare -a HADOOP_SUBCMD_USAGE
declare -a HADOOP_OPTION_USAGE
declare -a HADOOP_SUBCMD_USAGE_TYPES

## @description  Print a message to stderr
## @audience     public
## @stability    stable
## @replaceable  no
## @param        string
function hadoop_error
{
  echo "$*" 1>&2
}

## @description  Print a message to stderr if --debug is turned on
## @audience     public
## @stability    stable
## @replaceable  no
## @param        string
function hadoop_debug
{
  if [[ -n "${HADOOP_SHELL_SCRIPT_DEBUG}" ]]; then
    echo "DEBUG: $*" 1>&2
  fi
}

## @description  Given a filename or dir, return the absolute version of it
## @description  This works as an alternative to readlink, which isn't
## @description  portable.
## @audience     public
## @stability    stable
## @param        fsobj
## @replaceable  no
## @return       0 success
## @return       1 failure
## @return       stdout abspath
function hadoop_abs
{
  declare obj=$1
  declare dir
  declare fn
  declare dirret

  if [[ ! -e ${obj} ]]; then
    return 1
  elif [[ -d ${obj} ]]; then
    dir=${obj}
  else
    dir=$(dirname -- "${obj}")
    fn=$(basename -- "${obj}")
    fn="/${fn}"
  fi

  dir=$(cd -P -- "${dir}" >/dev/null 2>/dev/null && pwd -P)
  dirret=$?
  if [[ ${dirret} = 0 ]]; then
    echo "${dir}${fn}"
    return 0
  fi
  return 1
}
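
# Illustrative usage sketch (not part of the library); the relative path
# below is a hypothetical example:
#
#   absdir=$(hadoop_abs "../etc/hadoop") || hadoop_error "cannot resolve path"
#
# On success the absolute path is printed on stdout and the return code is 0.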

## @description  Given variable $1 delete $2 from it
## @audience     public
## @stability    stable
## @replaceable  no
function hadoop_delete_entry
{
  if [[ ${!1} =~ \ ${2}\  ]] ; then
    hadoop_debug "Removing ${2} from ${1}"
    eval "${1}"=\""${!1// ${2} }"\"
  fi
}

## @description  Given variable $1 add $2 to it
## @audience     public
## @stability    stable
## @replaceable  no
function hadoop_add_entry
{
  if [[ ! ${!1} =~ \ ${2}\  ]] ; then
    hadoop_debug "Adding ${2} to ${1}"
    #shellcheck disable=SC2140
    eval "${1}"=\""${!1} ${2} "\"
  fi
}
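
# Illustrative usage sketch (not part of the library). Entries are stored
# space-delimited inside the named variable; the names below are hypothetical:
#
#   hadoop_add_entry HADOOP_TOOLS_OPTIONS "hadoop-aws"
#   hadoop_delete_entry HADOOP_TOOLS_OPTIONS "hadoop-aws"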

## @description  Given variable $1 determine if $2 is in it
## @audience     public
## @stability    stable
## @replaceable  no
## @return       0 = yes, 1 = no
function hadoop_verify_entry
{
  # this unfortunately can't really be tested by bats. :(
  # so if this changes, be aware that unit tests effectively
  # do this function in them
  [[ ${!1} =~ \ ${2}\  ]]
}

## @description  Check if an array has a given value
## @audience     public
## @stability    stable
## @replaceable  yes
## @param        element
## @param        array
## @returns      0 = yes
## @returns      1 = no
function hadoop_array_contains
{
  declare element=$1
  shift
  declare val

  if [[ "$#" -eq 0 ]]; then
    return 1
  fi

  for val in "${@}"; do
    if [[ "${val}" == "${element}" ]]; then
      return 0
    fi
  done
  return 1
}
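
# Illustrative usage sketch (not part of the library); the array and values
# are hypothetical:
#
#   declare -a fruits=("apple" "banana")
#   if hadoop_array_contains "apple" "${fruits[@]}"; then
#     echo "found"
#   fi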

## @description  Add the `appendstring` if it is not already
## @description  present in the given array
## @audience     public
## @stability    stable
## @replaceable  yes
## @param        arrayvar
## @param        appendstring
function hadoop_add_array_param
{
  declare arrname=$1
  declare add=$2

  declare arrref="${arrname}[@]"
  declare array=("${!arrref}")

  if ! hadoop_array_contains "${add}" "${array[@]}"; then
    #shellcheck disable=SC1083,SC2086
    eval ${arrname}=\(\"\${array[@]}\" \"${add}\" \)
    hadoop_debug "$1 accepted $2"
  else
    hadoop_debug "$1 declined $2"
  fi
}
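
# Illustrative usage sketch (not part of the library); the array name and
# value are hypothetical:
#
#   declare -a MY_TYPES=()
#   hadoop_add_array_param MY_TYPES "client"
#   hadoop_add_array_param MY_TYPES "client"   # declined: already present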

## @description  Sort an array (must not contain regexps)
## @audience     public
## @stability    stable
## @replaceable  yes
## @param        arrayvar
function hadoop_sort_array
{
  declare arrname=$1
  declare arrref="${arrname}[@]"
  declare array=("${!arrref}")
  declare oifs

  declare globstatus
  declare -a sa

  globstatus=$(set -o | grep noglob | awk '{print $NF}')

  set -f
  oifs=${IFS}

  # shellcheck disable=SC2034
  IFS=$'\n' sa=($(sort <<<"${array[*]}"))

  # shellcheck disable=SC1083
  eval "${arrname}"=\(\"\${sa[@]}\"\)

  IFS=${oifs}
  if [[ "${globstatus}" = off ]]; then
    set +f
  fi
}

## @description  Check if we are running with priv
## @description  by default, this implementation looks for
## @description  EUID=0.  For OSes that have true priv
## @description  separation, this should be something more complex
## @audience     private
## @stability    evolving
## @replaceable  yes
## @return       1 = no priv
## @return       0 = priv
function hadoop_privilege_check
{
  [[ "${EUID}" = 0 ]]
}

## @description  Execute a command via su when running as root
## @description  if the given user is found or exit with
## @description  failure if not.
## @description  otherwise just run it.  (This is intended to
## @description  be used by the start-*/stop-* scripts.)
## @audience     private
## @stability    evolving
## @replaceable  yes
## @param        user
## @param        commandstring
## @return       exitstatus
function hadoop_su
{
  declare user=$1
  shift

  if hadoop_privilege_check; then
    if hadoop_verify_user_resolves user; then
      su -l "${user}" -- "$@"
    else
      hadoop_error "ERROR: Refusing to run as root: ${user} account is not found. Aborting."
      return 1
    fi
  else
    "$@"
  fi
}

## @description  Execute a command via su when running as root
## @description  with extra support for commands that might
## @description  legitimately start as root (e.g., datanode)
## @description  (This is intended to
## @description  be used by the start-*/stop-* scripts.)
## @audience     private
## @stability    evolving
## @replaceable  no
## @param        user
## @param        commandstring
## @return       exitstatus
function hadoop_uservar_su
{
  ## startup matrix:
  #
  # if $EUID != 0, then exec
  # if $EUID = 0 then
  #    if hdfs_subcmd_user is defined, call hadoop_su to exec
  #    if hdfs_subcmd_user is not defined, error
  #
  # For secure daemons, this means both the secure and insecure env vars need to be
  # defined.  e.g., HDFS_DATANODE_USER=root HDFS_DATANODE_SECURE_USER=hdfs
  # This function will pick up the "normal" var, switch to that user, then
  # execute the command which will then pick up the "secure" version.
  #

  declare program=$1
  declare command=$2
  shift 2

  declare uprogram
  declare ucommand
  declare uvar
  declare svar

  if hadoop_privilege_check; then
    uvar=$(hadoop_build_custom_subcmd_var "${program}" "${command}" USER)

    svar=$(hadoop_build_custom_subcmd_var "${program}" "${command}" SECURE_USER)

    if [[ -n "${!uvar}" ]]; then
      hadoop_su "${!uvar}" "$@"
    elif [[ -n "${!svar}" ]]; then
      ## if we are here, then SECURE_USER with no USER defined
      ## we are already privileged, so just run the command and hope
      ## for the best
      "$@"
    else
      hadoop_error "ERROR: Attempting to operate on ${program} ${command} as root"
      hadoop_error "ERROR: but there is no ${uvar} defined. Aborting operation."
      return 1
    fi
  else
    "$@"
  fi
}
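
# Illustrative configuration sketch (not part of the library). Per the
# startup matrix above, a secure datanode needs both vars set, e.g. in
# hadoop-env.sh (values here are examples):
#
#   HDFS_DATANODE_USER=root
#   HDFS_DATANODE_SECURE_USER=hdfs
#
# hadoop_uservar_su then switches to the "normal" user, and the re-executed
# command picks up the "secure" one.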

## @description  Add a subcommand to the usage output
## @audience     private
## @stability    evolving
## @replaceable  no
## @param        subcommand
## @param        subcommandtype
## @param        subcommanddesc
function hadoop_add_subcommand
{
  declare subcmd=$1
  declare subtype=$2
  declare text=$3

  hadoop_debug "${subcmd} as a ${subtype}"

  hadoop_add_array_param HADOOP_SUBCMD_USAGE_TYPES "${subtype}"

  # done in this order so that sort works later
  HADOOP_SUBCMD_USAGE[${HADOOP_SUBCMD_USAGE_COUNTER}]="${subcmd}@${subtype}@${text}"
  ((HADOOP_SUBCMD_USAGE_COUNTER=HADOOP_SUBCMD_USAGE_COUNTER+1))
}

## @description  Add an option to the usage output
## @audience     private
## @stability    evolving
## @replaceable  no
## @param        option
## @param        optiondesc
function hadoop_add_option
{
  local option=$1
  local text=$2

  HADOOP_OPTION_USAGE[${HADOOP_OPTION_USAGE_COUNTER}]="${option}@${text}"
  ((HADOOP_OPTION_USAGE_COUNTER=HADOOP_OPTION_USAGE_COUNTER+1))
}

## @description  Reset the usage information to blank
## @audience     private
## @stability    evolving
## @replaceable  no
function hadoop_reset_usage
{
  HADOOP_SUBCMD_USAGE=()
  HADOOP_OPTION_USAGE=()
  HADOOP_SUBCMD_USAGE_TYPES=()
  HADOOP_SUBCMD_USAGE_COUNTER=0
  HADOOP_OPTION_USAGE_COUNTER=0
}
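
# Illustrative registration sketch (not part of the library); the command,
# option, and subcommand names are hypothetical:
#
#   hadoop_reset_usage
#   hadoop_add_option "--debug" "turn on shell script debug mode"
#   hadoop_add_subcommand "version" client "print the version"
#   hadoop_generate_usage "mycmd" true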

## @description  Print a screen-size aware two-column output
## @description  if reqtype is not null, only print those requested
## @audience     private
## @stability    evolving
## @replaceable  no
## @param        reqtype
## @param        array
function hadoop_generic_columnprinter
{
  declare reqtype=$1
  shift
  declare -a input=("$@")
  declare -i i=0
  declare -i counter=0
  declare line
  declare text
  declare option
  declare giventext
  declare -i maxoptsize
  declare -i foldsize
  declare -a tmpa
  declare numcols
  declare brup

  if [[ -n "${COLUMNS}" ]]; then
    numcols=${COLUMNS}
  else
    numcols=$(tput cols 2>/dev/null)
    COLUMNS=${numcols}
  fi

  if [[ -z "${numcols}"
     || ! "${numcols}" =~ ^[0-9]+$ ]]; then
    numcols=75
  else
    ((numcols=numcols-5))
  fi

  while read -r line; do
    tmpa[${counter}]=${line}
    ((counter=counter+1))
    IFS='@' read -ra brup <<< "${line}"
    option="${brup[0]}"
    if [[ ${#option} -gt ${maxoptsize} ]]; then
      maxoptsize=${#option}
    fi
  done < <(for text in "${input[@]}"; do
    echo "${text}"
  done | sort)

  i=0
  ((foldsize=numcols-maxoptsize))

  until [[ $i -eq ${#tmpa[@]} ]]; do
    IFS='@' read -ra brup <<< "${tmpa[$i]}"
    option="${brup[0]}"
    cmdtype="${brup[1]}"
    giventext="${brup[2]}"

    if [[ -n "${reqtype}" ]]; then
      if [[ "${cmdtype}" != "${reqtype}" ]]; then
        ((i=i+1))
        continue
      fi
    fi

    if [[ -z "${giventext}" ]]; then
      giventext=${cmdtype}
    fi

    while read -r line; do
      printf "%-${maxoptsize}s   %-s\n" "${option}" "${line}"
      option=" "
    done < <(echo "${giventext}"| fold -s -w ${foldsize})
    ((i=i+1))
  done
}

## @description  generate standard usage output
## @description  and optionally takes a class
## @audience     private
## @stability    evolving
## @replaceable  no
## @param        execname
## @param        true|false
## @param        [text to use in place of SUBCOMMAND]
function hadoop_generate_usage
{
  declare cmd=$1
  declare takesclass=$2
  declare subcmdtext=${3:-"SUBCOMMAND"}
  declare haveoptions
  declare optstring
  declare havesubs
  declare subcmdstring
  declare cmdtype

  cmd=${cmd##*/}

  if [[ -n "${HADOOP_OPTION_USAGE_COUNTER}"
        && "${HADOOP_OPTION_USAGE_COUNTER}" -gt 0 ]]; then
    haveoptions=true
    optstring=" [OPTIONS]"
  fi

  if [[ -n "${HADOOP_SUBCMD_USAGE_COUNTER}"
        && "${HADOOP_SUBCMD_USAGE_COUNTER}" -gt 0 ]]; then
    havesubs=true
    subcmdstring=" ${subcmdtext} [${subcmdtext} OPTIONS]"
  fi

  echo "Usage: ${cmd}${optstring}${subcmdstring}"
  if [[ ${takesclass} = true ]]; then
    echo " or    ${cmd}${optstring} CLASSNAME [CLASSNAME OPTIONS]"
    echo "  where CLASSNAME is a user-provided Java class"
  fi

  if [[ "${haveoptions}" = true ]]; then
    echo ""
    echo "  OPTIONS is none or any of:"
    echo ""

    hadoop_generic_columnprinter "" "${HADOOP_OPTION_USAGE[@]}"
  fi

  if [[ "${havesubs}" = true ]]; then
    echo ""
    echo "  ${subcmdtext} is one of:"
    echo ""

    if [[ "${#HADOOP_SUBCMD_USAGE_TYPES[@]}" -gt 0 ]]; then

      hadoop_sort_array HADOOP_SUBCMD_USAGE_TYPES
      for subtype in "${HADOOP_SUBCMD_USAGE_TYPES[@]}"; do
        #shellcheck disable=SC2086
        cmdtype="$(tr '[:lower:]' '[:upper:]' <<< ${subtype:0:1})${subtype:1}"
        printf "\n    %s Commands:\n\n" "${cmdtype}"
        hadoop_generic_columnprinter "${subtype}" "${HADOOP_SUBCMD_USAGE[@]}"
      done
    else
      hadoop_generic_columnprinter "" "${HADOOP_SUBCMD_USAGE[@]}"
    fi
    echo ""
    echo "${subcmdtext} may print help when invoked w/o parameters or with -h."
  fi
}

## @description  Replace `oldvar` with `newvar` if `oldvar` exists.
## @audience     public
## @stability    stable
## @replaceable  yes
## @param        oldvar
## @param        newvar
function hadoop_deprecate_envvar
{
  local oldvar=$1
  local newvar=$2
  local oldval=${!oldvar}
  local newval=${!newvar}

  if [[ -n "${oldval}" ]]; then
    hadoop_error "WARNING: ${oldvar} has been replaced by ${newvar}. Using value of ${oldvar}."
    # shellcheck disable=SC2086
    eval ${newvar}=\"${oldval}\"

    # shellcheck disable=SC2086
    newval=${oldval}

    # shellcheck disable=SC2086
    eval ${newvar}=\"${newval}\"
  fi
}
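
# Illustrative usage sketch (not part of the library); the value below is
# hypothetical, the variable pair mirrors Hadoop's own deprecation of
# HADOOP_PREFIX in favor of HADOOP_HOME:
#
#   HADOOP_PREFIX=/opt/hadoop
#   hadoop_deprecate_envvar HADOOP_PREFIX HADOOP_HOME
#   # prints a WARNING and sets HADOOP_HOME=/opt/hadoop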

## @description  Declare `var` being used and print its value.
## @audience     public
## @stability    stable
## @replaceable  yes
## @param        var
function hadoop_using_envvar
{
  local var=$1
  local val=${!var}

  if [[ -n "${val}" ]]; then
    hadoop_debug "${var} = ${val}"
  fi
}

## @description  Create the directory 'dir'.
## @audience     public
## @stability    stable
## @replaceable  yes
## @param        dir
function hadoop_mkdir
{
  local dir=$1

  if [[ ! -w "${dir}" ]] && [[ ! -d "${dir}" ]]; then
    hadoop_error "WARNING: ${dir} does not exist. Creating."
    if ! mkdir -p "${dir}"; then
      hadoop_error "ERROR: Unable to create ${dir}. Aborting."
      exit 1
    fi
  fi
}

## @description  Bootstraps the Hadoop shell environment
## @audience     private
## @stability    evolving
## @replaceable  no
function hadoop_bootstrap
{
  # the root of the Hadoop installation
  # See HADOOP-6255 for the expected directory structure layout

  if [[ -n "${DEFAULT_LIBEXEC_DIR}" ]]; then
    hadoop_error "WARNING: DEFAULT_LIBEXEC_DIR ignored. It has been replaced by HADOOP_DEFAULT_LIBEXEC_DIR."
  fi

  # By now, HADOOP_LIBEXEC_DIR should have been defined upstream
  # We can piggyback off of that to figure out where the default
  # HADOOP_HOME should be.  This allows us to run without
  # HADOOP_HOME ever being defined by a human! As a consequence,
  # HADOOP_LIBEXEC_DIR now becomes perhaps the single most powerful
  # env var within Hadoop.
  if [[ -z "${HADOOP_LIBEXEC_DIR}" ]]; then
    hadoop_error "HADOOP_LIBEXEC_DIR is not defined.  Exiting."
    exit 1
  fi
  HADOOP_DEFAULT_PREFIX=$(cd -P -- "${HADOOP_LIBEXEC_DIR}/.." >/dev/null && pwd -P)
  HADOOP_HOME=${HADOOP_HOME:-$HADOOP_DEFAULT_PREFIX}
  export HADOOP_HOME

  #
  # short-cuts. vendors may redefine these as well, preferably
  # in hadoop-layout.sh
  #
  HADOOP_COMMON_DIR=${HADOOP_COMMON_DIR:-"share/hadoop/common"}
  HADOOP_COMMON_LIB_JARS_DIR=${HADOOP_COMMON_LIB_JARS_DIR:-"share/hadoop/common/lib"}
  HADOOP_COMMON_LIB_NATIVE_DIR=${HADOOP_COMMON_LIB_NATIVE_DIR:-"lib/native"}
  HDFS_DIR=${HDFS_DIR:-"share/hadoop/hdfs"}
  HDFS_LIB_JARS_DIR=${HDFS_LIB_JARS_DIR:-"share/hadoop/hdfs/lib"}
  YARN_DIR=${YARN_DIR:-"share/hadoop/yarn"}
  YARN_LIB_JARS_DIR=${YARN_LIB_JARS_DIR:-"share/hadoop/yarn/lib"}
  MAPRED_DIR=${MAPRED_DIR:-"share/hadoop/mapreduce"}
  MAPRED_LIB_JARS_DIR=${MAPRED_LIB_JARS_DIR:-"share/hadoop/mapreduce/lib"}
  HDDS_DIR=${HDDS_DIR:-"share/hadoop/hdds"}
  HDDS_LIB_JARS_DIR=${HDDS_LIB_JARS_DIR:-"share/hadoop/hdds/lib"}
  OZONE_DIR=${OZONE_DIR:-"share/hadoop/ozone"}
  OZONE_LIB_JARS_DIR=${OZONE_LIB_JARS_DIR:-"share/hadoop/ozone/lib"}
  OZONEFS_DIR=${OZONEFS_DIR:-"share/hadoop/ozonefs"}

  HADOOP_TOOLS_HOME=${HADOOP_TOOLS_HOME:-${HADOOP_HOME}}
  HADOOP_TOOLS_DIR=${HADOOP_TOOLS_DIR:-"share/hadoop/tools"}
  HADOOP_TOOLS_LIB_JARS_DIR=${HADOOP_TOOLS_LIB_JARS_DIR:-"${HADOOP_TOOLS_DIR}/lib"}

  # by default, whatever we are about to run doesn't support
  # daemonization
  HADOOP_SUBCMD_SUPPORTDAEMONIZATION=false

  # by default, we have not been self-re-execed
  HADOOP_REEXECED_CMD=false

  HADOOP_SUBCMD_SECURESERVICE=false

  # This is the default we claim in hadoop-env.sh
  JSVC_HOME=${JSVC_HOME:-"/usr/bin"}

  # usage output set to zero
  hadoop_reset_usage

  export HADOOP_OS_TYPE=${HADOOP_OS_TYPE:-$(uname -s)}

  # defaults
  export HADOOP_OPTS=${HADOOP_OPTS:-"-Djava.net.preferIPv4Stack=true"}
  hadoop_debug "Initial HADOOP_OPTS=${HADOOP_OPTS}"
}

## @description  Locate Hadoop's configuration directory
## @audience     private
## @stability    evolving
## @replaceable  no
function hadoop_find_confdir
{
  local conf_dir

  # An attempt at compatibility with some Hadoop 1.x
  # installs.
  if [[ -e "${HADOOP_HOME}/conf/hadoop-env.sh" ]]; then
    conf_dir="conf"
  else
    conf_dir="etc/hadoop"
  fi
  export HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-${HADOOP_HOME}/${conf_dir}}"

  hadoop_debug "HADOOP_CONF_DIR=${HADOOP_CONF_DIR}"
}

## @description  Validate ${HADOOP_CONF_DIR}
## @audience     public
## @stability    stable
## @replaceable  yes
## @return       will exit on failure conditions
function hadoop_verify_confdir
{
  # Check only log4j.properties by default.
  # --loglevel does not work without logger settings in log4j.properties.
  if [[ ! -f "${HADOOP_CONF_DIR}/log4j.properties" ]]; then
    hadoop_error "WARNING: log4j.properties is not found. HADOOP_CONF_DIR may be incomplete."
  fi
}

## @description  Import the hadoop-env.sh settings
## @audience     private
## @stability    evolving
## @replaceable  no
function hadoop_exec_hadoopenv
{
  if [[ -z "${HADOOP_ENV_PROCESSED}" ]]; then
    if [[ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]]; then
      export HADOOP_ENV_PROCESSED=true
      # shellcheck source=./hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh
      . "${HADOOP_CONF_DIR}/hadoop-env.sh"
    fi
  fi
}

## @description  Import the replaced functions
## @audience     private
## @stability    evolving
## @replaceable  no
function hadoop_exec_userfuncs
{
  if [[ -e "${HADOOP_CONF_DIR}/hadoop-user-functions.sh" ]]; then
    # shellcheck disable=SC1090
    . "${HADOOP_CONF_DIR}/hadoop-user-functions.sh"
  fi
}

## @description  Read the user's settings.  This provides for users to
## @description  override and/or append hadoop-env.sh. It is not meant
## @description  as a complete system override.
## @audience     private
## @stability    evolving
## @replaceable  yes
function hadoop_exec_user_hadoopenv
{
  if [[ -f "${HOME}/.hadoop-env" ]]; then
    hadoop_debug "Applying the user's .hadoop-env"
    # shellcheck disable=SC1090
    . "${HOME}/.hadoop-env"
  fi
}

## @description  Read the user's settings.  This provides for users to
## @description  run Hadoop Shell API after system bootstrap
## @audience     private
## @stability    evolving
## @replaceable  yes
function hadoop_exec_hadooprc
{
  if [[ -f "${HOME}/.hadooprc" ]]; then
    hadoop_debug "Applying the user's .hadooprc"
    # shellcheck disable=SC1090
    . "${HOME}/.hadooprc"
  fi
}

## @description  Import shellprofile.d content
## @audience     private
## @stability    evolving
## @replaceable  yes
function hadoop_import_shellprofiles
{
  local i
  local files1
  local files2

  if [[ -d "${HADOOP_LIBEXEC_DIR}/shellprofile.d" ]]; then
    files1=(${HADOOP_LIBEXEC_DIR}/shellprofile.d/*.sh)
    hadoop_debug "shellprofiles: ${files1[*]}"
  else
    hadoop_error "WARNING: ${HADOOP_LIBEXEC_DIR}/shellprofile.d doesn't exist. Functionality may not work."
  fi

  if [[ -d "${HADOOP_CONF_DIR}/shellprofile.d" ]]; then
    files2=(${HADOOP_CONF_DIR}/shellprofile.d/*.sh)
  fi

  # enable bundled shellprofiles that come
  # from hadoop-tools.  This converts the user-facing HADOOP_OPTIONAL_TOOLS
  # to the HADOOP_TOOLS_OPTIONS that the shell profiles expect.
  # See dist-tools-hooks-maker for how the example HADOOP_OPTIONAL_TOOLS
  # gets populated into hadoop-env.sh

  for i in ${HADOOP_OPTIONAL_TOOLS//,/ }; do
    hadoop_add_entry HADOOP_TOOLS_OPTIONS "${i}"
  done

  for i in "${files1[@]}" "${files2[@]}"
  do
    if [[ -n "${i}"
      && -f "${i}" ]]; then
      hadoop_debug "Profiles: importing ${i}"
      # shellcheck disable=SC1090
      . "${i}"
    fi
  done
}

## @description  Initialize the registered shell profiles
## @audience     private
## @stability    evolving
## @replaceable  yes
function hadoop_shellprofiles_init
{
  local i

  for i in ${HADOOP_SHELL_PROFILES}
  do
    if declare -F _${i}_hadoop_init >/dev/null ; then
      hadoop_debug "Profiles: ${i} init"
      # shellcheck disable=SC2086
      _${i}_hadoop_init
    fi
  done
}

## @description  Apply the shell profile classpath additions
## @audience     private
## @stability    evolving
## @replaceable  yes
function hadoop_shellprofiles_classpath
{
  local i

  for i in ${HADOOP_SHELL_PROFILES}
  do
    if declare -F _${i}_hadoop_classpath >/dev/null ; then
      hadoop_debug "Profiles: ${i} classpath"
      # shellcheck disable=SC2086
      _${i}_hadoop_classpath
    fi
  done
}

## @description  Apply the shell profile native library additions
## @audience     private
## @stability    evolving
## @replaceable  yes
function hadoop_shellprofiles_nativelib
{
  local i

  for i in ${HADOOP_SHELL_PROFILES}
  do
    if declare -F _${i}_hadoop_nativelib >/dev/null ; then
      hadoop_debug "Profiles: ${i} nativelib"
      # shellcheck disable=SC2086
      _${i}_hadoop_nativelib
    fi
  done
}

## @description  Apply the shell profile final configuration
## @audience     private
## @stability    evolving
## @replaceable  yes
function hadoop_shellprofiles_finalize
{
  local i

  for i in ${HADOOP_SHELL_PROFILES}
  do
    if declare -F _${i}_hadoop_finalize >/dev/null ; then
      hadoop_debug "Profiles: ${i} finalize"
      # shellcheck disable=SC2086
      _${i}_hadoop_finalize
    fi
  done
}

## @description  Initialize the Hadoop shell environment, now that
## @description  user settings have been imported
## @audience     private
## @stability    evolving
## @replaceable  no
function hadoop_basic_init
{
  # Some of these are also set in hadoop-env.sh.
  # we still set them here just in case hadoop-env.sh is
  # broken in some way, set up defaults, etc.
  #
  # but it is important to note that if you update these
  # you also need to update hadoop-env.sh as well!!!

  CLASSPATH=""
  hadoop_debug "Initialize CLASSPATH"

  if [[ -z "${HADOOP_COMMON_HOME}" ]] &&
     [[ -d "${HADOOP_HOME}/${HADOOP_COMMON_DIR}" ]]; then
    export HADOOP_COMMON_HOME="${HADOOP_HOME}"
  fi

  # default policy file for service-level authorization
  HADOOP_POLICYFILE=${HADOOP_POLICYFILE:-"hadoop-policy.xml"}

  # define HADOOP_HDFS_HOME
  if [[ -z "${HADOOP_HDFS_HOME}" ]] &&
     [[ -d "${HADOOP_HOME}/${HDFS_DIR}" ]]; then
    export HADOOP_HDFS_HOME="${HADOOP_HOME}"
  fi

  # define HADOOP_YARN_HOME
  if [[ -z "${HADOOP_YARN_HOME}" ]] &&
     [[ -d "${HADOOP_HOME}/${YARN_DIR}" ]]; then
    export HADOOP_YARN_HOME="${HADOOP_HOME}"
  fi

  # define HADOOP_MAPRED_HOME
  if [[ -z "${HADOOP_MAPRED_HOME}" ]] &&
     [[ -d "${HADOOP_HOME}/${MAPRED_DIR}" ]]; then
    export HADOOP_MAPRED_HOME="${HADOOP_HOME}"
  fi

  if [[ ! -d "${HADOOP_COMMON_HOME}" ]]; then
    hadoop_error "ERROR: Invalid HADOOP_COMMON_HOME"
    exit 1
  fi

  if [[ ! -d "${HADOOP_HDFS_HOME}" ]]; then
    hadoop_error "ERROR: Invalid HADOOP_HDFS_HOME"
    exit 1
  fi

  if [[ ! -d "${HADOOP_YARN_HOME}" ]]; then
    hadoop_error "ERROR: Invalid HADOOP_YARN_HOME"
    exit 1
  fi

  if [[ ! -d "${HADOOP_MAPRED_HOME}" ]]; then
    hadoop_error "ERROR: Invalid HADOOP_MAPRED_HOME"
    exit 1
  fi

  # if for some reason the shell doesn't have $USER defined
  # (e.g., ssh'd in to execute a command)
  # let's get the effective username and use that
  USER=${USER:-$(id -nu)}
  HADOOP_IDENT_STRING=${HADOOP_IDENT_STRING:-$USER}
  HADOOP_LOG_DIR=${HADOOP_LOG_DIR:-"${HADOOP_HOME}/logs"}
  HADOOP_LOGFILE=${HADOOP_LOGFILE:-hadoop.log}
  HADOOP_LOGLEVEL=${HADOOP_LOGLEVEL:-INFO}
  HADOOP_NICENESS=${HADOOP_NICENESS:-0}
  HADOOP_STOP_TIMEOUT=${HADOOP_STOP_TIMEOUT:-5}
  HADOOP_PID_DIR=${HADOOP_PID_DIR:-/tmp}
  HADOOP_ROOT_LOGGER=${HADOOP_ROOT_LOGGER:-${HADOOP_LOGLEVEL},console}
  HADOOP_DAEMON_ROOT_LOGGER=${HADOOP_DAEMON_ROOT_LOGGER:-${HADOOP_LOGLEVEL},RFA}
  HADOOP_SECURITY_LOGGER=${HADOOP_SECURITY_LOGGER:-INFO,NullAppender}
  HADOOP_SSH_OPTS=${HADOOP_SSH_OPTS-"-o BatchMode=yes -o StrictHostKeyChecking=no -o ConnectTimeout=10s"}
  HADOOP_SECURE_LOG_DIR=${HADOOP_SECURE_LOG_DIR:-${HADOOP_LOG_DIR}}
  HADOOP_SECURE_PID_DIR=${HADOOP_SECURE_PID_DIR:-${HADOOP_PID_DIR}}
  HADOOP_SSH_PARALLEL=${HADOOP_SSH_PARALLEL:-10}
}

## @description  Set the worker support information to the contents
## @description  of `filename`
## @audience     public
## @stability    stable
## @replaceable  no
## @param        filename
## @return       will exit if file does not exist
function hadoop_populate_workers_file
{
  local workersfile=$1
  shift
  if [[ -f "${workersfile}" ]]; then
    HADOOP_WORKERS="${workersfile}"
  elif [[ -f "${HADOOP_CONF_DIR}/${workersfile}" ]]; then
    HADOOP_WORKERS="${HADOOP_CONF_DIR}/${workersfile}"
  else
    hadoop_error "ERROR: Cannot find hosts file \"${workersfile}\""
    hadoop_exit_with_usage 1
  fi
}

## @description  Rotates the given `file` until `number` of
## @description  files exist.
## @audience     public
## @stability    stable
## @replaceable  no
## @param        filename
## @param        [number]
## @return       $? will contain last mv's return value
function hadoop_rotate_log
{
  #
  # Users are likely to replace this one for something
  # that gzips or uses dates or who knows what.
  #
  # be aware that &1 and &2 might go through here
  # so don't do anything too crazy...
  #
  local log=$1;
  local num=${2:-5};

  if [[ -f "${log}" ]]; then # rotate logs
    while [[ ${num} -gt 1 ]]; do
      #shellcheck disable=SC2086
      let prev=${num}-1
      if [[ -f "${log}.${prev}" ]]; then
        mv "${log}.${prev}" "${log}.${num}"
      fi
      num=${prev}
    done
    mv "${log}" "${log}.${num}"
  fi
}
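
# Illustrative result sketch (not part of the library); the path is a
# hypothetical example. With the default num=5, a call like
#
#   hadoop_rotate_log /var/log/hadoop/hadoop.log
#
# shifts hadoop.log.4 -> hadoop.log.5, ..., hadoop.log.1 -> hadoop.log.2,
# and finally hadoop.log -> hadoop.log.1.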

## @description  Via ssh, log into `hostname` and run `command`
## @audience     private
## @stability    evolving
## @replaceable  yes
## @param        hostname
## @param        command
## @param        [...]
function hadoop_actual_ssh
{
  # we are passing this function to xargs
  # should get hostname followed by rest of command line
  local worker=$1
  shift

  # shellcheck disable=SC2086
  ssh ${HADOOP_SSH_OPTS} ${worker} $"${@// /\\ }" 2>&1 | sed "s/^/$worker: /"
}

## @description  Connect to ${HADOOP_WORKERS} or ${HADOOP_WORKER_NAMES}
## @description  and execute command.
## @audience     private
## @stability    evolving
## @replaceable  yes
## @param        command
## @param        [...]
function hadoop_connect_to_hosts
{
  # shellcheck disable=SC2124
  local params="$@"
  local worker_file
  local tmpslvnames

  #
  # ssh (or whatever) to a host
  #
  # User can specify hostnames or a file where the hostnames are (not both)
  if [[ -n "${HADOOP_WORKERS}" && -n "${HADOOP_WORKER_NAMES}" ]] ; then
    hadoop_error "ERROR: Both HADOOP_WORKERS and HADOOP_WORKER_NAMES were defined. Aborting."
    exit 1
  elif [[ -z "${HADOOP_WORKER_NAMES}" ]]; then
    if [[ -n "${HADOOP_WORKERS}" ]]; then
      worker_file=${HADOOP_WORKERS}
    elif [[ -f "${HADOOP_CONF_DIR}/workers" ]]; then
      worker_file=${HADOOP_CONF_DIR}/workers
    elif [[ -f "${HADOOP_CONF_DIR}/slaves" ]]; then
      hadoop_error "WARNING: 'slaves' file has been deprecated. Please use 'workers' file instead."
      worker_file=${HADOOP_CONF_DIR}/slaves
    fi
  fi

  # if pdsh is available, let's use it.  otherwise default
  # to a loop around ssh.  (ugh)
  if [[ -e '/usr/bin/pdsh' ]]; then
    if [[ -z "${HADOOP_WORKER_NAMES}" ]] ; then
      # if we were given a file, just let pdsh deal with it.
      # shellcheck disable=SC2086
      PDSH_SSH_ARGS_APPEND="${HADOOP_SSH_OPTS}" pdsh \
        -f "${HADOOP_SSH_PARALLEL}" -w ^"${worker_file}" $"${@// /\\ }" 2>&1
    else
      # no spaces allowed in the pdsh arg host list
      # shellcheck disable=SC2086
      tmpslvnames=$(echo ${HADOOP_WORKER_NAMES} | tr -s ' ' ,)
      PDSH_SSH_ARGS_APPEND="${HADOOP_SSH_OPTS}" pdsh \
        -f "${HADOOP_SSH_PARALLEL}" \
        -w "${tmpslvnames}" $"${@// /\\ }" 2>&1
    fi
  else
    if [[ -z "${HADOOP_WORKER_NAMES}" ]]; then
      HADOOP_WORKER_NAMES=$(sed 's/#.*$//;/^$/d' "${worker_file}")
    fi
    hadoop_connect_to_hosts_without_pdsh "${params}"
  fi
}

## @description  Connect to ${HADOOP_WORKER_NAMES} and execute command
## @description  under the environment which does not support pdsh.
## @audience     private
## @stability    evolving
## @replaceable  yes
## @param        command
## @param        [...]
function hadoop_connect_to_hosts_without_pdsh
{
  # shellcheck disable=SC2124
  local params="$@"
  local workers=(${HADOOP_WORKER_NAMES})
  for (( i = 0; i < ${#workers[@]}; i++ ))
  do
    if (( i != 0 && i % HADOOP_SSH_PARALLEL == 0 )); then
      wait
    fi
    # shellcheck disable=SC2086
    hadoop_actual_ssh "${workers[$i]}" ${params} &
  done
  wait
}

## @description  Utility routine to handle --workers mode
## @audience     private
## @stability    evolving
## @replaceable  yes
## @param        commandarray
function hadoop_common_worker_mode_execute
{
  #
  # input should be the command line as given by the user
  # in the form of an array
  #
  local argv=("$@")

  # if --workers is still on the command line, remove it
  # to prevent loops
  # Also remove --hostnames and --hosts along with arg values
  local argsSize=${#argv[@]};
  for (( i = 0; i < argsSize; i++ ))
  do
    if [[ "${argv[$i]}" =~ ^--workers$ ]]; then
      unset argv[$i]
    elif [[ "${argv[$i]}" =~ ^--hostnames$ ]] ||
      [[ "${argv[$i]}" =~ ^--hosts$ ]]; then
      unset argv[$i];
      let i++;
      unset argv[$i];
    fi
  done

  if [[ ${QATESTMODE} = true ]]; then
    echo "${argv[@]}"
    return
  fi

  hadoop_connect_to_hosts -- "${argv[@]}"
}

## @description  Verify that a shell command was passed a valid
## @description  class name
## @audience     public
## @stability    stable
## @replaceable  yes
## @param        classname
## @return       0 = success
## @return       1 = failure w/user message
function hadoop_validate_classname
{
  local class=$1
  shift 1

  if [[ ! ${class} =~ \. ]]; then
    # assume the arg is a typo of a command if it does not contain ".".
    # class belonging to no package is not allowed as a result.
    hadoop_error "ERROR: ${class} is not COMMAND nor fully qualified CLASSNAME."
    return 1
  fi
  return 0
}

## @description  Append the `appendstring` if `checkstring` is not
## @description  present in the given `envvar`
## @audience     public
## @stability    stable
## @replaceable  yes
## @param        envvar
## @param        checkstring
## @param        appendstring
function hadoop_add_param
{
  #
  # general param dedupe..
  # $1 is what we are adding to
  # $2 is the name of what we want to add (key)
  # $3 is the key+value of what we're adding
  #
  # doing it this way allows us to support all sorts of
  # different syntaxes, just so long as they are space
  # delimited
  #
  if [[ ! ${!1} =~ $2 ]] ; then
    #shellcheck disable=SC2140
    eval "$1"="'${!1} $3'"
    if [[ ${!1:0:1} = ' ' ]]; then
      #shellcheck disable=SC2140
      eval "$1"="'${!1# }'"
    fi
    hadoop_debug "$1 accepted $3"
  else
    hadoop_debug "$1 declined $3"
  fi
}
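
# Illustrative usage sketch (not part of the library); this mirrors how the
# finalize functions later in this file use it, with hypothetical values:
#
#   hadoop_add_param HADOOP_OPTS Xmx "-Xmx2g"
#   hadoop_add_param HADOOP_OPTS Xmx "-Xmx4g"   # declined: Xmx already set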

## @description  Register the given `shellprofile` to the Hadoop
## @description  shell subsystem
## @audience     public
## @stability    stable
## @replaceable  yes
## @param        shellprofile
function hadoop_add_profile
{
  # shellcheck disable=SC2086
  hadoop_add_param HADOOP_SHELL_PROFILES $1 $1
}

## @description  Add a file system object (directory, file,
## @description  wildcard, ...) to the classpath. Optionally provide
## @description  a hint as to where in the classpath it should go.
## @audience     public
## @stability    stable
## @replaceable  yes
## @param        object
## @param        [before|after]
## @return       0 = success (added or duplicate)
## @return       1 = failure (doesn't exist or some other reason)
function hadoop_add_classpath
{
  # However, with classpath (& JLP), we can do dedupe
  # along with some sanity checking (e.g., missing directories)
  # since we have a better idea of what is legal
  #
  # for wildcard at end, we can
  # at least check the dir exists
  if [[ $1 =~ ^.*\*$ ]]; then
    local mp
    mp=$(dirname "$1")
    if [[ ! -d "${mp}" ]]; then
      hadoop_debug "Rejected CLASSPATH: $1 (not a dir)"
      return 1
    fi

  # no wildcard in the middle, so check existence
  # (doesn't matter *what* it is)
  elif [[ ! $1 =~ ^.*\*.*$ ]] && [[ ! -e "$1" ]]; then
    hadoop_debug "Rejected CLASSPATH: $1 (does not exist)"
    return 1
  fi
  if [[ -z "${CLASSPATH}" ]]; then
    CLASSPATH=$1
    hadoop_debug "Initial CLASSPATH=$1"
  elif [[ ":${CLASSPATH}:" != *":$1:"* ]]; then
    if [[ "$2" = "before" ]]; then
      CLASSPATH="$1:${CLASSPATH}"
      hadoop_debug "Prepend CLASSPATH: $1"
    else
      CLASSPATH+=:$1
      hadoop_debug "Append CLASSPATH: $1"
    fi
  else
    hadoop_debug "Dupe CLASSPATH: $1"
  fi
  return 0
}
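
# Illustrative usage sketch (not part of the library); the paths are
# hypothetical:
#
#   hadoop_add_classpath "/opt/hadoop/share/hadoop/common/lib/*"
#   hadoop_add_classpath "/etc/hadoop/conf" before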

## @description  Add a file system object (directory, file,
## @description  wildcard, ...) to the colonpath.  Optionally provide
## @description  a hint as to where in the colonpath it should go.
## @description  Prior to adding, objects are checked for duplication
## @description  and checked for existence.  Many other functions use
## @description  this function as their base implementation
## @description  including `hadoop_add_javalibpath` and `hadoop_add_ldlibpath`.
## @audience     public
## @stability    stable
## @replaceable  yes
## @param        envvar
## @param        object
## @param        [before|after]
## @return       0 = success (added or duplicate)
## @return       1 = failure (doesn't exist or some other reason)
function hadoop_add_colonpath
{
  # this is CLASSPATH, JLP, etc but with dedupe but no
  # other checking
  if [[ -d "${2}" ]] && [[ ":${!1}:" != *":$2:"* ]]; then
    if [[ -z "${!1}" ]]; then
      # shellcheck disable=SC2086
      eval $1="'$2'"
      hadoop_debug "Initial colonpath($1): $2"
    elif [[ "$3" = "before" ]]; then
      # shellcheck disable=SC2086
      eval $1="'$2:${!1}'"
      hadoop_debug "Prepend colonpath($1): $2"
    else
      # shellcheck disable=SC2086
      eval $1+=":'$2'"
      hadoop_debug "Append colonpath($1): $2"
    fi
    return 0
  fi
  hadoop_debug "Rejected colonpath($1): $2"
  return 1
}

## @description  Add a file system object (directory, file,
## @description  wildcard, ...) to the Java JNI path.  Optionally
## @description  provide a hint as to where in the Java JNI path
## @description  it should go.
## @audience     public
## @stability    stable
## @replaceable  yes
## @param        object
## @param        [before|after]
## @return       0 = success (added or duplicate)
## @return       1 = failure (doesn't exist or some other reason)
function hadoop_add_javalibpath
{
  # specialized function for a common use case
  hadoop_add_colonpath JAVA_LIBRARY_PATH "$1" "$2"
}

## @description  Add a file system object (directory, file,
## @description  wildcard, ...) to the LD_LIBRARY_PATH.  Optionally
## @description  provide a hint as to where in the LD_LIBRARY_PATH
## @description  it should go.
## @audience     public
## @stability    stable
## @replaceable  yes
## @param        object
## @param        [before|after]
## @return       0 = success (added or duplicate)
## @return       1 = failure (doesn't exist or some other reason)
function hadoop_add_ldlibpath
{
  local status
  # specialized function for a common use case
  hadoop_add_colonpath LD_LIBRARY_PATH "$1" "$2"
  status=$?

  # note that we export this
  export LD_LIBRARY_PATH
  return ${status}
}
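
# Illustrative usage sketch (not part of the library); the directory is
# hypothetical:
#
#   hadoop_add_ldlibpath "/opt/hadoop/lib/native" after
#   # LD_LIBRARY_PATH now ends with :/opt/hadoop/lib/native and is exported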
  1164. ## @description Add the common/core Hadoop components to the
  1165. ## @description environment
  1166. ## @audience private
  1167. ## @stability evolving
  1168. ## @replaceable yes
  1169. ## @returns 1 on failure, may exit
  1170. ## @returns 0 on success
  1171. function hadoop_add_common_to_classpath
  1172. {
  1173. #
  1174. # get all of the common jars+config in the path
  1175. #
  1176. if [[ -z "${HADOOP_COMMON_HOME}"
  1177. || -z "${HADOOP_COMMON_DIR}"
  1178. || -z "${HADOOP_COMMON_LIB_JARS_DIR}" ]]; then
  1179. hadoop_debug "COMMON_HOME=${HADOOP_COMMON_HOME}"
  1180. hadoop_debug "COMMON_DIR=${HADOOP_COMMON_DIR}"
  1181. hadoop_debug "COMMON_LIB_JARS_DIR=${HADOOP_COMMON_LIB_JARS_DIR}"
  1182. hadoop_error "ERROR: HADOOP_COMMON_HOME or related vars are not configured."
  1183. exit 1
  1184. fi
  1185. # developers
  1186. if [[ -n "${HADOOP_ENABLE_BUILD_PATHS}" ]]; then
  1187. hadoop_add_classpath "${HADOOP_COMMON_HOME}/hadoop-common/target/classes"
  1188. fi
  1189. hadoop_add_classpath "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_LIB_JARS_DIR}"'/*'
  1190. hadoop_add_classpath "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_DIR}"'/*'
  1191. }
  1192. ## @description Run libexec/tools/module.sh to add to the classpath
  1193. ## @description environment
  1194. ## @audience private
  1195. ## @stability evolving
  1196. ## @replaceable yes
  1197. ## @param module
  1198. function hadoop_add_to_classpath_tools
  1199. {
  1200. declare module=$1
  1201. if [[ -f "${HADOOP_LIBEXEC_DIR}/tools/${module}.sh" ]]; then
  1202. # shellcheck disable=SC1090
  1203. . "${HADOOP_LIBEXEC_DIR}/tools/${module}.sh"
  1204. else
  1205. hadoop_error "ERROR: Tools helper ${HADOOP_LIBEXEC_DIR}/tools/${module}.sh was not found."
  1206. fi
  1207. if declare -f hadoop_classpath_tools_${module} >/dev/null 2>&1; then
  1208. "hadoop_classpath_tools_${module}"
  1209. fi
  1210. }
  1211. ## @description Add the user's custom classpath settings to the
  1212. ## @description environment
  1213. ## @audience private
  1214. ## @stability evolving
  1215. ## @replaceable yes
  1216. function hadoop_add_to_classpath_userpath
  1217. {
  1218. # Add the user-specified HADOOP_CLASSPATH to the
  1219. # official CLASSPATH env var if HADOOP_USE_CLIENT_CLASSLOADER
  1220. # is not set.
  1221. # Add it first or last depending on if user has
  1222. # set env-var HADOOP_USER_CLASSPATH_FIRST
  1223. # we'll also dedupe it, because we're cool like that.
  1224. #
  1225. declare -a array
  1226. declare -i c=0
  1227. declare -i j
  1228. declare -i i
  1229. declare idx
  1230. if [[ -n "${HADOOP_CLASSPATH}" ]]; then
  1231. # I wonder if Java runs on VMS.
  1232. for idx in $(echo "${HADOOP_CLASSPATH}" | tr : '\n'); do
  1233. array[${c}]=${idx}
  1234. ((c=c+1))
  1235. done
  1236. # bats gets confused by j getting set to 0
  1237. ((j=c-1)) || ${QATESTMODE}
  1238. if [[ -z "${HADOOP_USE_CLIENT_CLASSLOADER}" ]]; then
  1239. if [[ -z "${HADOOP_USER_CLASSPATH_FIRST}" ]]; then
  1240. for ((i=0; i<=j; i++)); do
  1241. hadoop_add_classpath "${array[$i]}" after
  1242. done
  1243. else
  1244. for ((i=j; i>=0; i--)); do
  1245. hadoop_add_classpath "${array[$i]}" before
  1246. done
  1247. fi
  1248. fi
  1249. fi
  1250. }
  1251. ## @description Routine to configure any OS-specific settings.
  1252. ## @audience public
  1253. ## @stability stable
  1254. ## @replaceable yes
  1255. ## @return may exit on failure conditions
  1256. function hadoop_os_tricks
  1257. {
  1258. local bindv6only
  1259. HADOOP_IS_CYGWIN=false
  1260. case ${HADOOP_OS_TYPE} in
  1261. Darwin)
  1262. if [[ -z "${JAVA_HOME}" ]]; then
  1263. if [[ -x /usr/libexec/java_home ]]; then
  1264. JAVA_HOME="$(/usr/libexec/java_home)"
  1265. export JAVA_HOME
  1266. else
  1267. JAVA_HOME=/Library/Java/Home
  1268. export JAVA_HOME
  1269. fi
  1270. fi
  1271. ;;
  1272. Linux)
  1273. # Newer versions of glibc use an arena memory allocator that
  1274. # causes virtual # memory usage to explode. This interacts badly
  1275. # with the many threads that we use in Hadoop. Tune the variable
  1276. # down to prevent vmem explosion.
  1277. export MALLOC_ARENA_MAX=${MALLOC_ARENA_MAX:-4}
  1278. # we put this in QA test mode off so that non-Linux can test
  1279. if [[ "${QATESTMODE}" = true ]]; then
  1280. return
  1281. fi
  1282. # NOTE! HADOOP_ALLOW_IPV6 is a developer hook. We leave it
  1283. # undocumented in hadoop-env.sh because we don't want users to
  1284. # shoot themselves in the foot while devs make IPv6 work.
  1285. bindv6only=$(/sbin/sysctl -n net.ipv6.bindv6only 2> /dev/null)
  1286. if [[ -n "${bindv6only}" ]] &&
  1287. [[ "${bindv6only}" -eq "1" ]] &&
  1288. [[ "${HADOOP_ALLOW_IPV6}" != "yes" ]]; then
  1289. hadoop_error "ERROR: \"net.ipv6.bindv6only\" is set to 1 "
  1290. hadoop_error "ERROR: Hadoop networking could be broken. Aborting."
  1291. hadoop_error "ERROR: For more info: http://wiki.apache.org/hadoop/HadoopIPv6"
  1292. exit 1
  1293. fi
  1294. ;;
  1295. CYGWIN*)
  1296. # Flag that we're running on Cygwin to trigger path translation later.
  1297. HADOOP_IS_CYGWIN=true
  1298. ;;
  1299. esac
  1300. }
## @description  Configure/verify ${JAVA_HOME}
## @audience     public
## @stability    stable
## @replaceable  yes
## @return       may exit on failure conditions
function hadoop_java_setup
{
  # Bail if we did not detect it
  if [[ -z "${JAVA_HOME}" ]]; then
    hadoop_error "ERROR: JAVA_HOME is not set and could not be found."
    exit 1
  fi

  if [[ ! -d "${JAVA_HOME}" ]]; then
    hadoop_error "ERROR: JAVA_HOME ${JAVA_HOME} does not exist."
    exit 1
  fi

  JAVA="${JAVA_HOME}/bin/java"

  if [[ ! -x "$JAVA" ]]; then
    hadoop_error "ERROR: $JAVA is not executable."
    exit 1
  fi
}
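# Illustrative sketch (not executed when this file is sourced): after a
# successful hadoop_java_setup, ${JAVA} points at a runnable binary.
# The JAVA_HOME path below is a hypothetical example:
#
#   export JAVA_HOME=/usr/lib/jvm/java-8-openjdk
#   hadoop_java_setup
#   "${JAVA}" -version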
## @description  Finish Java JNI paths prior to execution
## @audience     private
## @stability    evolving
## @replaceable  yes
function hadoop_finalize_libpaths
{
  if [[ -n "${JAVA_LIBRARY_PATH}" ]]; then
    hadoop_translate_cygwin_path JAVA_LIBRARY_PATH
    hadoop_add_param HADOOP_OPTS java.library.path \
      "-Djava.library.path=${JAVA_LIBRARY_PATH}"
    export LD_LIBRARY_PATH
  fi
}
## @description  Finish Java heap parameters prior to execution
## @audience     private
## @stability    evolving
## @replaceable  yes
function hadoop_finalize_hadoop_heap
{
  if [[ -n "${HADOOP_HEAPSIZE_MAX}" ]]; then
    if [[ "${HADOOP_HEAPSIZE_MAX}" =~ ^[0-9]+$ ]]; then
      HADOOP_HEAPSIZE_MAX="${HADOOP_HEAPSIZE_MAX}m"
    fi
    hadoop_add_param HADOOP_OPTS Xmx "-Xmx${HADOOP_HEAPSIZE_MAX}"
  fi

  # backwards compatibility
  if [[ -n "${HADOOP_HEAPSIZE}" ]]; then
    if [[ "${HADOOP_HEAPSIZE}" =~ ^[0-9]+$ ]]; then
      HADOOP_HEAPSIZE="${HADOOP_HEAPSIZE}m"
    fi
    hadoop_add_param HADOOP_OPTS Xmx "-Xmx${HADOOP_HEAPSIZE}"
  fi

  if [[ -n "${HADOOP_HEAPSIZE_MIN}" ]]; then
    if [[ "${HADOOP_HEAPSIZE_MIN}" =~ ^[0-9]+$ ]]; then
      HADOOP_HEAPSIZE_MIN="${HADOOP_HEAPSIZE_MIN}m"
    fi
    hadoop_add_param HADOOP_OPTS Xms "-Xms${HADOOP_HEAPSIZE_MIN}"
  fi
}
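# Illustrative sketch: a bare number is treated as megabytes (the "m"
# suffix is appended), while a value with an explicit unit is passed
# through untouched:
#
#   HADOOP_HEAPSIZE_MAX=4096   # becomes -Xmx4096m
#   HADOOP_HEAPSIZE_MAX=4g     # stays   -Xmx4g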
## @description  Converts the contents of the variable name
## @description  `varnameref` into the equivalent Windows path.
## @description  If the second parameter is true, then `varnameref`
## @description  is treated as though it was a path list.
## @audience     public
## @stability    stable
## @replaceable  yes
## @param        varnameref
## @param        [true]
function hadoop_translate_cygwin_path
{
  if [[ "${HADOOP_IS_CYGWIN}" = "true" ]]; then
    if [[ "$2" = "true" ]]; then
      #shellcheck disable=SC2016
      eval "$1"='$(cygpath -p -w "${!1}" 2>/dev/null)'
    else
      #shellcheck disable=SC2016
      eval "$1"='$(cygpath -w "${!1}" 2>/dev/null)'
    fi
  fi
}
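# Illustrative sketch: the variable is passed by *name*, not by value,
# so the function can rewrite it in place on Cygwin hosts; on every
# other OS it is a no-op:
#
#   HADOOP_LOG_DIR=/var/log/hadoop
#   hadoop_translate_cygwin_path HADOOP_LOG_DIR    # single path
#   hadoop_translate_cygwin_path CLASSPATH true    # colon-separated list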
## @description  Adds the HADOOP_CLIENT_OPTS variable to
## @description  HADOOP_OPTS if HADOOP_SUBCMD_SUPPORTDAEMONIZATION is false
## @audience     public
## @stability    stable
## @replaceable  yes
function hadoop_add_client_opts
{
  if [[ "${HADOOP_SUBCMD_SUPPORTDAEMONIZATION}" = false
     || -z "${HADOOP_SUBCMD_SUPPORTDAEMONIZATION}" ]]; then
    hadoop_debug "Appending HADOOP_CLIENT_OPTS onto HADOOP_OPTS"
    HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
  fi
}
## @description  Finish configuring Hadoop-specific system properties
## @description  prior to executing Java
## @audience     private
## @stability    evolving
## @replaceable  yes
function hadoop_finalize_hadoop_opts
{
  hadoop_translate_cygwin_path HADOOP_LOG_DIR
  hadoop_add_param HADOOP_OPTS hadoop.log.dir "-Dhadoop.log.dir=${HADOOP_LOG_DIR}"
  hadoop_add_param HADOOP_OPTS hadoop.log.file "-Dhadoop.log.file=${HADOOP_LOGFILE}"
  hadoop_translate_cygwin_path HADOOP_HOME
  export HADOOP_HOME
  hadoop_add_param HADOOP_OPTS hadoop.home.dir "-Dhadoop.home.dir=${HADOOP_HOME}"
  hadoop_add_param HADOOP_OPTS hadoop.id.str "-Dhadoop.id.str=${HADOOP_IDENT_STRING}"
  hadoop_add_param HADOOP_OPTS hadoop.root.logger "-Dhadoop.root.logger=${HADOOP_ROOT_LOGGER}"
  hadoop_add_param HADOOP_OPTS hadoop.policy.file "-Dhadoop.policy.file=${HADOOP_POLICYFILE}"
  hadoop_add_param HADOOP_OPTS hadoop.security.logger "-Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER}"
}
## @description  Finish Java classpath prior to execution
## @audience     private
## @stability    evolving
## @replaceable  yes
function hadoop_finalize_classpath
{
  hadoop_add_classpath "${HADOOP_CONF_DIR}" before

  # user classpath gets added at the last minute. this allows
  # override of CONF dirs and more
  hadoop_add_to_classpath_userpath
  hadoop_translate_cygwin_path CLASSPATH true
}
## @description  Finish all the remaining environment settings prior
## @description  to executing Java. This is a wrapper that calls
## @description  the other `finalize` routines.
## @audience     private
## @stability    evolving
## @replaceable  yes
function hadoop_finalize
{
  hadoop_shellprofiles_finalize

  hadoop_finalize_classpath
  hadoop_finalize_libpaths
  hadoop_finalize_hadoop_heap
  hadoop_finalize_hadoop_opts

  hadoop_translate_cygwin_path HADOOP_HOME
  hadoop_translate_cygwin_path HADOOP_CONF_DIR
  hadoop_translate_cygwin_path HADOOP_COMMON_HOME
  hadoop_translate_cygwin_path HADOOP_HDFS_HOME
  hadoop_translate_cygwin_path HADOOP_YARN_HOME
  hadoop_translate_cygwin_path HADOOP_MAPRED_HOME
}
## @description  Print usage information and exit with the passed
## @description  `exitcode`
## @audience     public
## @stability    stable
## @replaceable  no
## @param        exitcode
## @return       This function will always exit.
function hadoop_exit_with_usage
{
  local exitcode=$1
  if [[ -z $exitcode ]]; then
    exitcode=1
  fi
  # shellcheck disable=SC2034
  if declare -F hadoop_usage >/dev/null ; then
    hadoop_usage
  elif [[ -x /usr/bin/cowsay ]]; then
    /usr/bin/cowsay -f elephant "Sorry, no help available."
  else
    hadoop_error "Sorry, no help available."
  fi
  exit $exitcode
}
## @description  Verify that prerequisites have been met prior to
## @description  executing a privileged program.
## @audience     private
## @stability    evolving
## @replaceable  yes
## @return       This routine may exit.
function hadoop_verify_secure_prereq
{
  # if you are on an OS like Illumos that has functional roles
  # and you are using pfexec, you'll probably want to change
  # this.

  if ! hadoop_privilege_check && [[ -z "${HADOOP_SECURE_COMMAND}" ]]; then
    hadoop_error "ERROR: You must be a privileged user in order to run a secure service."
    exit 1
  else
    return 0
  fi
}
## @audience     private
## @stability    evolving
## @replaceable  yes
function hadoop_setup_secure_service
{
  # need a more complicated setup? replace me!

  HADOOP_PID_DIR=${HADOOP_SECURE_PID_DIR}
  HADOOP_LOG_DIR=${HADOOP_SECURE_LOG_DIR}
}
## @audience     private
## @stability    evolving
## @replaceable  yes
function hadoop_verify_piddir
{
  if [[ -z "${HADOOP_PID_DIR}" ]]; then
    hadoop_error "No pid directory defined."
    exit 1
  fi
  hadoop_mkdir "${HADOOP_PID_DIR}"
  touch "${HADOOP_PID_DIR}/$$" >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Unable to write in ${HADOOP_PID_DIR}. Aborting."
    exit 1
  fi
  rm "${HADOOP_PID_DIR}/$$" >/dev/null 2>&1
}
## @audience     private
## @stability    evolving
## @replaceable  yes
function hadoop_verify_logdir
{
  if [[ -z "${HADOOP_LOG_DIR}" ]]; then
    hadoop_error "No log directory defined."
    exit 1
  fi
  hadoop_mkdir "${HADOOP_LOG_DIR}"
  touch "${HADOOP_LOG_DIR}/$$" >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Unable to write in ${HADOOP_LOG_DIR}. Aborting."
    exit 1
  fi
  rm "${HADOOP_LOG_DIR}/$$" >/dev/null 2>&1
}
## @description  Determine the status of the daemon referenced
## @description  by `pidfile`
## @audience     public
## @stability    stable
## @replaceable  yes
## @param        pidfile
## @return       (mostly) LSB 4.1.0 compatible status
function hadoop_status_daemon
{
  #
  # LSB 4.1.0 compatible status command (1)
  #
  # 0 = program is running
  # 1 = dead, but still a pid (2)
  # 2 = (not used by us)
  # 3 = not running
  #
  # 1 - this is not an endorsement of the LSB
  #
  # 2 - technically, the specification says /var/run/pid, so
  #     we should never return this value, but we're giving
  #     them the benefit of a doubt and returning 1 even if
  #     our pid is not in /var/run .
  #

  local pidfile=$1
  shift

  local pid
  local pspid

  if [[ -f "${pidfile}" ]]; then
    pid=$(cat "${pidfile}")
    if pspid=$(ps -o args= -p"${pid}" 2>/dev/null); then
      # this is to check that the running process we found is actually the same
      # daemon that we're interested in
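      # (note: ${daemonname} is not a parameter of this function; it is
      # expected to be set in the calling function's scope, e.g. by
      # hadoop_daemon_handler)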
      if [[ ${pspid} =~ -Dproc_${daemonname} ]]; then
        return 0
      fi
    fi
    return 1
  fi
  return 3
}
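# Illustrative sketch (the pid file path is hypothetical): callers
# typically branch on the LSB-style return code:
#
#   hadoop_status_daemon "${HADOOP_PID_DIR}/hadoop-datanode.pid"
#   case $? in
#     0) echo "running" ;;
#     1) echo "dead, but pid file exists" ;;
#     3) echo "not running" ;;
#   esac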
## @description  Execute the Java `class`, passing along any `options`.
## @description  Additionally, set the Java property -Dproc_`command`.
## @audience     public
## @stability    stable
## @replaceable  yes
## @param        command
## @param        class
## @param        [options]
function hadoop_java_exec
{
  # run a java command. this is used for
  # non-daemons
  local command=$1
  local class=$2
  shift 2

  hadoop_debug "Final CLASSPATH: ${CLASSPATH}"
  hadoop_debug "Final HADOOP_OPTS: ${HADOOP_OPTS}"
  hadoop_debug "Final JAVA_HOME: ${JAVA_HOME}"
  hadoop_debug "java: ${JAVA}"
  hadoop_debug "Class name: ${class}"
  hadoop_debug "Command line options: $*"

  export CLASSPATH
  #shellcheck disable=SC2086
  exec "${JAVA}" "-Dproc_${command}" ${HADOOP_OPTS} "${class}" "$@"
}
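# Illustrative sketch (hypothetical invocation): a non-daemon
# subcommand is eventually dispatched through this function as roughly:
#
#   hadoop_java_exec classpath org.apache.hadoop.util.Classpath --glob
#
# Note that this exec()s: the JVM replaces the current shell process.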
## @description  Start a non-privileged daemon in the foreground.
## @audience     private
## @stability    evolving
## @replaceable  yes
## @param        command
## @param        class
## @param        pidfile
## @param        [options]
function hadoop_start_daemon
{
  # this is our non-privileged daemon starter
  # that fires up a daemon in the *foreground*
  # so complex! so wow! much java!
  local command=$1
  local class=$2
  local pidfile=$3
  shift 3

  hadoop_debug "Final CLASSPATH: ${CLASSPATH}"
  hadoop_debug "Final HADOOP_OPTS: ${HADOOP_OPTS}"
  hadoop_debug "Final JAVA_HOME: ${JAVA_HOME}"
  hadoop_debug "java: ${JAVA}"
  hadoop_debug "Class name: ${class}"
  hadoop_debug "Command line options: $*"

  # this is for the non-daemon pid creation
  #shellcheck disable=SC2086
  echo $$ > "${pidfile}" 2>/dev/null
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot write ${command} pid ${pidfile}."
  fi

  export CLASSPATH
  #shellcheck disable=SC2086
  exec "${JAVA}" "-Dproc_${command}" ${HADOOP_OPTS} "${class}" "$@"
}
## @description  Start a non-privileged daemon in the background.
## @audience     private
## @stability    evolving
## @replaceable  yes
## @param        command
## @param        class
## @param        pidfile
## @param        outfile
## @param        [options]
function hadoop_start_daemon_wrapper
{
  local daemonname=$1
  local class=$2
  local pidfile=$3
  local outfile=$4
  shift 4

  local counter

  hadoop_rotate_log "${outfile}"

  hadoop_start_daemon "${daemonname}" \
    "$class" \
    "${pidfile}" \
    "$@" >> "${outfile}" 2>&1 < /dev/null &

  # we need to avoid a race condition here
  # so let's wait for the fork to finish
  # before overriding with the daemonized pid
  (( counter=0 ))
  while [[ ! -f ${pidfile} && ${counter} -le 5 ]]; do
    sleep 1
    (( counter++ ))
  done

  # this is for daemon pid creation
  #shellcheck disable=SC2086
  echo $! > "${pidfile}" 2>/dev/null
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot write ${daemonname} pid ${pidfile}."
  fi

  # shellcheck disable=SC2086
  renice "${HADOOP_NICENESS}" $! >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot set priority of ${daemonname} process $!"
  fi

  # shellcheck disable=SC2086
  disown %+ >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot disconnect ${daemonname} process $!"
  fi
  sleep 1

  # capture the ulimit output
  ulimit -a >> "${outfile}" 2>&1

  # shellcheck disable=SC2086
  if ! ps -p $! >/dev/null 2>&1; then
    return 1
  fi
  return 0
}
## @description  Start a privileged daemon in the foreground.
## @audience     private
## @stability    evolving
## @replaceable  yes
## @param        command
## @param        class
## @param        daemonpidfile
## @param        daemonoutfile
## @param        daemonerrfile
## @param        wrapperpidfile
## @param        [options]
function hadoop_start_secure_daemon
{
  # this is used to launch a secure daemon in the *foreground*
  #
  local daemonname=$1
  local class=$2

  # pid file to create for our daemon
  local daemonpidfile=$3

  # where to send stdout. jsvc has bad habits so this *may* be &1
  # which means you send it to stdout!
  local daemonoutfile=$4

  # where to send stderr. same thing, except &2 = stderr
  local daemonerrfile=$5
  local privpidfile=$6
  shift 6

  hadoop_rotate_log "${daemonoutfile}"
  hadoop_rotate_log "${daemonerrfile}"

  # shellcheck disable=SC2153
  jsvc="${JSVC_HOME}/jsvc"
  if [[ ! -f "${jsvc}" ]]; then
    hadoop_error "JSVC_HOME is not set or set incorrectly. jsvc is required to run secure"
    hadoop_error "or privileged daemons. Please download and install jsvc from "
    hadoop_error "http://archive.apache.org/dist/commons/daemon/binaries/ "
    hadoop_error "and set JSVC_HOME to the directory containing the jsvc binary."
    exit 1
  fi

  # note that shellcheck will throw a
  # bogus for-our-use-case 2086 here.
  # it doesn't properly support multi-line situations

  hadoop_debug "Final CLASSPATH: ${CLASSPATH}"
  hadoop_debug "Final HADOOP_OPTS: ${HADOOP_OPTS}"
  hadoop_debug "Final JSVC_HOME: ${JSVC_HOME}"
  hadoop_debug "jsvc: ${jsvc}"
  hadoop_debug "Final HADOOP_DAEMON_JSVC_EXTRA_OPTS: ${HADOOP_DAEMON_JSVC_EXTRA_OPTS}"
  hadoop_debug "Class name: ${class}"
  hadoop_debug "Command line options: $*"

  #shellcheck disable=SC2086
  echo $$ > "${privpidfile}" 2>/dev/null
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot write ${daemonname} pid ${privpidfile}."
  fi

  # shellcheck disable=SC2086
  exec "${jsvc}" \
    "-Dproc_${daemonname}" \
    ${HADOOP_DAEMON_JSVC_EXTRA_OPTS} \
    -outfile "${daemonoutfile}" \
    -errfile "${daemonerrfile}" \
    -pidfile "${daemonpidfile}" \
    -nodetach \
    -user "${HADOOP_SECURE_USER}" \
    -cp "${CLASSPATH}" \
    ${HADOOP_OPTS} \
    "${class}" "$@"
}
## @description  Start a privileged daemon in the background.
## @audience     private
## @stability    evolving
## @replaceable  yes
## @param        command
## @param        class
## @param        daemonpidfile
## @param        daemonoutfile
## @param        wrapperpidfile
## @param        wrapperoutfile
## @param        daemonerrfile
## @param        [options]
function hadoop_start_secure_daemon_wrapper
{
  # this wraps hadoop_start_secure_daemon to take care
  # of the dirty work to launch a daemon in the background!
  local daemonname=$1
  local class=$2

  # same rules as hadoop_start_secure_daemon except we
  # have some additional parameters

  local daemonpidfile=$3
  local daemonoutfile=$4

  # the pid file of the subprocess that spawned our
  # secure launcher
  local jsvcpidfile=$5

  # the output of the subprocess that spawned our secure
  # launcher
  local jsvcoutfile=$6

  local daemonerrfile=$7
  shift 7

  local counter

  hadoop_rotate_log "${jsvcoutfile}"

  hadoop_start_secure_daemon \
    "${daemonname}" \
    "${class}" \
    "${daemonpidfile}" \
    "${daemonoutfile}" \
    "${daemonerrfile}" \
    "${jsvcpidfile}" "$@" >> "${jsvcoutfile}" 2>&1 < /dev/null &

  # we need to avoid a race condition here
  # so let's wait for the fork to finish
  # before overriding with the daemonized pid
  (( counter=0 ))
  while [[ ! -f ${daemonpidfile} && ${counter} -le 5 ]]; do
    sleep 1
    (( counter++ ))
  done

  #shellcheck disable=SC2086
  if ! echo $! > "${jsvcpidfile}"; then
    hadoop_error "ERROR: Cannot write ${daemonname} pid ${jsvcpidfile}."
  fi

  sleep 1
  #shellcheck disable=SC2086
  renice "${HADOOP_NICENESS}" $! >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot set priority of ${daemonname} process $!"
  fi
  if [[ -f "${daemonpidfile}" ]]; then
    #shellcheck disable=SC2046
    renice "${HADOOP_NICENESS}" $(cat "${daemonpidfile}" 2>/dev/null) >/dev/null 2>&1
    if [[ $? -gt 0 ]]; then
      hadoop_error "ERROR: Cannot set priority of ${daemonname} process $(cat "${daemonpidfile}" 2>/dev/null)"
    fi
  fi
  #shellcheck disable=SC2046
  disown %+ >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot disconnect ${daemonname} process $!"
  fi
  # capture the ulimit output
  su "${HADOOP_SECURE_USER}" -c 'bash -c "ulimit -a"' >> "${jsvcoutfile}" 2>&1
  #shellcheck disable=SC2086
  if ! ps -p $! >/dev/null 2>&1; then
    return 1
  fi
  return 0
}
## @description  Wait until the process dies or the timeout expires
## @audience     private
## @stability    evolving
## @param        pid
## @param        timeout
function wait_process_to_die_or_timeout
{
  local pid=$1
  local timeout=$2

  # Normalize timeout
  # Round up or down
  timeout=$(printf "%.0f\n" "${timeout}")
  if [[ ${timeout} -lt 1 ]]; then
    # minimum 1 second
    timeout=1
  fi

  # Wait to see if it's still alive
  for (( i=0; i < "${timeout}"; i++ ))
  do
    if kill -0 "${pid}" > /dev/null 2>&1; then
      sleep 1
    else
      break
    fi
  done
}
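# Illustrative sketch: give a process (the pid variable here stands in
# for one read from a pid file) up to HADOOP_STOP_TIMEOUT seconds to
# exit before the caller escalates to kill -9:
#
#   kill "${pid}"
#   wait_process_to_die_or_timeout "${pid}" "${HADOOP_STOP_TIMEOUT}"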
## @description  Stop the non-privileged `command` daemon
## @description  that is running at `pidfile`.
## @audience     public
## @stability    stable
## @replaceable  yes
## @param        command
## @param        pidfile
function hadoop_stop_daemon
{
  local cmd=$1
  local pidfile=$2
  shift 2

  local pid
  local cur_pid

  if [[ -f "${pidfile}" ]]; then
    pid=$(cat "$pidfile")

    kill "${pid}" >/dev/null 2>&1

    wait_process_to_die_or_timeout "${pid}" "${HADOOP_STOP_TIMEOUT}"

    if kill -0 "${pid}" > /dev/null 2>&1; then
      hadoop_error "WARNING: ${cmd} did not stop gracefully after ${HADOOP_STOP_TIMEOUT} seconds: Trying to kill with kill -9"
      kill -9 "${pid}" >/dev/null 2>&1
    fi
    wait_process_to_die_or_timeout "${pid}" "${HADOOP_STOP_TIMEOUT}"
    if ps -p "${pid}" > /dev/null 2>&1; then
      hadoop_error "ERROR: Unable to kill ${pid}"
    else
      cur_pid=$(cat "$pidfile")
      if [[ "${pid}" = "${cur_pid}" ]]; then
        rm -f "${pidfile}" >/dev/null 2>&1
      else
        hadoop_error "WARNING: pid has changed for ${cmd}, skip deleting pid file"
      fi
    fi
  fi
}
## @description  Stop the privileged `command` daemon
## @description  that is running at `daemonpidfile` and launched with
## @description  the wrapper at `wrapperpidfile`.
## @audience     public
## @stability    stable
## @replaceable  yes
## @param        command
## @param        daemonpidfile
## @param        wrapperpidfile
function hadoop_stop_secure_daemon
{
  local command=$1
  local daemonpidfile=$2
  local privpidfile=$3
  shift 3

  local ret
  local daemon_pid
  local priv_pid
  local cur_daemon_pid
  local cur_priv_pid

  daemon_pid=$(cat "$daemonpidfile")
  priv_pid=$(cat "$privpidfile")

  hadoop_stop_daemon "${command}" "${daemonpidfile}"
  ret=$?

  cur_daemon_pid=$(cat "$daemonpidfile")
  cur_priv_pid=$(cat "$privpidfile")

  if [[ "${daemon_pid}" = "${cur_daemon_pid}" ]]; then
    rm -f "${daemonpidfile}" >/dev/null 2>&1
  else
    hadoop_error "WARNING: daemon pid has changed for ${command}, skip deleting daemon pid file"
  fi

  if [[ "${priv_pid}" = "${cur_priv_pid}" ]]; then
    rm -f "${privpidfile}" >/dev/null 2>&1
  else
    hadoop_error "WARNING: priv pid has changed for ${command}, skip deleting priv pid file"
  fi
  return ${ret}
}
## @description  Manage a non-privileged daemon.
## @audience     private
## @stability    evolving
## @replaceable  yes
## @param        [start|stop|status|default]
## @param        command
## @param        class
## @param        daemonpidfile
## @param        daemonoutfile
## @param        [options]
function hadoop_daemon_handler
{
  local daemonmode=$1
  local daemonname=$2
  local class=$3
  local daemon_pidfile=$4
  local daemon_outfile=$5
  shift 5

  case ${daemonmode} in
    status)
      hadoop_status_daemon "${daemon_pidfile}"
      exit $?
    ;;

    stop)
      hadoop_stop_daemon "${daemonname}" "${daemon_pidfile}"
      exit $?
    ;;

    ##COMPAT -- older hadoops would also start daemons by default
    start|default)
      hadoop_verify_piddir
      hadoop_verify_logdir
      hadoop_status_daemon "${daemon_pidfile}"
      if [[ $? == 0 ]]; then
        hadoop_error "${daemonname} is running as process $(cat "${daemon_pidfile}"). Stop it first."
        exit 1
      else
        # stale pid file, so just remove it and continue on
        rm -f "${daemon_pidfile}" >/dev/null 2>&1
      fi

      ##COMPAT - differentiate between --daemon start and nothing
      # "nothing" shouldn't detach
      if [[ "$daemonmode" = "default" ]]; then
        hadoop_start_daemon "${daemonname}" "${class}" "${daemon_pidfile}" "$@"
      else
        hadoop_start_daemon_wrapper "${daemonname}" \
          "${class}" "${daemon_pidfile}" "${daemon_outfile}" "$@"
      fi
    ;;
  esac
}
## @description  Manage a privileged daemon.
## @audience     private
## @stability    evolving
## @replaceable  yes
## @param        [start|stop|status|default]
## @param        command
## @param        class
## @param        daemonpidfile
## @param        daemonoutfile
## @param        wrapperpidfile
## @param        wrapperoutfile
## @param        wrappererrfile
## @param        [options]
function hadoop_secure_daemon_handler
{
  local daemonmode=$1
  local daemonname=$2
  local classname=$3
  local daemon_pidfile=$4
  local daemon_outfile=$5
  local priv_pidfile=$6
  local priv_outfile=$7
  local priv_errfile=$8
  shift 8

  case ${daemonmode} in
    status)
      hadoop_status_daemon "${daemon_pidfile}"
      exit $?
    ;;

    stop)
      hadoop_stop_secure_daemon "${daemonname}" \
        "${daemon_pidfile}" "${priv_pidfile}"
      exit $?
    ;;

    ##COMPAT -- older hadoops would also start daemons by default
    start|default)
      hadoop_verify_piddir
      hadoop_verify_logdir
      hadoop_status_daemon "${daemon_pidfile}"
      if [[ $? == 0 ]]; then
        hadoop_error "${daemonname} is running as process $(cat "${daemon_pidfile}"). Stop it first."
        exit 1
      else
        # stale pid file, so just remove it and continue on
        rm -f "${daemon_pidfile}" >/dev/null 2>&1
      fi

      ##COMPAT - differentiate between --daemon start and nothing
      # "nothing" shouldn't detach
      if [[ "${daemonmode}" = "default" ]]; then
        hadoop_start_secure_daemon "${daemonname}" "${classname}" \
          "${daemon_pidfile}" "${daemon_outfile}" \
          "${priv_errfile}" "${priv_pidfile}" "$@"
      else
        hadoop_start_secure_daemon_wrapper "${daemonname}" "${classname}" \
          "${daemon_pidfile}" "${daemon_outfile}" \
          "${priv_pidfile}" "${priv_outfile}" "${priv_errfile}" "$@"
      fi
    ;;
  esac
}
## @description  Autodetect whether this is a privileged subcommand:
## @description  it is privileged if HADOOP_SECURE_CLASSNAME is defined
## @description  and the matching (program)_(command)_SECURE_USER
## @description  variable is set.
## @audience     public
## @stability    stable
## @replaceable  yes
## @param        command
## @param        subcommand
## @return       1 = not priv
## @return       0 = priv
function hadoop_detect_priv_subcmd
{
  declare program=$1
  declare command=$2
  declare uvar

  if [[ -z "${HADOOP_SECURE_CLASSNAME}" ]]; then
    hadoop_debug "No secure classname defined."
    return 1
  fi

  uvar=$(hadoop_build_custom_subcmd_var "${program}" "${command}" SECURE_USER)
  if [[ -z "${!uvar}" ]]; then
    hadoop_debug "No secure user defined."
    return 1
  fi
  return 0
}
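# Illustrative sketch: assuming a hadoop-env.sh that contains
#
#   export HDFS_DATANODE_SECURE_USER=hdfs
#
# and a subcommand that has set HADOOP_SECURE_CLASSNAME, then
# `hadoop_detect_priv_subcmd hdfs datanode` returns 0 (privileged);
# if either piece is missing, it returns 1.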
## @description  Build custom subcommand var
## @audience     public
## @stability    stable
## @replaceable  yes
## @param        command
## @param        subcommand
## @param        customid
## @return       string
function hadoop_build_custom_subcmd_var
{
  declare program=$1
  declare command=$2
  declare custom=$3
  declare uprogram
  declare ucommand

  if [[ -z "${BASH_VERSINFO[0]}" ]] \
     || [[ "${BASH_VERSINFO[0]}" -lt 4 ]]; then
    uprogram=$(echo "${program}" | tr '[:lower:]' '[:upper:]')
    ucommand=$(echo "${command}" | tr '[:lower:]' '[:upper:]')
  else
    uprogram=${program^^}
    ucommand=${command^^}
  fi

  echo "${uprogram}_${ucommand}_${custom}"
}
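# Illustrative sketch: the pieces are upper-cased and joined with
# underscores, e.g.:
#
#   hadoop_build_custom_subcmd_var yarn resourcemanager OPTS
#   # => YARN_RESOURCEMANAGER_OPTS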
## @description  Verify that username in a var converts to user id
## @audience     public
## @stability    stable
## @replaceable  yes
## @param        userstring
## @return       0 for success
## @return       1 for failure
function hadoop_verify_user_resolves
{
  declare userstr=$1

  if [[ -z ${userstr} || -z ${!userstr} ]] ; then
    return 1
  fi

  id -u "${!userstr}" >/dev/null 2>&1
}
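# Illustrative sketch: the argument is the *name* of a variable whose
# value should resolve to a real user (the setting below is
# hypothetical):
#
#   HDFS_DATANODE_SECURE_USER=hdfs
#   hadoop_verify_user_resolves HDFS_DATANODE_SECURE_USER && echo "ok"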
## @description  Verify that ${USER} is allowed to execute the
## @description  given subcommand.
## @audience     public
## @stability    stable
## @replaceable  yes
## @param        command
## @param        subcommand
## @return       return 0 on success
## @return       exit 1 on failure
function hadoop_verify_user_perm
{
  declare program=$1
  declare command=$2
  declare uvar

  if [[ ${command} =~ \. ]]; then
    return 1
  fi

  uvar=$(hadoop_build_custom_subcmd_var "${program}" "${command}" USER)

  if [[ -n ${!uvar} ]]; then
    if [[ ${!uvar} != "${USER}" ]]; then
      hadoop_error "ERROR: ${command} can only be executed by ${!uvar}."
      exit 1
    fi
  fi
  return 0
}
## @description  Determine whether the current (privileged) user needs
## @description  to re-exec the given subcommand as the configured
## @description  (program)_(command)_USER.
## @audience     public
## @stability    stable
## @replaceable  yes
## @param        subcommand
## @return       1 on no re-exec needed
## @return       0 on need to re-exec
function hadoop_need_reexec
{
  declare program=$1
  declare command=$2
  declare uvar

  # we've already been re-execed, bail
  if [[ "${HADOOP_REEXECED_CMD}" = true ]]; then
    return 1
  fi

  if [[ ${command} =~ \. ]]; then
    return 1
  fi

  # if we have privilege, and the _USER is defined, and _USER is
  # set to someone who isn't us, then yes, we should re-exec.
  # otherwise no, don't re-exec and let the system deal with it.

  if hadoop_privilege_check; then
    uvar=$(hadoop_build_custom_subcmd_var "${program}" "${command}" USER)
    if [[ -n ${!uvar} ]]; then
      if [[ ${!uvar} != "${USER}" ]]; then
        return 0
      fi
    fi
  fi
  return 1
}
## @description  Add custom (program)_(command)_OPTS to HADOOP_OPTS.
## @description  Also handles the deprecated cases from pre-3.x.
## @audience     public
## @stability    evolving
## @replaceable  yes
## @param        program
## @param        subcommand
## @return       will exit on failure conditions
function hadoop_subcommand_opts
{
  declare program=$1
  declare command=$2
  declare uvar
  declare depvar
  declare uprogram
  declare ucommand

  if [[ -z "${program}" || -z "${command}" ]]; then
    return 1
  fi

  if [[ ${command} =~ \. ]]; then
    return 1
  fi

  # bash 4 and up have built-in ways to upper and lower
  # case the contents of vars. This is faster than
  # calling tr.

  ## We don't call hadoop_build_custom_subcmd_var here
  ## since we need to construct this for the deprecation
  ## cases. For Hadoop 4.x, this needs to get cleaned up.

  if [[ -z "${BASH_VERSINFO[0]}" ]] \
     || [[ "${BASH_VERSINFO[0]}" -lt 4 ]]; then
    uprogram=$(echo "${program}" | tr '[:lower:]' '[:upper:]')
    ucommand=$(echo "${command}" | tr '[:lower:]' '[:upper:]')
  else
    uprogram=${program^^}
    ucommand=${command^^}
  fi

  uvar="${uprogram}_${ucommand}_OPTS"

  # Let's handle all of the deprecation cases early
  # HADOOP_NAMENODE_OPTS -> HDFS_NAMENODE_OPTS

  depvar="HADOOP_${ucommand}_OPTS"

  if [[ "${depvar}" != "${uvar}" ]]; then
    if [[ -n "${!depvar}" ]]; then
      hadoop_deprecate_envvar "${depvar}" "${uvar}"
    fi
  fi

  if [[ -n ${!uvar} ]]; then
    hadoop_debug "Appending ${uvar} onto HADOOP_OPTS"
    HADOOP_OPTS="${HADOOP_OPTS} ${!uvar}"
    return 0
  fi
}
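# Illustrative sketch: for the hdfs namenode subcommand, the modern
# variable is HDFS_NAMENODE_OPTS; setting only the deprecated pre-3.x
# spelling still works, with a deprecation warning:
#
#   export HADOOP_NAMENODE_OPTS="-Xmx4g"   # deprecated name
#   hadoop_subcommand_opts hdfs namenode   # warns, then appends the
#                                          # value via HDFS_NAMENODE_OPTS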
## @description  Add custom (program)_(command)_SECURE_EXTRA_OPTS to HADOOP_OPTS.
## @description  This *does not* handle the pre-3.x deprecated cases
## @audience     public
## @stability    stable
## @replaceable  yes
## @param        program
## @param        subcommand
## @return       will exit on failure conditions
function hadoop_subcommand_secure_opts
{
  declare program=$1
  declare command=$2
  declare uvar
  declare uprogram
  declare ucommand

  if [[ -z "${program}" || -z "${command}" ]]; then
    return 1
  fi

  # HDFS_DATANODE_SECURE_EXTRA_OPTS
  # HDFS_NFS3_SECURE_EXTRA_OPTS
  # ...
  uvar=$(hadoop_build_custom_subcmd_var "${program}" "${command}" SECURE_EXTRA_OPTS)

  if [[ -n ${!uvar} ]]; then
    hadoop_debug "Appending ${uvar} onto HADOOP_OPTS"
    HADOOP_OPTS="${HADOOP_OPTS} ${!uvar}"
    return 0
  fi
}
## @description  Perform the 'hadoop classpath', etc subcommand with the given
## @description  parameters
## @audience     private
## @stability    evolving
## @replaceable  yes
## @param        [parameters]
## @return       will print & exit with no params
function hadoop_do_classpath_subcommand
{
  if [[ "$#" -gt 1 ]]; then
    eval "$1"=org.apache.hadoop.util.Classpath
  else
    hadoop_finalize
    echo "${CLASSPATH}"
    exit 0
  fi
}
## @description  Generic shell script option parser. Sets
## @description  HADOOP_PARSE_COUNTER to the number of arguments
## @description  the caller should shift
## @audience     private
## @stability    evolving
## @replaceable  yes
## @param        [parameters, typically "$@"]
function hadoop_parse_args
{
  HADOOP_DAEMON_MODE="default"
  HADOOP_PARSE_COUNTER=0

  # not all of the options supported here are supported by all commands
  # however these are:
  hadoop_add_option "--config dir" "Hadoop config directory"
  hadoop_add_option "--debug" "turn on shell script debug mode"
  hadoop_add_option "--help" "usage information"

  while true; do
    hadoop_debug "hadoop_parse_args: processing $1"
    case $1 in
      --buildpaths)
        HADOOP_ENABLE_BUILD_PATHS=true
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+1))
      ;;
      --config)
        shift
        confdir=$1
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
        if [[ -d "${confdir}" ]]; then
          HADOOP_CONF_DIR="${confdir}"
        elif [[ -z "${confdir}" ]]; then
          hadoop_error "ERROR: No parameter provided for --config "
          hadoop_exit_with_usage 1
        else
          hadoop_error "ERROR: Cannot find configuration directory \"${confdir}\""
          hadoop_exit_with_usage 1
        fi
      ;;
      --daemon)
        shift
        HADOOP_DAEMON_MODE=$1
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
        if [[ -z "${HADOOP_DAEMON_MODE}" || \
          ! "${HADOOP_DAEMON_MODE}" =~ ^st(art|op|atus)$ ]]; then
          hadoop_error "ERROR: --daemon must be followed by either \"start\", \"stop\", or \"status\"."
          hadoop_exit_with_usage 1
        fi
      ;;
      --debug)
        shift
        HADOOP_SHELL_SCRIPT_DEBUG=true
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+1))
      ;;
      --help|-help|-h|help|--h|--\?|-\?|\?)
        hadoop_exit_with_usage 0
      ;;
      --hostnames)
        shift
        HADOOP_WORKER_NAMES="$1"
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
      ;;
      --hosts)
        shift
        hadoop_populate_workers_file "$1"
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
      ;;
      --loglevel)
        shift
        # shellcheck disable=SC2034
        HADOOP_LOGLEVEL="$1"
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
      ;;
      --reexec)
        shift
        if [[ "${HADOOP_REEXECED_CMD}" = true ]]; then
          hadoop_error "ERROR: re-exec fork bomb prevention: --reexec already called"
          exit 1
        fi
        HADOOP_REEXECED_CMD=true
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+1))
      ;;
      --workers)
        shift
        # shellcheck disable=SC2034
        HADOOP_WORKER_MODE=true
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+1))
      ;;
      *)
        break
      ;;
    esac
  done

  hadoop_debug "hadoop_parse: asking caller to skip ${HADOOP_PARSE_COUNTER}"
}
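# Illustrative sketch: entry-point scripts call this and then discard
# the options that were consumed:
#
#   hadoop_parse_args "$@"
#   shift "${HADOOP_PARSE_COUNTER}"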
## @description  Handle subcommands from main program entries
## @audience     private
## @stability    evolving
## @replaceable  yes
function hadoop_generic_java_subcmd_handler
{
  declare priv_outfile
  declare priv_errfile
  declare priv_pidfile
  declare daemon_outfile
  declare daemon_pidfile
  declare secureuser

  # The default/expected way to determine if a daemon is going to run in secure
  # mode is defined by hadoop_detect_priv_subcmd. If this returns true
  # then setup the secure user var and tell the world we're in secure mode

  if hadoop_detect_priv_subcmd "${HADOOP_SHELL_EXECNAME}" "${HADOOP_SUBCMD}"; then
    HADOOP_SUBCMD_SECURESERVICE=true
    secureuser=$(hadoop_build_custom_subcmd_var "${HADOOP_SHELL_EXECNAME}" "${HADOOP_SUBCMD}" SECURE_USER)

    if ! hadoop_verify_user_resolves "${secureuser}"; then
      hadoop_error "ERROR: User defined in ${secureuser} (${!secureuser}) does not exist. Aborting."
      exit 1
    fi

    HADOOP_SECURE_USER="${!secureuser}"
  fi

  # check if we're running in secure mode.
  # breaking this up from the above lets 3rd parties
  # do things a bit differently
  #
  # secure services require some extra setup
  # if yes, then we need to define all of the priv and daemon stuff
  # if not, then we just need to define daemon stuff.
  # note the daemon vars are purposefully different between the two

  if [[ "${HADOOP_SUBCMD_SECURESERVICE}" = true ]]; then

    hadoop_subcommand_secure_opts "${HADOOP_SHELL_EXECNAME}" "${HADOOP_SUBCMD}"

    hadoop_verify_secure_prereq
    hadoop_setup_secure_service
    priv_outfile="${HADOOP_LOG_DIR}/privileged-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.out"
    priv_errfile="${HADOOP_LOG_DIR}/privileged-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.err"
    priv_pidfile="${HADOOP_PID_DIR}/privileged-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}.pid"
    daemon_outfile="${HADOOP_LOG_DIR}/hadoop-${HADOOP_SECURE_USER}-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.out"
    daemon_pidfile="${HADOOP_PID_DIR}/hadoop-${HADOOP_SECURE_USER}-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}.pid"
  else
    daemon_outfile="${HADOOP_LOG_DIR}/hadoop-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.out"
    daemon_pidfile="${HADOOP_PID_DIR}/hadoop-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}.pid"
  fi

  # are we actually in daemon mode?
  # if yes, use the daemon logger and the appropriate log file.
  if [[ "${HADOOP_DAEMON_MODE}" != "default" ]]; then
    HADOOP_ROOT_LOGGER="${HADOOP_DAEMON_ROOT_LOGGER}"
    if [[ "${HADOOP_SUBCMD_SECURESERVICE}" = true ]]; then
      HADOOP_LOGFILE="hadoop-${HADOOP_SECURE_USER}-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.log"
    else
      HADOOP_LOGFILE="hadoop-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.log"
    fi
  fi

  # finish defining the environment: system properties, env vars, class paths, etc.
  hadoop_finalize

  # do the hard work of launching a daemon or just executing our interactive
  # java class
  if [[ "${HADOOP_SUBCMD_SUPPORTDAEMONIZATION}" = true ]]; then
    if [[ "${HADOOP_SUBCMD_SECURESERVICE}" = true ]]; then
      hadoop_secure_daemon_handler \
        "${HADOOP_DAEMON_MODE}" \
        "${HADOOP_SUBCMD}" \
        "${HADOOP_SECURE_CLASSNAME}" \
        "${daemon_pidfile}" \
        "${daemon_outfile}" \
        "${priv_pidfile}" \
        "${priv_outfile}" \
        "${priv_errfile}" \
        "${HADOOP_SUBCMD_ARGS[@]}"
    else
      hadoop_daemon_handler \
        "${HADOOP_DAEMON_MODE}" \
        "${HADOOP_SUBCMD}" \
        "${HADOOP_CLASSNAME}" \
        "${daemon_pidfile}" \
        "${daemon_outfile}" \
        "${HADOOP_SUBCMD_ARGS[@]}"
    fi
    exit $?
  else
    hadoop_java_exec "${HADOOP_SUBCMD}" "${HADOOP_CLASSNAME}" "${HADOOP_SUBCMD_ARGS[@]}"
  fi
}