#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# we need to declare this globally as an array, which can only
# be done outside of a function
declare -a HADOOP_SUBCMD_USAGE
declare -a HADOOP_OPTION_USAGE
declare -a HADOOP_SUBCMD_USAGE_TYPES

## @description Print a message to stderr
## @audience public
## @stability stable
## @replaceable no
## @param string
function hadoop_error
{
  echo "$*" 1>&2
}

## @description Print a message to stderr if --debug is turned on
## @audience public
## @stability stable
## @replaceable no
## @param string
function hadoop_debug
{
  if [[ -n "${HADOOP_SHELL_SCRIPT_DEBUG}" ]]; then
    echo "DEBUG: $*" 1>&2
  fi
}

## @description Given a filename or dir, return the absolute version of it
## @description This works as an alternative to readlink, which isn't
## @description portable.
## @audience public
## @stability stable
## @param fsobj
## @replaceable no
## @return 0 success
## @return 1 failure
## @return stdout abspath
function hadoop_abs
{
  declare obj=$1
  declare dir
  declare fn
  declare dirret

  if [[ ! -e ${obj} ]]; then
    return 1
  elif [[ -d ${obj} ]]; then
    dir=${obj}
  else
    dir=$(dirname -- "${obj}")
    fn=$(basename -- "${obj}")
    fn="/${fn}"
  fi

  dir=$(cd -P -- "${dir}" >/dev/null 2>/dev/null && pwd -P)
  dirret=$?
  if [[ ${dirret} = 0 ]]; then
    echo "${dir}${fn}"
    return 0
  fi
  return 1
}
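
# Illustrative usage (a sketch, not executed when this library is
# sourced; the relative path below is a made-up example):
#
#   if abspath=$(hadoop_abs "../etc/hadoop"); then
#     echo "resolved to ${abspath}"
#   fi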

## @description Given variable $1 delete $2 from it
## @audience public
## @stability stable
## @replaceable no
function hadoop_delete_entry
{
  if [[ ${!1} =~ \ ${2}\ ]] ; then
    hadoop_debug "Removing ${2} from ${1}"
    eval "${1}"=\""${!1// ${2} }"\"
  fi
}

## @description Given variable $1 add $2 to it
## @audience public
## @stability stable
## @replaceable no
function hadoop_add_entry
{
  if [[ ! ${!1} =~ \ ${2}\ ]] ; then
    hadoop_debug "Adding ${2} to ${1}"
    #shellcheck disable=SC2140
    eval "${1}"=\""${!1} ${2} "\"
  fi
}
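
# Illustrative usage: these helpers treat the variable as a
# space-delimited set, storing entries with surrounding spaces
# (MYLIST is a made-up example variable):
#
#   MYLIST=""
#   hadoop_add_entry MYLIST alpha      # MYLIST is now " alpha "
#   hadoop_add_entry MYLIST alpha      # no-op: already present
#   hadoop_delete_entry MYLIST alpha   # " alpha " is removed again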

## @description Given variable $1 determine if $2 is in it
## @audience public
## @stability stable
## @replaceable no
## @return 0 = yes, 1 = no
function hadoop_verify_entry
{
  # this unfortunately can't really be tested by bats. :(
  # so if this changes, be aware that unit tests effectively
  # do this function in them
  [[ ${!1} =~ \ ${2}\ ]]
}

## @description Check if an array has a given value
## @audience public
## @stability stable
## @replaceable yes
## @param element
## @param array
## @returns 0 = yes
## @returns 1 = no
function hadoop_array_contains
{
  declare element=$1
  shift
  declare val

  if [[ "$#" -eq 0 ]]; then
    return 1
  fi

  for val in "${@}"; do
    if [[ "${val}" == "${element}" ]]; then
      return 0
    fi
  done
  return 1
}
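
# Illustrative usage (the array and value are examples):
#
#   declare -a fruits=("apple" "banana")
#   if hadoop_array_contains "apple" "${fruits[@]}"; then
#     echo "apple is present"
#   fi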

## @description Add the `appendstring` if it is not
## @description already present in the given array
## @audience public
## @stability stable
## @replaceable yes
## @param arrayvar
## @param appendstring
function hadoop_add_array_param
{
  declare arrname=$1
  declare add=$2
  declare arrref="${arrname}[@]"
  declare array=("${!arrref}")

  if ! hadoop_array_contains "${add}" "${array[@]}"; then
    #shellcheck disable=SC1083,SC2086
    eval ${arrname}=\(\"\${array[@]}\" \"${add}\" \)
    hadoop_debug "$1 accepted $2"
  else
    hadoop_debug "$1 declined $2"
  fi
}
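
# Illustrative usage (ARGS is a made-up example array):
#
#   declare -a ARGS=("-a")
#   hadoop_add_array_param ARGS "-b"   # accepted: ARGS=(-a -b)
#   hadoop_add_array_param ARGS "-b"   # declined: already present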

## @description Sort an array (must not contain regexps)
## @audience public
## @stability stable
## @replaceable yes
## @param arrayvar
function hadoop_sort_array
{
  declare arrname=$1
  declare arrref="${arrname}[@]"
  declare array=("${!arrref}")
  declare oifs
  declare globstatus
  declare -a sa

  globstatus=$(set -o | grep noglob | awk '{print $NF}')

  set -f
  oifs=${IFS}

  # shellcheck disable=SC2034
  IFS=$'\n' sa=($(sort <<<"${array[*]}"))

  # shellcheck disable=SC1083
  eval "${arrname}"=\(\"\${sa[@]}\"\)

  IFS=${oifs}
  if [[ "${globstatus}" = off ]]; then
    set +f
  fi
}

## @description Check if we are running with privilege.
## @description By default, this implementation looks for
## @description EUID=0. For OSes that have true privilege
## @description separation, this should be something more complex
## @audience private
## @stability evolving
## @replaceable yes
## @return 1 = no priv
## @return 0 = priv
function hadoop_privilege_check
{
  [[ "${EUID}" = 0 ]]
}

## @description Execute a command via su when running as root,
## @description if the given user is found; exit with
## @description failure if not.
## @description Otherwise just run the command. (This is intended to
## @description be used by the start-*/stop-* scripts.)
## @audience private
## @stability evolving
## @replaceable yes
## @param user
## @param commandstring
## @return exitstatus
function hadoop_su
{
  declare user=$1
  shift

  if hadoop_privilege_check; then
    if hadoop_verify_user_resolves user; then
      su -l "${user}" -- "$@"
    else
      hadoop_error "ERROR: Refusing to run as root: ${user} account is not found. Aborting."
      return 1
    fi
  else
    "$@"
  fi
}

## @description Execute a command via su when running as root
## @description with extra support for commands that might
## @description legitimately start as root (e.g., datanode)
## @description (This is intended to
## @description be used by the start-*/stop-* scripts.)
## @audience private
## @stability evolving
## @replaceable no
## @param user
## @param commandstring
## @return exitstatus
function hadoop_uservar_su
{
  ## startup matrix:
  #
  # if $EUID != 0, then exec
  # if $EUID = 0 then
  #   if hdfs_subcmd_user is defined, call hadoop_su to exec
  #   if hdfs_subcmd_user is not defined, error
  #
  # For secure daemons, this means both the secure and insecure env vars need to be
  # defined. e.g., HDFS_DATANODE_USER=root HDFS_DATANODE_SECURE_USER=hdfs
  # This function will pick up the "normal" var, switch to that user, then
  # execute the command which will then pick up the "secure" version.
  #

  declare program=$1
  declare command=$2
  shift 2

  declare uprogram
  declare ucommand
  declare uvar
  declare svar

  if hadoop_privilege_check; then
    uvar=$(hadoop_build_custom_subcmd_var "${program}" "${command}" USER)
    svar=$(hadoop_build_custom_subcmd_var "${program}" "${command}" SECURE_USER)

    if [[ -n "${!uvar}" ]]; then
      hadoop_su "${!uvar}" "$@"
    elif [[ -n "${!svar}" ]]; then
      ## if we are here, then SECURE_USER with no USER defined
      ## we are already privileged, so just run the command and hope
      ## for the best
      "$@"
    else
      hadoop_error "ERROR: Attempting to operate on ${program} ${command} as root"
      hadoop_error "ERROR: but there is no ${uvar} defined. Aborting operation."
      return 1
    fi
  else
    "$@"
  fi
}
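
# Illustrative sketch, following the startup matrix above: a secure
# datanode started as root would typically carry both vars in
# hadoop-env.sh, e.g.:
#
#   HDFS_DATANODE_USER=root
#   HDFS_DATANODE_SECURE_USER=hdfs
#
# so that a call like
#
#   hadoop_uservar_su hdfs datanode "${bin}/hdfs" datanode
#
# stays privileged here, while the re-executed command picks up the
# secure user ("${bin}" is a placeholder for the caller's bin dir).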

## @description Add a subcommand to the usage output
## @audience private
## @stability evolving
## @replaceable no
## @param subcommand
## @param subcommandtype
## @param subcommanddesc
function hadoop_add_subcommand
{
  declare subcmd=$1
  declare subtype=$2
  declare text=$3

  hadoop_debug "${subcmd} as a ${subtype}"

  hadoop_add_array_param HADOOP_SUBCMD_USAGE_TYPES "${subtype}"

  # done in this order so that sort works later
  HADOOP_SUBCMD_USAGE[${HADOOP_SUBCMD_USAGE_COUNTER}]="${subcmd}@${subtype}@${text}"
  ((HADOOP_SUBCMD_USAGE_COUNTER=HADOOP_SUBCMD_USAGE_COUNTER+1))
}

## @description Add an option to the usage output
## @audience private
## @stability evolving
## @replaceable no
## @param option
## @param optiondesc
function hadoop_add_option
{
  local option=$1
  local text=$2

  HADOOP_OPTION_USAGE[${HADOOP_OPTION_USAGE_COUNTER}]="${option}@${text}"
  ((HADOOP_OPTION_USAGE_COUNTER=HADOOP_OPTION_USAGE_COUNTER+1))
}

## @description Reset the usage information to blank
## @audience private
## @stability evolving
## @replaceable no
function hadoop_reset_usage
{
  HADOOP_SUBCMD_USAGE=()
  HADOOP_OPTION_USAGE=()
  HADOOP_SUBCMD_USAGE_TYPES=()
  HADOOP_SUBCMD_USAGE_COUNTER=0
  HADOOP_OPTION_USAGE_COUNTER=0
}
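
# Illustrative usage: a front-end script registers its help text and
# then renders it; the strings and MYNAME below are examples, not a
# fixed contract:
#
#   hadoop_reset_usage
#   hadoop_add_option "--debug" "turn on shell script debug mode"
#   hadoop_add_subcommand "fs" client "run a generic filesystem user client"
#   hadoop_generate_usage "${MYNAME}" true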

## @description Print a screen-size aware two-column output
## @description if reqtype is not null, only print those requested
## @audience private
## @stability evolving
## @replaceable no
## @param reqtype
## @param array
function hadoop_generic_columnprinter
{
  declare reqtype=$1
  shift
  declare -a input=("$@")
  declare -i i=0
  declare -i counter=0
  declare line
  declare text
  declare option
  declare giventext
  declare -i maxoptsize
  declare -i foldsize
  declare -a tmpa
  declare numcols
  declare brup

  if [[ -n "${COLUMNS}" ]]; then
    numcols=${COLUMNS}
  else
    numcols=$(tput cols) 2>/dev/null
    COLUMNS=${numcols}
  fi

  if [[ -z "${numcols}"
     || ! "${numcols}" =~ ^[0-9]+$ ]]; then
    numcols=75
  else
    ((numcols=numcols-5))
  fi

  while read -r line; do
    tmpa[${counter}]=${line}
    ((counter=counter+1))
    IFS='@' read -ra brup <<< "${line}"
    option="${brup[0]}"
    if [[ ${#option} -gt ${maxoptsize} ]]; then
      maxoptsize=${#option}
    fi
  done < <(for text in "${input[@]}"; do
    echo "${text}"
  done | sort)

  i=0
  ((foldsize=numcols-maxoptsize))

  until [[ $i -eq ${#tmpa[@]} ]]; do
    IFS='@' read -ra brup <<< "${tmpa[$i]}"

    option="${brup[0]}"
    cmdtype="${brup[1]}"
    giventext="${brup[2]}"

    if [[ -n "${reqtype}" ]]; then
      if [[ "${cmdtype}" != "${reqtype}" ]]; then
        ((i=i+1))
        continue
      fi
    fi

    if [[ -z "${giventext}" ]]; then
      giventext=${cmdtype}
    fi

    while read -r line; do
      printf "%-${maxoptsize}s %-s\n" "${option}" "${line}"
      option=" "
    done < <(echo "${giventext}"| fold -s -w ${foldsize})
    ((i=i+1))
  done
}

## @description generate standard usage output
## @description and optionally takes a class
## @audience private
## @stability evolving
## @replaceable no
## @param execname
## @param true|false
## @param [text to use in place of SUBCOMMAND]
function hadoop_generate_usage
{
  declare cmd=$1
  declare takesclass=$2
  declare subcmdtext=${3:-"SUBCOMMAND"}
  declare haveoptions
  declare optstring
  declare havesubs
  declare subcmdstring
  declare cmdtype

  cmd=${cmd##*/}

  if [[ -n "${HADOOP_OPTION_USAGE_COUNTER}"
     && "${HADOOP_OPTION_USAGE_COUNTER}" -gt 0 ]]; then
    haveoptions=true
    optstring=" [OPTIONS]"
  fi

  if [[ -n "${HADOOP_SUBCMD_USAGE_COUNTER}"
     && "${HADOOP_SUBCMD_USAGE_COUNTER}" -gt 0 ]]; then
    havesubs=true
    subcmdstring=" ${subcmdtext} [${subcmdtext} OPTIONS]"
  fi

  echo "Usage: ${cmd}${optstring}${subcmdstring}"
  if [[ ${takesclass} = true ]]; then
    echo " or    ${cmd}${optstring} CLASSNAME [CLASSNAME OPTIONS]"
    echo "  where CLASSNAME is a user-provided Java class"
  fi

  if [[ "${haveoptions}" = true ]]; then
    echo ""
    echo "  OPTIONS is none or any of:"
    echo ""
    hadoop_generic_columnprinter "" "${HADOOP_OPTION_USAGE[@]}"
  fi

  if [[ "${havesubs}" = true ]]; then
    echo ""
    echo "  ${subcmdtext} is one of:"
    echo ""

    if [[ "${#HADOOP_SUBCMD_USAGE_TYPES[@]}" -gt 0 ]]; then
      hadoop_sort_array HADOOP_SUBCMD_USAGE_TYPES
      for subtype in "${HADOOP_SUBCMD_USAGE_TYPES[@]}"; do
        #shellcheck disable=SC2086
        cmdtype="$(tr '[:lower:]' '[:upper:]' <<< ${subtype:0:1})${subtype:1}"
        printf "\n    %s Commands:\n\n" "${cmdtype}"
        hadoop_generic_columnprinter "${subtype}" "${HADOOP_SUBCMD_USAGE[@]}"
      done
    else
      hadoop_generic_columnprinter "" "${HADOOP_SUBCMD_USAGE[@]}"
    fi
    echo ""
    echo "${subcmdtext} may print help when invoked w/o parameters or with -h."
  fi
}

## @description Replace `oldvar` with `newvar` if `oldvar` exists.
## @audience public
## @stability stable
## @replaceable yes
## @param oldvar
## @param newvar
function hadoop_deprecate_envvar
{
  local oldvar=$1
  local newvar=$2
  local oldval=${!oldvar}
  local newval=${!newvar}

  if [[ -n "${oldval}" ]]; then
    hadoop_error "WARNING: ${oldvar} has been replaced by ${newvar}. Using value of ${oldvar}."
    # shellcheck disable=SC2086
    eval ${newvar}=\"${oldval}\"

    # shellcheck disable=SC2086
    newval=${oldval}

    # shellcheck disable=SC2086
    eval ${newvar}=\"${newval}\"
  fi
}
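
# Illustrative usage: map a retired variable onto its replacement so
# old configs keep working (the variable names here are examples):
#
#   hadoop_deprecate_envvar HADOOP_SLAVES HADOOP_WORKERS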

## @description Declare `var` being used and print its value.
## @audience public
## @stability stable
## @replaceable yes
## @param var
function hadoop_using_envvar
{
  local var=$1
  local val=${!var}

  if [[ -n "${val}" ]]; then
    hadoop_debug "${var} = ${val}"
  fi
}

## @description Create the directory 'dir'.
## @audience public
## @stability stable
## @replaceable yes
## @param dir
function hadoop_mkdir
{
  local dir=$1

  if [[ ! -w "${dir}" ]] && [[ ! -d "${dir}" ]]; then
    hadoop_error "WARNING: ${dir} does not exist. Creating."
    if ! mkdir -p "${dir}"; then
      hadoop_error "ERROR: Unable to create ${dir}. Aborting."
      exit 1
    fi
  fi
}

## @description Bootstraps the Hadoop shell environment
## @audience private
## @stability evolving
## @replaceable no
function hadoop_bootstrap
{
  # the root of the Hadoop installation
  # See HADOOP-6255 for the expected directory structure layout

  if [[ -n "${DEFAULT_LIBEXEC_DIR}" ]]; then
    hadoop_error "WARNING: DEFAULT_LIBEXEC_DIR ignored. It has been replaced by HADOOP_DEFAULT_LIBEXEC_DIR."
  fi

  # By now, HADOOP_LIBEXEC_DIR should have been defined upstream
  # We can piggyback off of that to figure out where the default
  # HADOOP_PREFIX should be. This allows us to run without
  # HADOOP_HOME ever being defined by a human! As a consequence
  # HADOOP_LIBEXEC_DIR now becomes perhaps the single most powerful
  # env var within Hadoop.
  if [[ -z "${HADOOP_LIBEXEC_DIR}" ]]; then
    hadoop_error "HADOOP_LIBEXEC_DIR is not defined. Exiting."
    exit 1
  fi

  HADOOP_DEFAULT_PREFIX=$(cd -P -- "${HADOOP_LIBEXEC_DIR}/.." >/dev/null && pwd -P)
  HADOOP_HOME=${HADOOP_HOME:-$HADOOP_DEFAULT_PREFIX}
  export HADOOP_HOME

  #
  # short-cuts. vendors may redefine these as well, preferably
  # in hadoop-layouts.sh
  #
  HADOOP_COMMON_DIR=${HADOOP_COMMON_DIR:-"share/hadoop/common"}
  HADOOP_COMMON_LIB_JARS_DIR=${HADOOP_COMMON_LIB_JARS_DIR:-"share/hadoop/common/lib"}
  HADOOP_COMMON_LIB_NATIVE_DIR=${HADOOP_COMMON_LIB_NATIVE_DIR:-"lib/native"}
  HDFS_DIR=${HDFS_DIR:-"share/hadoop/hdfs"}
  HDFS_LIB_JARS_DIR=${HDFS_LIB_JARS_DIR:-"share/hadoop/hdfs/lib"}
  YARN_DIR=${YARN_DIR:-"share/hadoop/yarn"}
  YARN_LIB_JARS_DIR=${YARN_LIB_JARS_DIR:-"share/hadoop/yarn/lib"}
  MAPRED_DIR=${MAPRED_DIR:-"share/hadoop/mapreduce"}
  MAPRED_LIB_JARS_DIR=${MAPRED_LIB_JARS_DIR:-"share/hadoop/mapreduce/lib"}
  HDDS_DIR=${HDDS_DIR:-"share/hadoop/hdds"}
  HDDS_LIB_JARS_DIR=${HDDS_LIB_JARS_DIR:-"share/hadoop/hdds/lib"}
  OZONE_DIR=${OZONE_DIR:-"share/hadoop/ozone"}
  OZONE_LIB_JARS_DIR=${OZONE_LIB_JARS_DIR:-"share/hadoop/ozone/lib"}
  CBLOCK_DIR=${CBLOCK_DIR:-"share/hadoop/cblock"}
  CBLOCK_LIB_JARS_DIR=${CBLOCK_LIB_JARS_DIR:-"share/hadoop/cblock/lib"}

  HADOOP_TOOLS_HOME=${HADOOP_TOOLS_HOME:-${HADOOP_HOME}}
  HADOOP_TOOLS_DIR=${HADOOP_TOOLS_DIR:-"share/hadoop/tools"}
  HADOOP_TOOLS_LIB_JARS_DIR=${HADOOP_TOOLS_LIB_JARS_DIR:-"${HADOOP_TOOLS_DIR}/lib"}

  # by default, whatever we are about to run doesn't support
  # daemonization
  HADOOP_SUBCMD_SUPPORTDAEMONIZATION=false

  # by default, we have not been self-re-execed
  HADOOP_REEXECED_CMD=false

  HADOOP_SUBCMD_SECURESERVICE=false

  # This is the default we claim in hadoop-env.sh
  JSVC_HOME=${JSVC_HOME:-"/usr/bin"}

  # usage output set to zero
  hadoop_reset_usage

  export HADOOP_OS_TYPE=${HADOOP_OS_TYPE:-$(uname -s)}

  # defaults
  export HADOOP_OPTS=${HADOOP_OPTS:-"-Djava.net.preferIPv4Stack=true"}
  hadoop_debug "Initial HADOOP_OPTS=${HADOOP_OPTS}"
}

## @description Locate Hadoop's configuration directory
## @audience private
## @stability evolving
## @replaceable no
function hadoop_find_confdir
{
  local conf_dir

  # An attempt at compatibility with some Hadoop 1.x
  # installs.
  if [[ -e "${HADOOP_HOME}/conf/hadoop-env.sh" ]]; then
    conf_dir="conf"
  else
    conf_dir="etc/hadoop"
  fi
  export HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-${HADOOP_HOME}/${conf_dir}}"

  hadoop_debug "HADOOP_CONF_DIR=${HADOOP_CONF_DIR}"
}

## @description Validate ${HADOOP_CONF_DIR}
## @audience public
## @stability stable
## @replaceable yes
## @return will exit on failure conditions
function hadoop_verify_confdir
{
  # Check only log4j.properties by default.
  # --loglevel does not work without logger settings in log4j.properties.
  if [[ ! -f "${HADOOP_CONF_DIR}/log4j.properties" ]]; then
    hadoop_error "WARNING: log4j.properties is not found. HADOOP_CONF_DIR may be incomplete."
  fi
}

## @description Import the hadoop-env.sh settings
## @audience private
## @stability evolving
## @replaceable no
function hadoop_exec_hadoopenv
{
  if [[ -z "${HADOOP_ENV_PROCESSED}" ]]; then
    if [[ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]]; then
      export HADOOP_ENV_PROCESSED=true
      # shellcheck source=./hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh
      . "${HADOOP_CONF_DIR}/hadoop-env.sh"
    fi
  fi
}

## @description Import the replaced functions
## @audience private
## @stability evolving
## @replaceable no
function hadoop_exec_userfuncs
{
  if [[ -e "${HADOOP_CONF_DIR}/hadoop-user-functions.sh" ]]; then
    # shellcheck disable=SC1090
    . "${HADOOP_CONF_DIR}/hadoop-user-functions.sh"
  fi
}

## @description Read the user's settings. This provides for users to
## @description override and/or append hadoop-env.sh. It is not meant
## @description as a complete system override.
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_exec_user_hadoopenv
{
  if [[ -f "${HOME}/.hadoop-env" ]]; then
    hadoop_debug "Applying the user's .hadoop-env"
    # shellcheck disable=SC1090
    . "${HOME}/.hadoop-env"
  fi
}

## @description Read the user's settings. This provides for users to
## @description run Hadoop Shell API after system bootstrap
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_exec_hadooprc
{
  if [[ -f "${HOME}/.hadooprc" ]]; then
    hadoop_debug "Applying the user's .hadooprc"
    # shellcheck disable=SC1090
    . "${HOME}/.hadooprc"
  fi
}

## @description Import shellprofile.d content
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_import_shellprofiles
{
  local i
  local files1
  local files2

  if [[ -d "${HADOOP_LIBEXEC_DIR}/shellprofile.d" ]]; then
    files1=(${HADOOP_LIBEXEC_DIR}/shellprofile.d/*.sh)
    hadoop_debug "shellprofiles: ${files1[*]}"
  else
    hadoop_error "WARNING: ${HADOOP_LIBEXEC_DIR}/shellprofile.d doesn't exist. Functionality may not work."
  fi

  if [[ -d "${HADOOP_CONF_DIR}/shellprofile.d" ]]; then
    files2=(${HADOOP_CONF_DIR}/shellprofile.d/*.sh)
  fi

  # enable bundled shellprofiles that come
  # from hadoop-tools. This converts the user-facing HADOOP_OPTIONAL_TOOLS
  # to the HADOOP_TOOLS_OPTIONS that the shell profiles expect.
  # See dist-tools-hooks-maker for how the example HADOOP_OPTIONAL_TOOLS
  # gets populated into hadoop-env.sh

  for i in ${HADOOP_OPTIONAL_TOOLS//,/ }; do
    hadoop_add_entry HADOOP_TOOLS_OPTIONS "${i}"
  done

  for i in "${files1[@]}" "${files2[@]}"
  do
    if [[ -n "${i}"
      && -f "${i}" ]]; then
      hadoop_debug "Profiles: importing ${i}"
      # shellcheck disable=SC1090
      . "${i}"
    fi
  done
}

## @description Initialize the registered shell profiles
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_shellprofiles_init
{
  local i

  for i in ${HADOOP_SHELL_PROFILES}
  do
    if declare -F _${i}_hadoop_init >/dev/null ; then
      hadoop_debug "Profiles: ${i} init"
      # shellcheck disable=SC2086
      _${i}_hadoop_init
    fi
  done
}

## @description Apply the shell profile classpath additions
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_shellprofiles_classpath
{
  local i

  for i in ${HADOOP_SHELL_PROFILES}
  do
    if declare -F _${i}_hadoop_classpath >/dev/null ; then
      hadoop_debug "Profiles: ${i} classpath"
      # shellcheck disable=SC2086
      _${i}_hadoop_classpath
    fi
  done
}

## @description Apply the shell profile native library additions
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_shellprofiles_nativelib
{
  local i

  for i in ${HADOOP_SHELL_PROFILES}
  do
    if declare -F _${i}_hadoop_nativelib >/dev/null ; then
      hadoop_debug "Profiles: ${i} nativelib"
      # shellcheck disable=SC2086
      _${i}_hadoop_nativelib
    fi
  done
}

## @description Apply the shell profile final configuration
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_shellprofiles_finalize
{
  local i

  for i in ${HADOOP_SHELL_PROFILES}
  do
    if declare -F _${i}_hadoop_finalize >/dev/null ; then
      hadoop_debug "Profiles: ${i} finalize"
      # shellcheck disable=SC2086
      _${i}_hadoop_finalize
    fi
  done
}

## @description Initialize the Hadoop shell environment, now that
## @description user settings have been imported
## @audience private
## @stability evolving
## @replaceable no
function hadoop_basic_init
{
  # Some of these are also set in hadoop-env.sh.
  # we still set them here just in case hadoop-env.sh is
  # broken in some way, set up defaults, etc.
  #
  # but it is important to note that if you update these
  # you also need to update hadoop-env.sh as well!!!

  CLASSPATH=""
  hadoop_debug "Initialize CLASSPATH"

  if [[ -z "${HADOOP_COMMON_HOME}" ]] &&
     [[ -d "${HADOOP_HOME}/${HADOOP_COMMON_DIR}" ]]; then
    export HADOOP_COMMON_HOME="${HADOOP_HOME}"
  fi

  # default policy file for service-level authorization
  HADOOP_POLICYFILE=${HADOOP_POLICYFILE:-"hadoop-policy.xml"}

  # define HADOOP_HDFS_HOME
  if [[ -z "${HADOOP_HDFS_HOME}" ]] &&
     [[ -d "${HADOOP_HOME}/${HDFS_DIR}" ]]; then
    export HADOOP_HDFS_HOME="${HADOOP_HOME}"
  fi

  # define HADOOP_YARN_HOME
  if [[ -z "${HADOOP_YARN_HOME}" ]] &&
     [[ -d "${HADOOP_HOME}/${YARN_DIR}" ]]; then
    export HADOOP_YARN_HOME="${HADOOP_HOME}"
  fi

  # define HADOOP_MAPRED_HOME
  if [[ -z "${HADOOP_MAPRED_HOME}" ]] &&
     [[ -d "${HADOOP_HOME}/${MAPRED_DIR}" ]]; then
    export HADOOP_MAPRED_HOME="${HADOOP_HOME}"
  fi

  if [[ ! -d "${HADOOP_COMMON_HOME}" ]]; then
    hadoop_error "ERROR: Invalid HADOOP_COMMON_HOME"
    exit 1
  fi

  if [[ ! -d "${HADOOP_HDFS_HOME}" ]]; then
    hadoop_error "ERROR: Invalid HADOOP_HDFS_HOME"
    exit 1
  fi

  if [[ ! -d "${HADOOP_YARN_HOME}" ]]; then
    hadoop_error "ERROR: Invalid HADOOP_YARN_HOME"
    exit 1
  fi

  if [[ ! -d "${HADOOP_MAPRED_HOME}" ]]; then
    hadoop_error "ERROR: Invalid HADOOP_MAPRED_HOME"
    exit 1
  fi

  # if for some reason the shell doesn't have $USER defined
  # (e.g., ssh'd in to execute a command)
  # let's get the effective username and use that
  USER=${USER:-$(id -nu)}
  HADOOP_IDENT_STRING=${HADOOP_IDENT_STRING:-$USER}
  HADOOP_LOG_DIR=${HADOOP_LOG_DIR:-"${HADOOP_HOME}/logs"}
  HADOOP_LOGFILE=${HADOOP_LOGFILE:-hadoop.log}
  HADOOP_LOGLEVEL=${HADOOP_LOGLEVEL:-INFO}
  HADOOP_NICENESS=${HADOOP_NICENESS:-0}
  HADOOP_STOP_TIMEOUT=${HADOOP_STOP_TIMEOUT:-5}
  HADOOP_PID_DIR=${HADOOP_PID_DIR:-/tmp}
  HADOOP_ROOT_LOGGER=${HADOOP_ROOT_LOGGER:-${HADOOP_LOGLEVEL},console}
  HADOOP_DAEMON_ROOT_LOGGER=${HADOOP_DAEMON_ROOT_LOGGER:-${HADOOP_LOGLEVEL},RFA}
  HADOOP_SECURITY_LOGGER=${HADOOP_SECURITY_LOGGER:-INFO,NullAppender}
  HADOOP_SSH_OPTS=${HADOOP_SSH_OPTS-"-o BatchMode=yes -o StrictHostKeyChecking=no -o ConnectTimeout=10s"}
  HADOOP_SECURE_LOG_DIR=${HADOOP_SECURE_LOG_DIR:-${HADOOP_LOG_DIR}}
  HADOOP_SECURE_PID_DIR=${HADOOP_SECURE_PID_DIR:-${HADOOP_PID_DIR}}
  HADOOP_SSH_PARALLEL=${HADOOP_SSH_PARALLEL:-10}
}

## @description Set the worker support information to the contents
## @description of `filename`
## @audience public
## @stability stable
## @replaceable no
## @param filename
## @return will exit if file does not exist
function hadoop_populate_workers_file
{
  local workersfile=$1
  shift
  if [[ -f "${workersfile}" ]]; then
    HADOOP_WORKERS="${workersfile}"
  elif [[ -f "${HADOOP_CONF_DIR}/${workersfile}" ]]; then
    HADOOP_WORKERS="${HADOOP_CONF_DIR}/${workersfile}"
  else
    hadoop_error "ERROR: Cannot find hosts file \"${workersfile}\""
    hadoop_exit_with_usage 1
  fi
}

## @description Rotates the given `file` until `number` of
## @description files exist.
## @audience public
## @stability stable
## @replaceable no
## @param filename
## @param [number]
## @return $? will contain last mv's return value
function hadoop_rotate_log
{
  #
  # Users are likely to replace this one for something
  # that gzips or uses dates or who knows what.
  #
  # be aware that &1 and &2 might go through here
  # so don't do anything too crazy...
  #
  local log=$1;
  local num=${2:-5};

  if [[ -f "${log}" ]]; then # rotate logs
    while [[ ${num} -gt 1 ]]; do
      #shellcheck disable=SC2086
      let prev=${num}-1
      if [[ -f "${log}.${prev}" ]]; then
        mv "${log}.${prev}" "${log}.${num}"
      fi
      num=${prev}
    done
    mv "${log}" "${log}.${num}"
  fi
}
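
# Illustrative usage (the path is a made-up example): with foo.log and
# foo.log.1 .. foo.log.4 present,
#
#   hadoop_rotate_log /var/log/hadoop/foo.log 5
#
# renames foo.log.4 to foo.log.5 (overwriting any existing foo.log.5),
# shifts the rest up by one, and finally moves foo.log to foo.log.1.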

## @description Via ssh, log into `hostname` and run `command`
## @audience private
## @stability evolving
## @replaceable yes
## @param hostname
## @param command
## @param [...]
function hadoop_actual_ssh
{
  # we are passing this function to xargs
  # should get hostname followed by rest of command line
  local worker=$1
  shift

  # shellcheck disable=SC2086
  ssh ${HADOOP_SSH_OPTS} ${worker} $"${@// /\\ }" 2>&1 | sed "s/^/$worker: /"
}

## @description Connect to ${HADOOP_WORKERS} or ${HADOOP_WORKER_NAMES}
## @description and execute command.
## @audience private
## @stability evolving
## @replaceable yes
## @param command
## @param [...]
function hadoop_connect_to_hosts
{
  # shellcheck disable=SC2124
  local params="$@"
  local worker_file
  local tmpslvnames

  #
  # ssh (or whatever) to a host
  #
  # User can specify hostnames or a file where the hostnames are (not both)
  if [[ -n "${HADOOP_WORKERS}" && -n "${HADOOP_WORKER_NAMES}" ]] ; then
    hadoop_error "ERROR: Both HADOOP_WORKERS and HADOOP_WORKER_NAMES were defined. Aborting."
    exit 1
  elif [[ -z "${HADOOP_WORKER_NAMES}" ]]; then
    if [[ -n "${HADOOP_WORKERS}" ]]; then
      worker_file=${HADOOP_WORKERS}
    elif [[ -f "${HADOOP_CONF_DIR}/workers" ]]; then
      worker_file=${HADOOP_CONF_DIR}/workers
    elif [[ -f "${HADOOP_CONF_DIR}/slaves" ]]; then
      hadoop_error "WARNING: 'slaves' file has been deprecated. Please use 'workers' file instead."
      worker_file=${HADOOP_CONF_DIR}/slaves
    fi
  fi

  # if pdsh is available, let's use it. otherwise default
  # to a loop around ssh. (ugh)
  if [[ -e '/usr/bin/pdsh' ]]; then
    if [[ -z "${HADOOP_WORKER_NAMES}" ]] ; then
      # if we were given a file, just let pdsh deal with it.
      # shellcheck disable=SC2086
      PDSH_SSH_ARGS_APPEND="${HADOOP_SSH_OPTS}" pdsh \
        -f "${HADOOP_SSH_PARALLEL}" -w ^"${worker_file}" $"${@// /\\ }" 2>&1
    else
      # no spaces allowed in the pdsh arg host list
      # shellcheck disable=SC2086
      tmpslvnames=$(echo ${HADOOP_WORKER_NAMES} | tr -s ' ' ,)
      PDSH_SSH_ARGS_APPEND="${HADOOP_SSH_OPTS}" pdsh \
        -f "${HADOOP_SSH_PARALLEL}" \
        -w "${tmpslvnames}" $"${@// /\\ }" 2>&1
    fi
  else
    if [[ -z "${HADOOP_WORKER_NAMES}" ]]; then
      HADOOP_WORKER_NAMES=$(sed 's/#.*$//;/^$/d' "${worker_file}")
    fi
    hadoop_connect_to_hosts_without_pdsh "${params}"
  fi
}

## @description Connect to ${HADOOP_WORKER_NAMES} and execute command
## @description under the environment which does not support pdsh.
## @audience private
## @stability evolving
## @replaceable yes
## @param command
## @param [...]
function hadoop_connect_to_hosts_without_pdsh
{
  # shellcheck disable=SC2124
  local params="$@"
  local workers=(${HADOOP_WORKER_NAMES})
  for (( i = 0; i < ${#workers[@]}; i++ ))
  do
    if (( i != 0 && i % HADOOP_SSH_PARALLEL == 0 )); then
      wait
    fi
    # shellcheck disable=SC2086
    hadoop_actual_ssh "${workers[$i]}" ${params} &
  done
  wait
}

## @description Utility routine to handle --workers mode
## @audience private
## @stability evolving
## @replaceable yes
## @param commandarray
function hadoop_common_worker_mode_execute
{
  #
  # input should be the command line as given by the user
  # in the form of an array
  #
  local argv=("$@")

  # if --workers is still on the command line, remove it
  # to prevent loops
  # Also remove --hostnames and --hosts along with arg values
  local argsSize=${#argv[@]};
  for (( i = 0; i < argsSize; i++ ))
  do
    if [[ "${argv[$i]}" =~ ^--workers$ ]]; then
      unset argv[$i]
    elif [[ "${argv[$i]}" =~ ^--hostnames$ ]] ||
      [[ "${argv[$i]}" =~ ^--hosts$ ]]; then
      unset argv[$i];
      let i++;
      unset argv[$i];
    fi
  done

  if [[ ${QATESTMODE} = true ]]; then
    echo "${argv[@]}"
    return
  fi

  hadoop_connect_to_hosts -- "${argv[@]}"
}

## @description Verify that a shell command was passed a valid
## @description class name
## @audience public
## @stability stable
## @replaceable yes
## @param classname
## @return 0 = success
## @return 1 = failure w/user message
function hadoop_validate_classname
{
  local class=$1
  shift 1

  if [[ ! ${class} =~ \. ]]; then
    # assuming the arg is a typo of a command if it does not contain ".".
    # a class belonging to no package is not allowed as a result.
    hadoop_error "ERROR: ${class} is not COMMAND nor fully qualified CLASSNAME."
    return 1
  fi
  return 0
}

## @description Append the `appendstring` if `checkstring` is not
## @description present in the given `envvar`
## @audience public
## @stability stable
## @replaceable yes
## @param envvar
## @param checkstring
## @param appendstring
function hadoop_add_param
{
  #
  # general param dedupe..
  # $1 is what we are adding to
  # $2 is the name of what we want to add (key)
  # $3 is the key+value of what we're adding
  #
  # doing it this way allows us to support all sorts of
  # different syntaxes, just so long as they are space
  # delimited
  #
  if [[ ! ${!1} =~ $2 ]] ; then
    #shellcheck disable=SC2140
    eval "$1"="'${!1} $3'"
    if [[ ${!1:0:1} = ' ' ]]; then
      #shellcheck disable=SC2140
      eval "$1"="'${!1# }'"
    fi
    hadoop_debug "$1 accepted $3"
  else
    hadoop_debug "$1 declined $3"
  fi
}
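
# Illustrative usage: deduplication is on the key ($2), not the whole
# string, so a second setting of the same key is declined:
#
#   hadoop_add_param HADOOP_OPTS Xmx "-Xmx1g"   # accepted
#   hadoop_add_param HADOOP_OPTS Xmx "-Xmx2g"   # declined: Xmx already set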

## @description Register the given `shellprofile` to the Hadoop
## @description shell subsystem
## @audience public
## @stability stable
## @replaceable yes
## @param shellprofile
function hadoop_add_profile
{
  # shellcheck disable=SC2086
  hadoop_add_param HADOOP_SHELL_PROFILES $1 $1
}

## @description Add a file system object (directory, file,
## @description wildcard, ...) to the classpath. Optionally provide
## @description a hint as to where in the classpath it should go.
## @audience public
## @stability stable
## @replaceable yes
## @param object
## @param [before|after]
## @return 0 = success (added or duplicate)
## @return 1 = failure (doesn't exist or some other reason)
function hadoop_add_classpath
{
  # However, with classpath (& JLP), we can do dedupe
  # along with some sanity checking (e.g., missing directories)
  # since we have a better idea of what is legal
  #
  # for wildcard at end, we can
  # at least check the dir exists
  if [[ $1 =~ ^.*\*$ ]]; then
    local mp
    mp=$(dirname "$1")
    if [[ ! -d "${mp}" ]]; then
      hadoop_debug "Rejected CLASSPATH: $1 (not a dir)"
      return 1
    fi

  # no wildcard in the middle, so check existence
  # (doesn't matter *what* it is)
  elif [[ ! $1 =~ ^.*\*.*$ ]] && [[ ! -e "$1" ]]; then
    hadoop_debug "Rejected CLASSPATH: $1 (does not exist)"
    return 1
  fi

  if [[ -z "${CLASSPATH}" ]]; then
    CLASSPATH=$1
    hadoop_debug "Initial CLASSPATH=$1"
  elif [[ ":${CLASSPATH}:" != *":$1:"* ]]; then
    if [[ "$2" = "before" ]]; then
      CLASSPATH="$1:${CLASSPATH}"
      hadoop_debug "Prepend CLASSPATH: $1"
    else
      CLASSPATH+=:$1
      hadoop_debug "Append CLASSPATH: $1"
    fi
  else
    hadoop_debug "Dupe CLASSPATH: $1"
  fi
  return 0
}
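
# Illustrative usage (paths are examples; note the quoting that keeps
# the trailing wildcard out of shell glob expansion):
#
#   hadoop_add_classpath "${HADOOP_CONF_DIR}" before
#   hadoop_add_classpath "${HADOOP_HOME}/share/hadoop/common"'/*'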

## @description Add a file system object (directory, file,
## @description wildcard, ...) to the colonpath. Optionally provide
## @description a hint as to where in the colonpath it should go.
## @description Prior to adding, objects are checked for duplication
## @description and existence. Many other functions use
## @description this function as their base implementation
## @description including `hadoop_add_javalibpath` and `hadoop_add_ldlibpath`.
## @audience public
## @stability stable
## @replaceable yes
## @param envvar
## @param object
## @param [before|after]
## @return 0 = success (added or duplicate)
## @return 1 = failure (doesn't exist or some other reason)
function hadoop_add_colonpath
{
  # this is CLASSPATH, JLP, etc but with dedupe but no
  # other checking
  if [[ -d "${2}" ]] && [[ ":${!1}:" != *":$2:"* ]]; then
    if [[ -z "${!1}" ]]; then
      # shellcheck disable=SC2086
      eval $1="'$2'"
      hadoop_debug "Initial colonpath($1): $2"
    elif [[ "$3" = "before" ]]; then
      # shellcheck disable=SC2086
      eval $1="'$2:${!1}'"
      hadoop_debug "Prepend colonpath($1): $2"
    else
      # shellcheck disable=SC2086
      eval $1+=":'$2'"
      hadoop_debug "Append colonpath($1): $2"
    fi
    return 0
  fi
  hadoop_debug "Rejected colonpath($1): $2"
  return 1
}

## @description Add a file system object (directory, file,
## @description wildcard, ...) to the Java JNI path. Optionally
## @description provide a hint as to where in the Java JNI path
## @description it should go.
## @audience public
## @stability stable
## @replaceable yes
## @param object
## @param [before|after]
## @return 0 = success (added or duplicate)
## @return 1 = failure (doesn't exist or some other reason)
function hadoop_add_javalibpath
{
  # specialized function for a common use case
  hadoop_add_colonpath JAVA_LIBRARY_PATH "$1" "$2"
}

## @description Add a file system object (directory, file,
## @description wildcard, ...) to the LD_LIBRARY_PATH. Optionally
## @description provide a hint as to where in the LD_LIBRARY_PATH
## @description it should go.
## @audience public
## @stability stable
## @replaceable yes
## @param object
## @param [before|after]
## @return 0 = success (added or duplicate)
## @return 1 = failure (doesn't exist or some other reason)
function hadoop_add_ldlibpath
{
  local status
  # specialized function for a common use case
  hadoop_add_colonpath LD_LIBRARY_PATH "$1" "$2"
  status=$?

  # note that we export this
  export LD_LIBRARY_PATH
  return ${status}
}

## @description Add the common/core Hadoop components to the
## @description environment
## @audience private
## @stability evolving
## @replaceable yes
## @returns 1 on failure, may exit
## @returns 0 on success
function hadoop_add_common_to_classpath
{
  #
  # get all of the common jars+config in the path
  #
  if [[ -z "${HADOOP_COMMON_HOME}"
    || -z "${HADOOP_COMMON_DIR}"
    || -z "${HADOOP_COMMON_LIB_JARS_DIR}" ]]; then
    hadoop_debug "COMMON_HOME=${HADOOP_COMMON_HOME}"
    hadoop_debug "COMMON_DIR=${HADOOP_COMMON_DIR}"
    hadoop_debug "COMMON_LIB_JARS_DIR=${HADOOP_COMMON_LIB_JARS_DIR}"
    hadoop_error "ERROR: HADOOP_COMMON_HOME or related vars are not configured."
    exit 1
  fi

  # developers
  if [[ -n "${HADOOP_ENABLE_BUILD_PATHS}" ]]; then
    hadoop_add_classpath "${HADOOP_COMMON_HOME}/hadoop-common/target/classes"
  fi

  hadoop_add_classpath "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_LIB_JARS_DIR}"'/*'
  hadoop_add_classpath "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_DIR}"'/*'
}

## @description Run libexec/tools/module.sh to add to the classpath
## @description environment
## @audience private
## @stability evolving
## @replaceable yes
## @param module
function hadoop_add_to_classpath_tools
{
  declare module=$1

  if [[ -f "${HADOOP_LIBEXEC_DIR}/tools/${module}.sh" ]]; then
    # shellcheck disable=SC1090
    . "${HADOOP_LIBEXEC_DIR}/tools/${module}.sh"
  else
    hadoop_error "ERROR: Tools helper ${HADOOP_LIBEXEC_DIR}/tools/${module}.sh was not found."
  fi

  if declare -f hadoop_classpath_tools_${module} >/dev/null 2>&1; then
    "hadoop_classpath_tools_${module}"
  fi
}

## @description Add the user's custom classpath settings to the
## @description environment
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_add_to_classpath_userpath
{
  # Add the user-specified HADOOP_CLASSPATH to the
  # official CLASSPATH env var if HADOOP_USE_CLIENT_CLASSLOADER
  # is not set.
  # Add it first or last depending on if user has
  # set env-var HADOOP_USER_CLASSPATH_FIRST
  # we'll also dedupe it, because we're cool like that.
  #
  declare -a array
  declare -i c=0
  declare -i j
  declare -i i
  declare idx

  if [[ -n "${HADOOP_CLASSPATH}" ]]; then
    # I wonder if Java runs on VMS.
    for idx in $(echo "${HADOOP_CLASSPATH}" | tr : '\n'); do
      array[${c}]=${idx}
      ((c=c+1))
    done

    # bats gets confused by j getting set to 0
    ((j=c-1)) || ${QATESTMODE}

    if [[ -z "${HADOOP_USE_CLIENT_CLASSLOADER}" ]]; then
      if [[ -z "${HADOOP_USER_CLASSPATH_FIRST}" ]]; then
        for ((i=0; i<=j; i++)); do
          hadoop_add_classpath "${array[$i]}" after
        done
      else
        for ((i=j; i>=0; i--)); do
          hadoop_add_classpath "${array[$i]}" before
        done
      fi
    fi
  fi
}

## @description Routine to configure any OS-specific settings.
## @audience public
## @stability stable
## @replaceable yes
## @return may exit on failure conditions
function hadoop_os_tricks
{
  local bindv6only

  HADOOP_IS_CYGWIN=false
  case ${HADOOP_OS_TYPE} in
    Darwin)
      if [[ -z "${JAVA_HOME}" ]]; then
        if [[ -x /usr/libexec/java_home ]]; then
          JAVA_HOME="$(/usr/libexec/java_home)"
          export JAVA_HOME
        else
          JAVA_HOME=/Library/Java/Home
          export JAVA_HOME
        fi
      fi
    ;;
    Linux)
      # Newer versions of glibc use an arena memory allocator that
      # causes virtual memory usage to explode. This interacts badly
      # with the many threads that we use in Hadoop. Tune the variable
      # down to prevent vmem explosion.
      export MALLOC_ARENA_MAX=${MALLOC_ARENA_MAX:-4}

      # we put this in QA test mode off so that non-Linux can test
      if [[ "${QATESTMODE}" = true ]]; then
        return
      fi

      # NOTE! HADOOP_ALLOW_IPV6 is a developer hook. We leave it
      # undocumented in hadoop-env.sh because we don't want users to
      # shoot themselves in the foot while devs make IPv6 work.
      bindv6only=$(/sbin/sysctl -n net.ipv6.bindv6only 2> /dev/null)
      if [[ -n "${bindv6only}" ]] &&
         [[ "${bindv6only}" -eq "1" ]] &&
         [[ "${HADOOP_ALLOW_IPV6}" != "yes" ]]; then
        hadoop_error "ERROR: \"net.ipv6.bindv6only\" is set to 1 "
        hadoop_error "ERROR: Hadoop networking could be broken. Aborting."
        hadoop_error "ERROR: For more info: http://wiki.apache.org/hadoop/HadoopIPv6"
        exit 1
      fi
    ;;
    CYGWIN*)
      # Flag that we're running on Cygwin to trigger path translation later.
      HADOOP_IS_CYGWIN=true
    ;;
  esac
}

## @description Configure/verify ${JAVA_HOME}
## @audience public
## @stability stable
## @replaceable yes
## @return may exit on failure conditions
function hadoop_java_setup
{
  # Bail if we did not detect it
  if [[ -z "${JAVA_HOME}" ]]; then
    hadoop_error "ERROR: JAVA_HOME is not set and could not be found."
    exit 1
  fi

  if [[ ! -d "${JAVA_HOME}" ]]; then
    hadoop_error "ERROR: JAVA_HOME ${JAVA_HOME} does not exist."
    exit 1
  fi

  JAVA="${JAVA_HOME}/bin/java"

  if [[ ! -x "$JAVA" ]]; then
    hadoop_error "ERROR: $JAVA is not executable."
    exit 1
  fi
}
## @description Finish Java JNI paths prior to execution
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_finalize_libpaths
{
  if [[ -n "${JAVA_LIBRARY_PATH}" ]]; then
    hadoop_translate_cygwin_path JAVA_LIBRARY_PATH
    hadoop_add_param HADOOP_OPTS java.library.path \
      "-Djava.library.path=${JAVA_LIBRARY_PATH}"
    export LD_LIBRARY_PATH
  fi
}
## @description Finish Java heap parameters prior to execution
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_finalize_hadoop_heap
{
  if [[ -n "${HADOOP_HEAPSIZE_MAX}" ]]; then
    if [[ "${HADOOP_HEAPSIZE_MAX}" =~ ^[0-9]+$ ]]; then
      HADOOP_HEAPSIZE_MAX="${HADOOP_HEAPSIZE_MAX}m"
    fi
    hadoop_add_param HADOOP_OPTS Xmx "-Xmx${HADOOP_HEAPSIZE_MAX}"
  fi

  # backwards compatibility
  if [[ -n "${HADOOP_HEAPSIZE}" ]]; then
    if [[ "${HADOOP_HEAPSIZE}" =~ ^[0-9]+$ ]]; then
      HADOOP_HEAPSIZE="${HADOOP_HEAPSIZE}m"
    fi
    hadoop_add_param HADOOP_OPTS Xmx "-Xmx${HADOOP_HEAPSIZE}"
  fi

  if [[ -n "${HADOOP_HEAPSIZE_MIN}" ]]; then
    if [[ "${HADOOP_HEAPSIZE_MIN}" =~ ^[0-9]+$ ]]; then
      HADOOP_HEAPSIZE_MIN="${HADOOP_HEAPSIZE_MIN}m"
    fi
    hadoop_add_param HADOOP_OPTS Xms "-Xms${HADOOP_HEAPSIZE_MIN}"
  fi
}
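# Illustrative examples (not in the upstream script); a bare number is
# treated as megabytes, anything else is passed through verbatim:
#   HADOOP_HEAPSIZE_MAX=4096  => -Xmx4096m
#   HADOOP_HEAPSIZE_MAX=4g    => -Xmx4g
#   HADOOP_HEAPSIZE_MIN=1g    => -Xms1g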
## @description Converts the contents of the variable name
## @description `varnameref` into the equivalent Windows path.
## @description If the second parameter is true, then `varnameref`
## @description is treated as though it were a path list.
## @audience public
## @stability stable
## @replaceable yes
## @param varnameref
## @param [true]
function hadoop_translate_cygwin_path
{
  if [[ "${HADOOP_IS_CYGWIN}" = "true" ]]; then
    if [[ "$2" = "true" ]]; then
      #shellcheck disable=SC2016
      eval "$1"='$(cygpath -p -w "${!1}" 2>/dev/null)'
    else
      #shellcheck disable=SC2016
      eval "$1"='$(cygpath -w "${!1}" 2>/dev/null)'
    fi
  fi
}
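# Illustrative usage (not in the upstream script); note the variable
# *name* is passed, not its value, and nothing happens unless
# HADOOP_IS_CYGWIN is true:
#   hadoop_translate_cygwin_path HADOOP_LOG_DIR   # single path
#   hadoop_translate_cygwin_path CLASSPATH true   # ':'-separated path list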
## @description Adds the HADOOP_CLIENT_OPTS variable to HADOOP_OPTS
## @description if HADOOP_SUBCMD_SUPPORTDAEMONIZATION is false or unset
## @audience public
## @stability stable
## @replaceable yes
function hadoop_add_client_opts
{
  if [[ "${HADOOP_SUBCMD_SUPPORTDAEMONIZATION}" = false
     || -z "${HADOOP_SUBCMD_SUPPORTDAEMONIZATION}" ]]; then
    hadoop_debug "Appending HADOOP_CLIENT_OPTS onto HADOOP_OPTS"
    HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
  fi
}
## @description Finish configuring Hadoop-specific system properties
## @description prior to executing Java
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_finalize_hadoop_opts
{
  hadoop_translate_cygwin_path HADOOP_LOG_DIR
  hadoop_add_param HADOOP_OPTS hadoop.log.dir "-Dhadoop.log.dir=${HADOOP_LOG_DIR}"
  hadoop_add_param HADOOP_OPTS hadoop.log.file "-Dhadoop.log.file=${HADOOP_LOGFILE}"
  hadoop_translate_cygwin_path HADOOP_HOME
  export HADOOP_HOME
  hadoop_add_param HADOOP_OPTS hadoop.home.dir "-Dhadoop.home.dir=${HADOOP_HOME}"
  hadoop_add_param HADOOP_OPTS hadoop.id.str "-Dhadoop.id.str=${HADOOP_IDENT_STRING}"
  hadoop_add_param HADOOP_OPTS hadoop.root.logger "-Dhadoop.root.logger=${HADOOP_ROOT_LOGGER}"
  hadoop_add_param HADOOP_OPTS hadoop.policy.file "-Dhadoop.policy.file=${HADOOP_POLICYFILE}"
  hadoop_add_param HADOOP_OPTS hadoop.security.logger "-Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER}"
}
## @description Finish Java classpath prior to execution
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_finalize_classpath
{
  hadoop_add_classpath "${HADOOP_CONF_DIR}" before

  # user classpath gets added at the last minute. this allows
  # override of CONF dirs and more
  hadoop_add_to_classpath_userpath
  hadoop_translate_cygwin_path CLASSPATH true
}
## @description Finish all the remaining environment settings prior
## @description to executing Java. This is a wrapper that calls
## @description the other `finalize` routines.
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_finalize
{
  hadoop_shellprofiles_finalize

  hadoop_finalize_classpath
  hadoop_finalize_libpaths
  hadoop_finalize_hadoop_heap
  hadoop_finalize_hadoop_opts

  hadoop_translate_cygwin_path HADOOP_HOME
  hadoop_translate_cygwin_path HADOOP_CONF_DIR
  hadoop_translate_cygwin_path HADOOP_COMMON_HOME
  hadoop_translate_cygwin_path HADOOP_HDFS_HOME
  hadoop_translate_cygwin_path HADOOP_YARN_HOME
  hadoop_translate_cygwin_path HADOOP_MAPRED_HOME
}
## @description Print usage information and exit with the passed
## @description `exitcode`
## @audience public
## @stability stable
## @replaceable no
## @param exitcode
## @return This function will always exit.
function hadoop_exit_with_usage
{
  local exitcode=$1
  if [[ -z $exitcode ]]; then
    exitcode=1
  fi
  # shellcheck disable=SC2034
  if declare -F hadoop_usage >/dev/null ; then
    hadoop_usage
  elif [[ -x /usr/bin/cowsay ]]; then
    /usr/bin/cowsay -f elephant "Sorry, no help available."
  else
    hadoop_error "Sorry, no help available."
  fi
  exit $exitcode
}
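# Illustrative override (not in the upstream script): a calling script
# can supply its own help text by defining hadoop_usage before this
# function runs; the function name below is what this routine probes
# for with declare -F:
#   function hadoop_usage
#   {
#     echo "Usage: mytool [--config dir] <subcommand>"
#   }
#   hadoop_exit_with_usage 1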
## @description Verify that prerequisites have been met prior to
## @description executing a privileged program.
## @audience private
## @stability evolving
## @replaceable yes
## @return This routine may exit.
function hadoop_verify_secure_prereq
{
  # if you are on an OS like Illumos that has functional roles
  # and you are using pfexec, you'll probably want to change
  # this.
  if ! hadoop_privilege_check && [[ -z "${HADOOP_SECURE_COMMAND}" ]]; then
    hadoop_error "ERROR: You must be a privileged user in order to run a secure service."
    exit 1
  else
    return 0
  fi
}
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_setup_secure_service
{
  # need a more complicated setup? replace me!
  HADOOP_PID_DIR=${HADOOP_SECURE_PID_DIR}
  HADOOP_LOG_DIR=${HADOOP_SECURE_LOG_DIR}
}
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_verify_piddir
{
  if [[ -z "${HADOOP_PID_DIR}" ]]; then
    hadoop_error "No pid directory defined."
    exit 1
  fi
  hadoop_mkdir "${HADOOP_PID_DIR}"
  touch "${HADOOP_PID_DIR}/$$" >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Unable to write in ${HADOOP_PID_DIR}. Aborting."
    exit 1
  fi
  rm "${HADOOP_PID_DIR}/$$" >/dev/null 2>&1
}

## @audience private
## @stability evolving
## @replaceable yes
function hadoop_verify_logdir
{
  if [[ -z "${HADOOP_LOG_DIR}" ]]; then
    hadoop_error "No log directory defined."
    exit 1
  fi
  hadoop_mkdir "${HADOOP_LOG_DIR}"
  touch "${HADOOP_LOG_DIR}/$$" >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Unable to write in ${HADOOP_LOG_DIR}. Aborting."
    exit 1
  fi
  rm "${HADOOP_LOG_DIR}/$$" >/dev/null 2>&1
}
## @description Determine the status of the daemon referenced
## @description by `pidfile`
## @audience public
## @stability stable
## @replaceable yes
## @param pidfile
## @return (mostly) LSB 4.1.0 compatible status
function hadoop_status_daemon
{
  #
  # LSB 4.1.0 compatible status command (1)
  #
  # 0 = program is running
  # 1 = dead, but still a pid (2)
  # 2 = (not used by us)
  # 3 = not running
  #
  # 1 - this is not an endorsement of the LSB
  #
  # 2 - technically, the specification says /var/run/pid, so
  #     we should never return this value, but we're giving
  #     them the benefit of the doubt and returning 1 even if
  #     our pid is not in /var/run .
  #

  local pidfile=$1
  shift

  local pid
  local pspid

  if [[ -f "${pidfile}" ]]; then
    pid=$(cat "${pidfile}")
    if pspid=$(ps -o args= -p"${pid}" 2>/dev/null); then
      # this is to check that the running process we found is actually the same
      # daemon that we're interested in. note that ${daemonname} is inherited
      # from the caller's scope via bash dynamic scoping.
      if [[ ${pspid} =~ -Dproc_${daemonname} ]]; then
        return 0
      fi
    fi
    return 1
  fi
  return 3
}
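# Illustrative usage (not in the upstream script); daemonname is
# expected to be set in the caller's scope, as noted above:
#   daemonname=namenode
#   hadoop_status_daemon "${HADOOP_PID_DIR}/hadoop-${HADOOP_IDENT_STRING}-namenode.pid"
#   case $? in
#     0) echo "running" ;;
#     1) echo "dead or pid file is stale" ;;
#     3) echo "not running" ;;
#   esac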
## @description Execute the Java `class`, passing along any `options`.
## @description Additionally, set the Java property -Dproc_`command`.
## @audience public
## @stability stable
## @replaceable yes
## @param command
## @param class
## @param [options]
function hadoop_java_exec
{
  # run a java command. this is used for
  # non-daemons
  local command=$1
  local class=$2
  shift 2

  hadoop_debug "Final CLASSPATH: ${CLASSPATH}"
  hadoop_debug "Final HADOOP_OPTS: ${HADOOP_OPTS}"
  hadoop_debug "Final JAVA_HOME: ${JAVA_HOME}"
  hadoop_debug "java: ${JAVA}"
  hadoop_debug "Class name: ${class}"
  hadoop_debug "Command line options: $*"

  export CLASSPATH
  #shellcheck disable=SC2086
  exec "${JAVA}" "-Dproc_${command}" ${HADOOP_OPTS} "${class}" "$@"
}
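# Illustrative call (not in the upstream script); exec replaces the
# current shell with the JVM, so nothing after this line would run:
#   hadoop_java_exec fs org.apache.hadoop.fs.FsShell -ls /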
## @description Start a non-privileged daemon in the foreground.
## @audience private
## @stability evolving
## @replaceable yes
## @param command
## @param class
## @param pidfile
## @param [options]
function hadoop_start_daemon
{
  # this is our non-privileged daemon starter
  # that fires up a daemon in the *foreground*
  # so complex! so wow! much java!
  local command=$1
  local class=$2
  local pidfile=$3
  shift 3

  hadoop_debug "Final CLASSPATH: ${CLASSPATH}"
  hadoop_debug "Final HADOOP_OPTS: ${HADOOP_OPTS}"
  hadoop_debug "Final JAVA_HOME: ${JAVA_HOME}"
  hadoop_debug "java: ${JAVA}"
  hadoop_debug "Class name: ${class}"
  hadoop_debug "Command line options: $*"

  # this is for the non-daemon pid creation
  #shellcheck disable=SC2086
  echo $$ > "${pidfile}" 2>/dev/null
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot write ${command} pid ${pidfile}."
  fi

  export CLASSPATH
  #shellcheck disable=SC2086
  exec "${JAVA}" "-Dproc_${command}" ${HADOOP_OPTS} "${class}" "$@"
}
## @description Start a non-privileged daemon in the background.
## @audience private
## @stability evolving
## @replaceable yes
## @param command
## @param class
## @param pidfile
## @param outfile
## @param [options]
function hadoop_start_daemon_wrapper
{
  local daemonname=$1
  local class=$2
  local pidfile=$3
  local outfile=$4
  shift 4

  local counter

  hadoop_rotate_log "${outfile}"

  hadoop_start_daemon "${daemonname}" \
    "$class" \
    "${pidfile}" \
    "$@" >> "${outfile}" 2>&1 < /dev/null &

  # we need to avoid a race condition here
  # so let's wait for the fork to finish
  # before overriding with the daemonized pid
  (( counter=0 ))
  while [[ ! -f ${pidfile} && ${counter} -le 5 ]]; do
    sleep 1
    (( counter++ ))
  done

  # this is for daemon pid creation
  #shellcheck disable=SC2086
  echo $! > "${pidfile}" 2>/dev/null
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot write ${daemonname} pid ${pidfile}."
  fi

  # shellcheck disable=SC2086
  renice "${HADOOP_NICENESS}" $! >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot set priority of ${daemonname} process $!"
  fi

  # shellcheck disable=SC2086
  disown %+ >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot disconnect ${daemonname} process $!"
  fi
  sleep 1

  # capture the ulimit output
  ulimit -a >> "${outfile}" 2>&1

  # shellcheck disable=SC2086
  if ! ps -p $! >/dev/null 2>&1; then
    return 1
  fi
  return 0
}
## @description Start a privileged daemon in the foreground.
## @audience private
## @stability evolving
## @replaceable yes
## @param command
## @param class
## @param daemonpidfile
## @param daemonoutfile
## @param daemonerrfile
## @param wrapperpidfile
## @param [options]
function hadoop_start_secure_daemon
{
  # this is used to launch a secure daemon in the *foreground*
  #
  local daemonname=$1
  local class=$2

  # pid file to create for our daemon
  local daemonpidfile=$3

  # where to send stdout. jsvc has bad habits so this *may* be &1
  # which means you send it to stdout!
  local daemonoutfile=$4

  # where to send stderr. same thing, except &2 = stderr
  local daemonerrfile=$5
  local privpidfile=$6
  shift 6

  hadoop_rotate_log "${daemonoutfile}"
  hadoop_rotate_log "${daemonerrfile}"

  # shellcheck disable=SC2153
  jsvc="${JSVC_HOME}/jsvc"
  if [[ ! -f "${jsvc}" ]]; then
    hadoop_error "JSVC_HOME is not set or set incorrectly. jsvc is required to run secure"
    hadoop_error "or privileged daemons. Please download and install jsvc from "
    hadoop_error "http://archive.apache.org/dist/commons/daemon/binaries/ "
    hadoop_error "and set JSVC_HOME to the directory containing the jsvc binary."
    exit 1
  fi

  # note that shellcheck will throw a 2086 here that is bogus for
  # our use case; it doesn't properly support multi-line situations

  hadoop_debug "Final CLASSPATH: ${CLASSPATH}"
  hadoop_debug "Final HADOOP_OPTS: ${HADOOP_OPTS}"
  hadoop_debug "Final JSVC_HOME: ${JSVC_HOME}"
  hadoop_debug "jsvc: ${jsvc}"
  hadoop_debug "Class name: ${class}"
  hadoop_debug "Command line options: $*"

  #shellcheck disable=SC2086
  echo $$ > "${privpidfile}" 2>/dev/null
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot write ${daemonname} pid ${privpidfile}."
  fi

  # shellcheck disable=SC2086
  exec "${jsvc}" \
    "-Dproc_${daemonname}" \
    -outfile "${daemonoutfile}" \
    -errfile "${daemonerrfile}" \
    -pidfile "${daemonpidfile}" \
    -nodetach \
    -user "${HADOOP_SECURE_USER}" \
    -cp "${CLASSPATH}" \
    ${HADOOP_OPTS} \
    "${class}" "$@"
}
## @description Start a privileged daemon in the background.
## @audience private
## @stability evolving
## @replaceable yes
## @param command
## @param class
## @param daemonpidfile
## @param daemonoutfile
## @param wrapperpidfile
## @param wrapperoutfile
## @param daemonerrfile
## @param [options]
function hadoop_start_secure_daemon_wrapper
{
  # this wraps hadoop_start_secure_daemon to take care
  # of the dirty work to launch a daemon in the background!
  local daemonname=$1
  local class=$2

  # same rules as hadoop_start_secure_daemon except we
  # have some additional parameters
  local daemonpidfile=$3
  local daemonoutfile=$4

  # the pid file of the subprocess that spawned our
  # secure launcher
  local jsvcpidfile=$5

  # the output of the subprocess that spawned our secure
  # launcher
  local jsvcoutfile=$6

  local daemonerrfile=$7
  shift 7

  local counter

  hadoop_rotate_log "${jsvcoutfile}"

  hadoop_start_secure_daemon \
    "${daemonname}" \
    "${class}" \
    "${daemonpidfile}" \
    "${daemonoutfile}" \
    "${daemonerrfile}" \
    "${jsvcpidfile}" "$@" >> "${jsvcoutfile}" 2>&1 < /dev/null &

  # we need to avoid a race condition here
  # so let's wait for the fork to finish
  # before overriding with the daemonized pid
  (( counter=0 ))
  while [[ ! -f ${daemonpidfile} && ${counter} -le 5 ]]; do
    sleep 1
    (( counter++ ))
  done

  #shellcheck disable=SC2086
  if ! echo $! > "${jsvcpidfile}"; then
    hadoop_error "ERROR: Cannot write ${daemonname} pid ${jsvcpidfile}."
  fi

  sleep 1
  #shellcheck disable=SC2086
  renice "${HADOOP_NICENESS}" $! >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot set priority of ${daemonname} process $!"
  fi
  if [[ -f "${daemonpidfile}" ]]; then
    #shellcheck disable=SC2046
    renice "${HADOOP_NICENESS}" $(cat "${daemonpidfile}" 2>/dev/null) >/dev/null 2>&1
    if [[ $? -gt 0 ]]; then
      hadoop_error "ERROR: Cannot set priority of ${daemonname} process $(cat "${daemonpidfile}" 2>/dev/null)"
    fi
  fi
  #shellcheck disable=SC2046
  disown %+ >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot disconnect ${daemonname} process $!"
  fi
  # capture the ulimit output
  su "${HADOOP_SECURE_USER}" -c 'bash -c "ulimit -a"' >> "${jsvcoutfile}" 2>&1
  #shellcheck disable=SC2086
  if ! ps -p $! >/dev/null 2>&1; then
    return 1
  fi
  return 0
}
## @description Stop the non-privileged `command` daemon
## @description that is running at `pidfile`.
## @audience public
## @stability stable
## @replaceable yes
## @param command
## @param pidfile
function hadoop_stop_daemon
{
  local cmd=$1
  local pidfile=$2
  shift 2

  local pid
  local cur_pid

  if [[ -f "${pidfile}" ]]; then
    pid=$(cat "$pidfile")

    kill "${pid}" >/dev/null 2>&1
    sleep "${HADOOP_STOP_TIMEOUT}"
    if kill -0 "${pid}" > /dev/null 2>&1; then
      hadoop_error "WARNING: ${cmd} did not stop gracefully after ${HADOOP_STOP_TIMEOUT} seconds: Trying to kill with kill -9"
      kill -9 "${pid}" >/dev/null 2>&1
    fi
    if ps -p "${pid}" > /dev/null 2>&1; then
      hadoop_error "ERROR: Unable to kill ${pid}"
    else
      cur_pid=$(cat "$pidfile")
      if [[ "${pid}" = "${cur_pid}" ]]; then
        rm -f "${pidfile}" >/dev/null 2>&1
      else
        hadoop_error "WARNING: pid has changed for ${cmd}, skip deleting pid file"
      fi
    fi
  fi
}
## @description Stop the privileged `command` daemon
## @description that is running at `daemonpidfile` and launched with
## @description the wrapper at `wrapperpidfile`.
## @audience public
## @stability stable
## @replaceable yes
## @param command
## @param daemonpidfile
## @param wrapperpidfile
function hadoop_stop_secure_daemon
{
  local command=$1
  local daemonpidfile=$2
  local privpidfile=$3
  shift 3

  local ret
  local daemon_pid
  local priv_pid
  local cur_daemon_pid
  local cur_priv_pid

  daemon_pid=$(cat "$daemonpidfile")
  priv_pid=$(cat "$privpidfile")

  hadoop_stop_daemon "${command}" "${daemonpidfile}"
  ret=$?

  cur_daemon_pid=$(cat "$daemonpidfile")
  cur_priv_pid=$(cat "$privpidfile")

  if [[ "${daemon_pid}" = "${cur_daemon_pid}" ]]; then
    rm -f "${daemonpidfile}" >/dev/null 2>&1
  else
    hadoop_error "WARNING: daemon pid has changed for ${command}, skip deleting daemon pid file"
  fi

  if [[ "${priv_pid}" = "${cur_priv_pid}" ]]; then
    rm -f "${privpidfile}" >/dev/null 2>&1
  else
    hadoop_error "WARNING: priv pid has changed for ${command}, skip deleting priv pid file"
  fi
  return ${ret}
}
## @description Manage a non-privileged daemon.
## @audience private
## @stability evolving
## @replaceable yes
## @param [start|stop|status|default]
## @param command
## @param class
## @param daemonpidfile
## @param daemonoutfile
## @param [options]
function hadoop_daemon_handler
{
  local daemonmode=$1
  local daemonname=$2
  local class=$3
  local daemon_pidfile=$4
  local daemon_outfile=$5
  shift 5

  case ${daemonmode} in
    status)
      hadoop_status_daemon "${daemon_pidfile}"
      exit $?
    ;;

    stop)
      hadoop_stop_daemon "${daemonname}" "${daemon_pidfile}"
      exit $?
    ;;

    ##COMPAT -- older hadoops would also start daemons by default
    start|default)
      hadoop_verify_piddir
      hadoop_verify_logdir
      hadoop_status_daemon "${daemon_pidfile}"
      if [[ $? == 0 ]]; then
        hadoop_error "${daemonname} is running as process $(cat "${daemon_pidfile}"). Stop it first."
        exit 1
      else
        # stale pid file, so just remove it and continue on
        rm -f "${daemon_pidfile}" >/dev/null 2>&1
      fi

      ##COMPAT - differentiate between --daemon start and nothing
      # "nothing" shouldn't detach
      if [[ "$daemonmode" = "default" ]]; then
        hadoop_start_daemon "${daemonname}" "${class}" "${daemon_pidfile}" "$@"
      else
        hadoop_start_daemon_wrapper "${daemonname}" \
          "${class}" "${daemon_pidfile}" "${daemon_outfile}" "$@"
      fi
    ;;
  esac
}
## @description Manage a privileged daemon.
## @audience private
## @stability evolving
## @replaceable yes
## @param [start|stop|status|default]
## @param command
## @param class
## @param daemonpidfile
## @param daemonoutfile
## @param wrapperpidfile
## @param wrapperoutfile
## @param wrappererrfile
## @param [options]
function hadoop_secure_daemon_handler
{
  local daemonmode=$1
  local daemonname=$2
  local classname=$3
  local daemon_pidfile=$4
  local daemon_outfile=$5
  local priv_pidfile=$6
  local priv_outfile=$7
  local priv_errfile=$8
  shift 8

  case ${daemonmode} in
    status)
      hadoop_status_daemon "${daemon_pidfile}"
      exit $?
    ;;

    stop)
      hadoop_stop_secure_daemon "${daemonname}" \
        "${daemon_pidfile}" "${priv_pidfile}"
      exit $?
    ;;

    ##COMPAT -- older hadoops would also start daemons by default
    start|default)
      hadoop_verify_piddir
      hadoop_verify_logdir
      hadoop_status_daemon "${daemon_pidfile}"
      if [[ $? == 0 ]]; then
        hadoop_error "${daemonname} is running as process $(cat "${daemon_pidfile}"). Stop it first."
        exit 1
      else
        # stale pid file, so just remove it and continue on
        rm -f "${daemon_pidfile}" >/dev/null 2>&1
      fi

      ##COMPAT - differentiate between --daemon start and nothing
      # "nothing" shouldn't detach
      if [[ "${daemonmode}" = "default" ]]; then
        hadoop_start_secure_daemon "${daemonname}" "${classname}" \
          "${daemon_pidfile}" "${daemon_outfile}" \
          "${priv_errfile}" "${priv_pidfile}" "$@"
      else
        hadoop_start_secure_daemon_wrapper "${daemonname}" "${classname}" \
          "${daemon_pidfile}" "${daemon_outfile}" \
          "${priv_pidfile}" "${priv_outfile}" "${priv_errfile}" "$@"
      fi
    ;;
  esac
}
## @description autodetect whether this is a priv subcmd
## @description by whether or not a priv user var exists
## @description and if HADOOP_SECURE_CLASSNAME is defined
## @audience public
## @stability stable
## @replaceable yes
## @param command
## @param subcommand
## @return 1 = not priv
## @return 0 = priv
function hadoop_detect_priv_subcmd
{
  declare program=$1
  declare command=$2
  declare uvar

  if [[ -z "${HADOOP_SECURE_CLASSNAME}" ]]; then
    hadoop_debug "No secure classname defined."
    return 1
  fi

  uvar=$(hadoop_build_custom_subcmd_var "${program}" "${command}" SECURE_USER)
  if [[ -z "${!uvar}" ]]; then
    hadoop_debug "No secure user defined."
    return 1
  fi
  return 0
}
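# Illustrative example (not in the upstream script); with both of
# these set in the environment
#   HADOOP_SECURE_CLASSNAME=org.apache.hadoop.hdfs.server.datanode.SecureDataNodeStarter
#   HDFS_DATANODE_SECURE_USER=hdfs
# a call such as
#   hadoop_detect_priv_subcmd hdfs datanode   # returns 0 (privileged)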
## @description Build custom subcommand var
## @audience public
## @stability stable
## @replaceable yes
## @param command
## @param subcommand
## @param customid
## @return string
function hadoop_build_custom_subcmd_var
{
  declare program=$1
  declare command=$2
  declare custom=$3
  declare uprogram
  declare ucommand

  if [[ -z "${BASH_VERSINFO[0]}" ]] \
     || [[ "${BASH_VERSINFO[0]}" -lt 4 ]]; then
    uprogram=$(echo "${program}" | tr '[:lower:]' '[:upper:]')
    ucommand=$(echo "${command}" | tr '[:lower:]' '[:upper:]')
  else
    uprogram=${program^^}
    ucommand=${command^^}
  fi

  echo "${uprogram}_${ucommand}_${custom}"
}
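# Illustrative examples (not in the upstream script):
#   hadoop_build_custom_subcmd_var hdfs namenode USER
#     => HDFS_NAMENODE_USER
#   hadoop_build_custom_subcmd_var yarn resourcemanager SECURE_USER
#     => YARN_RESOURCEMANAGER_SECURE_USER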
## @description Verify that the username in a var resolves to a user id
## @audience public
## @stability stable
## @replaceable yes
## @param userstring
## @return 0 for success
## @return 1 for failure
function hadoop_verify_user_resolves
{
  declare userstr=$1

  if [[ -z ${userstr} || -z ${!userstr} ]] ; then
    return 1
  fi

  id -u "${!userstr}" >/dev/null 2>&1
}
## @description Verify that ${USER} is allowed to execute the
## @description given subcommand.
## @audience public
## @stability stable
## @replaceable yes
## @param command
## @param subcommand
## @return return 0 on success
## @return exit 1 on failure
function hadoop_verify_user_perm
{
  declare program=$1
  declare command=$2
  declare uvar

  uvar=$(hadoop_build_custom_subcmd_var "${program}" "${command}" USER)

  if [[ -n ${!uvar} ]]; then
    if [[ ${!uvar} != "${USER}" ]]; then
      hadoop_error "ERROR: ${command} can only be executed by ${!uvar}."
      exit 1
    fi
  fi
  return 0
}
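# Illustrative example (not in the upstream script): with
# HDFS_NAMENODE_USER=hdfs in the environment, only the hdfs user may
# pass this check:
#   hadoop_verify_user_perm hdfs namenode
# anyone else gets an error and the script exits.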
## @description Check whether ${USER} needs to re-exec the
## @description given subcommand as the user configured for it.
## @audience public
## @stability stable
## @replaceable yes
## @param subcommand
## @return 1 on no re-exec needed
## @return 0 on need to re-exec
function hadoop_need_reexec
{
  declare program=$1
  declare command=$2
  declare uvar

  # we've already been re-execed, bail
  if [[ "${HADOOP_REEXECED_CMD}" = true ]]; then
    return 1
  fi

  # if we have privilege, and the _USER is defined, and _USER is
  # set to someone who isn't us, then yes, we should re-exec.
  # otherwise no, don't re-exec and let the system deal with it.

  if hadoop_privilege_check; then
    uvar=$(hadoop_build_custom_subcmd_var "${program}" "${command}" USER)
    if [[ -n ${!uvar} ]]; then
      if [[ ${!uvar} != "${USER}" ]]; then
        return 0
      fi
    fi
  fi
  return 1
}
## @description Add custom (program)_(command)_OPTS to HADOOP_OPTS.
## @description Also handles the deprecated cases from pre-3.x.
## @audience public
## @stability evolving
## @replaceable yes
## @param program
## @param subcommand
## @return will exit on failure conditions
function hadoop_subcommand_opts
{
  declare program=$1
  declare command=$2
  declare uvar
  declare depvar
  declare uprogram
  declare ucommand

  if [[ -z "${program}" || -z "${command}" ]]; then
    return 1
  fi

  # bash 4 and up have built-in ways to upper and lower
  # case the contents of vars. This is faster than
  # calling tr.

  ## We don't call hadoop_build_custom_subcmd_var here
  ## since we need to construct this for the deprecation
  ## cases. For Hadoop 4.x, this needs to get cleaned up.

  if [[ -z "${BASH_VERSINFO[0]}" ]] \
     || [[ "${BASH_VERSINFO[0]}" -lt 4 ]]; then
    uprogram=$(echo "${program}" | tr '[:lower:]' '[:upper:]')
    ucommand=$(echo "${command}" | tr '[:lower:]' '[:upper:]')
  else
    uprogram=${program^^}
    ucommand=${command^^}
  fi

  uvar="${uprogram}_${ucommand}_OPTS"

  # Let's handle all of the deprecation cases early
  # HADOOP_NAMENODE_OPTS -> HDFS_NAMENODE_OPTS

  depvar="HADOOP_${ucommand}_OPTS"

  if [[ "${depvar}" != "${uvar}" ]]; then
    if [[ -n "${!depvar}" ]]; then
      hadoop_deprecate_envvar "${depvar}" "${uvar}"
    fi
  fi

  if [[ -n ${!uvar} ]]; then
    hadoop_debug "Appending ${uvar} onto HADOOP_OPTS"
    HADOOP_OPTS="${HADOOP_OPTS} ${!uvar}"
    return 0
  fi
}
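# Illustrative example (not in the upstream script): with
#   HDFS_NAMENODE_OPTS="-Xmx4g"
# calling
#   hadoop_subcommand_opts hdfs namenode
# appends -Xmx4g onto HADOOP_OPTS; a legacy HADOOP_NAMENODE_OPTS
# setting would first be migrated to HDFS_NAMENODE_OPTS with a
# deprecation warning.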
## @description Add custom (program)_(command)_SECURE_EXTRA_OPTS to HADOOP_OPTS.
## @description This *does not* handle the pre-3.x deprecated cases
## @audience public
## @stability stable
## @replaceable yes
## @param program
## @param subcommand
## @return will exit on failure conditions
function hadoop_subcommand_secure_opts
{
  declare program=$1
  declare command=$2
  declare uvar
  declare uprogram
  declare ucommand

  if [[ -z "${program}" || -z "${command}" ]]; then
    return 1
  fi

  # HDFS_DATANODE_SECURE_EXTRA_OPTS
  # HDFS_NFS3_SECURE_EXTRA_OPTS
  # ...
  uvar=$(hadoop_build_custom_subcmd_var "${program}" "${command}" SECURE_EXTRA_OPTS)

  if [[ -n ${!uvar} ]]; then
    hadoop_debug "Appending ${uvar} onto HADOOP_OPTS"
    HADOOP_OPTS="${HADOOP_OPTS} ${!uvar}"
    return 0
  fi
}
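# Illustrative example (not in the upstream script; the option value
# below is hypothetical):
#   HDFS_DATANODE_SECURE_EXTRA_OPTS="-jvm server"
#   hadoop_subcommand_secure_opts hdfs datanode
# appends the extra jsvc-oriented options onto HADOOP_OPTS.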
## @description Perform the 'hadoop classpath', etc subcommand with the given
## @description parameters
## @audience private
## @stability evolving
## @replaceable yes
## @param [parameters]
## @return will print & exit with no params
function hadoop_do_classpath_subcommand
{
  if [[ "$#" -gt 1 ]]; then
    eval "$1"=org.apache.hadoop.util.Classpath
  else
    hadoop_finalize
    echo "${CLASSPATH}"
    exit 0
  fi
}
## @description generic shell script option parser. sets
## @description HADOOP_PARSE_COUNTER to the number of arguments
## @description the caller should shift
## @audience private
## @stability evolving
## @replaceable yes
## @param [parameters, typically "$@"]
function hadoop_parse_args
{
  HADOOP_DAEMON_MODE="default"
  HADOOP_PARSE_COUNTER=0

  # not all of the options supported here are supported by all commands
  # however these are:
  hadoop_add_option "--config dir" "Hadoop config directory"
  hadoop_add_option "--debug" "turn on shell script debug mode"
  hadoop_add_option "--help" "usage information"

  while true; do
    hadoop_debug "hadoop_parse_args: processing $1"
    case $1 in
      --buildpaths)
        HADOOP_ENABLE_BUILD_PATHS=true
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+1))
      ;;
      --config)
        shift
        confdir=$1
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
        if [[ -d "${confdir}" ]]; then
          HADOOP_CONF_DIR="${confdir}"
        elif [[ -z "${confdir}" ]]; then
          hadoop_error "ERROR: No parameter provided for --config"
          hadoop_exit_with_usage 1
        else
          hadoop_error "ERROR: Cannot find configuration directory \"${confdir}\""
          hadoop_exit_with_usage 1
        fi
      ;;
      --daemon)
        shift
        HADOOP_DAEMON_MODE=$1
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
        if [[ -z "${HADOOP_DAEMON_MODE}" || \
          ! "${HADOOP_DAEMON_MODE}" =~ ^st(art|op|atus)$ ]]; then
          hadoop_error "ERROR: --daemon must be followed by either \"start\", \"stop\", or \"status\"."
          hadoop_exit_with_usage 1
        fi
      ;;
      --debug)
        shift
        HADOOP_SHELL_SCRIPT_DEBUG=true
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+1))
      ;;
      --help|-help|-h|help|--h|--\?|-\?|\?)
        hadoop_exit_with_usage 0
      ;;
      --hostnames)
        shift
        HADOOP_WORKER_NAMES="$1"
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
      ;;
      --hosts)
        shift
        hadoop_populate_workers_file "$1"
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
      ;;
      --loglevel)
        shift
        # shellcheck disable=SC2034
        HADOOP_LOGLEVEL="$1"
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
      ;;
      --reexec)
        shift
        if [[ "${HADOOP_REEXECED_CMD}" = true ]]; then
          hadoop_error "ERROR: re-exec fork bomb prevention: --reexec already called"
          exit 1
        fi
        HADOOP_REEXECED_CMD=true
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+1))
      ;;
      --workers)
        shift
        # shellcheck disable=SC2034
        HADOOP_WORKER_MODE=true
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+1))
      ;;
      *)
        break
      ;;
    esac
  done

  hadoop_debug "hadoop_parse: asking caller to skip ${HADOOP_PARSE_COUNTER}"
}
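# Illustrative usage (not in the upstream script): callers parse the
# generic options first, then shift them away before handling the
# subcommand:
#   hadoop_parse_args "$@"
#   shift "${HADOOP_PARSE_COUNTER}"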
## @description Handle subcommands from main program entries
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_generic_java_subcmd_handler
{
  declare priv_outfile
  declare priv_errfile
  declare priv_pidfile
  declare daemon_outfile
  declare daemon_pidfile
  declare secureuser

  # The default/expected way to determine if a daemon is going to run in secure
  # mode is defined by hadoop_detect_priv_subcmd. If this returns true
  # then set up the secure user var and tell the world we're in secure mode

  if hadoop_detect_priv_subcmd "${HADOOP_SHELL_EXECNAME}" "${HADOOP_SUBCMD}"; then
    HADOOP_SUBCMD_SECURESERVICE=true
    secureuser=$(hadoop_build_custom_subcmd_var "${HADOOP_SHELL_EXECNAME}" "${HADOOP_SUBCMD}" SECURE_USER)

    if ! hadoop_verify_user_resolves "${secureuser}"; then
      hadoop_error "ERROR: User defined in ${secureuser} (${!secureuser}) does not exist. Aborting."
      exit 1
    fi

    HADOOP_SECURE_USER="${!secureuser}"
  fi

  # check if we're running in secure mode.
  # breaking this up from the above lets 3rd parties
  # do things a bit differently
  #
  # secure services require some extra setup
  # if yes, then we need to define all of the priv and daemon stuff
  # if not, then we just need to define daemon stuff.
  # note the daemon vars are purposefully different between the two

  if [[ "${HADOOP_SUBCMD_SECURESERVICE}" = true ]]; then

    hadoop_subcommand_secure_opts "${HADOOP_SHELL_EXECNAME}" "${HADOOP_SUBCMD}"

    hadoop_verify_secure_prereq
    hadoop_setup_secure_service
    priv_outfile="${HADOOP_LOG_DIR}/privileged-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.out"
    priv_errfile="${HADOOP_LOG_DIR}/privileged-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.err"
    priv_pidfile="${HADOOP_PID_DIR}/privileged-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}.pid"
    daemon_outfile="${HADOOP_LOG_DIR}/hadoop-${HADOOP_SECURE_USER}-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.out"
    daemon_pidfile="${HADOOP_PID_DIR}/hadoop-${HADOOP_SECURE_USER}-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}.pid"
  else
    daemon_outfile="${HADOOP_LOG_DIR}/hadoop-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.out"
    daemon_pidfile="${HADOOP_PID_DIR}/hadoop-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}.pid"
  fi

  # are we actually in daemon mode?
  # if yes, use the daemon logger and the appropriate log file.
  if [[ "${HADOOP_DAEMON_MODE}" != "default" ]]; then
    HADOOP_ROOT_LOGGER="${HADOOP_DAEMON_ROOT_LOGGER}"
    if [[ "${HADOOP_SUBCMD_SECURESERVICE}" = true ]]; then
      HADOOP_LOGFILE="hadoop-${HADOOP_SECURE_USER}-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.log"
    else
      HADOOP_LOGFILE="hadoop-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.log"
    fi
  fi

  # finish defining the environment: system properties, env vars, class paths, etc.
  hadoop_finalize

  # do the hard work of launching a daemon or just executing our interactive
  # java class
  if [[ "${HADOOP_SUBCMD_SUPPORTDAEMONIZATION}" = true ]]; then
    if [[ "${HADOOP_SUBCMD_SECURESERVICE}" = true ]]; then
      hadoop_secure_daemon_handler \
        "${HADOOP_DAEMON_MODE}" \
        "${HADOOP_SUBCMD}" \
        "${HADOOP_SECURE_CLASSNAME}" \
        "${daemon_pidfile}" \
        "${daemon_outfile}" \
        "${priv_pidfile}" \
        "${priv_outfile}" \
        "${priv_errfile}" \
        "${HADOOP_SUBCMD_ARGS[@]}"
    else
      hadoop_daemon_handler \
        "${HADOOP_DAEMON_MODE}" \
        "${HADOOP_SUBCMD}" \
        "${HADOOP_CLASSNAME}" \
        "${daemon_pidfile}" \
        "${daemon_outfile}" \
        "${HADOOP_SUBCMD_ARGS[@]}"
    fi
    exit $?
  else
    hadoop_java_exec "${HADOOP_SUBCMD}" "${HADOOP_CLASSNAME}" "${HADOOP_SUBCMD_ARGS[@]}"
  fi
}