#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# we need to declare this globally as an array, which can only
# be done outside of a function
declare -a HADOOP_SUBCMD_USAGE
declare -a HADOOP_OPTION_USAGE
declare -a HADOOP_SUBCMD_USAGE_TYPES

## @description Print a message to stderr
## @audience public
## @stability stable
## @replaceable no
## @param string
function hadoop_error
{
  echo "$*" 1>&2
}

## @description Print a message to stderr if --debug is turned on
## @audience public
## @stability stable
## @replaceable no
## @param string
function hadoop_debug
{
  if [[ -n "${HADOOP_SHELL_SCRIPT_DEBUG}" ]]; then
    echo "DEBUG: $*" 1>&2
  fi
}

## @description Given a filename or dir, return the absolute version of it
## @description This works as an alternative to readlink, which isn't
## @description portable.
## @audience public
## @stability stable
## @param fsobj
## @replaceable no
## @return 0 success
## @return 1 failure
## @return stdout abspath
function hadoop_abs
{
  declare obj=$1
  declare dir
  declare fn
  declare dirret

  if [[ ! -e ${obj} ]]; then
    return 1
  elif [[ -d ${obj} ]]; then
    dir=${obj}
  else
    dir=$(dirname -- "${obj}")
    fn=$(basename -- "${obj}")
    fn="/${fn}"
  fi

  dir=$(cd -P -- "${dir}" >/dev/null 2>/dev/null && pwd -P)
  dirret=$?
  if [[ ${dirret} = 0 ]]; then
    echo "${dir}${fn}"
    return 0
  fi
  return 1
}
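
# Illustrative usage of hadoop_abs (comment only; not part of the upstream
# script), assuming ./etc/hadoop exists relative to the current directory:
#   hadoop_abs ./etc/hadoop    # prints the absolute path and returns 0
#   hadoop_abs ./no-such-file  # prints nothing and returns 1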

## @description Given variable $1 delete $2 from it
## @audience public
## @stability stable
## @replaceable no
function hadoop_delete_entry
{
  if [[ ${!1} =~ \ ${2}\ ]] ; then
    hadoop_debug "Removing ${2} from ${1}"
    eval "${1}"=\""${!1// ${2} }"\"
  fi
}

## @description Given variable $1 add $2 to it
## @audience public
## @stability stable
## @replaceable no
function hadoop_add_entry
{
  if [[ ! ${!1} =~ \ ${2}\ ]] ; then
    hadoop_debug "Adding ${2} to ${1}"
    #shellcheck disable=SC2140
    eval "${1}"=\""${!1} ${2} "\"
  fi
}

## @description Given variable $1 determine if $2 is in it
## @audience public
## @stability stable
## @replaceable no
## @return 0 = yes, 1 = no
function hadoop_verify_entry
{
  # this unfortunately can't really be tested by bats. :(
  # so if this changes, be aware that unit tests effectively
  # do this function in them
  [[ ${!1} =~ \ ${2}\ ]]
}

## @description Check if an array has a given value
## @audience public
## @stability stable
## @replaceable yes
## @param element
## @param array
## @returns 0 = yes
## @returns 1 = no
function hadoop_array_contains
{
  declare element=$1
  shift
  declare val

  if [[ "$#" -eq 0 ]]; then
    return 1
  fi

  for val in "${@}"; do
    if [[ "${val}" == "${element}" ]]; then
      return 0
    fi
  done
  return 1
}
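
# Illustrative usage of hadoop_array_contains (comment only; not part of the
# upstream script):
#   declare -a fruits=("apple" "banana")
#   hadoop_array_contains "apple" "${fruits[@]}"   # returns 0
#   hadoop_array_contains "cherry" "${fruits[@]}"  # returns 1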

## @description Add the `appendstring` if it is not already
## @description present in the given array
## @audience public
## @stability stable
## @replaceable yes
## @param envvar
## @param appendstring
function hadoop_add_array_param
{
  declare arrname=$1
  declare add=$2

  declare arrref="${arrname}[@]"
  declare array=("${!arrref}")

  if ! hadoop_array_contains "${add}" "${array[@]}"; then
    #shellcheck disable=SC1083,SC2086
    eval ${arrname}=\(\"\${array[@]}\" \"${add}\" \)
    hadoop_debug "$1 accepted $2"
  else
    hadoop_debug "$1 declined $2"
  fi
}
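
# Illustrative usage of hadoop_add_array_param (comment only; not part of the
# upstream script):
#   hadoop_add_array_param HADOOP_SUBCMD_USAGE_TYPES "client"
# a second identical call is declined, so the array stays deduped.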

## @description Sort an array (must not contain regexps)
## @audience public
## @stability stable
## @replaceable yes
## @param arrayvar
function hadoop_sort_array
{
  declare arrname=$1
  declare arrref="${arrname}[@]"
  declare array=("${!arrref}")
  declare oifs

  declare globstatus
  declare -a sa

  globstatus=$(set -o | grep noglob | awk '{print $NF}')

  set -f
  oifs=${IFS}

  # shellcheck disable=SC2034
  IFS=$'\n' sa=($(sort <<<"${array[*]}"))

  # shellcheck disable=SC1083
  eval "${arrname}"=\(\"\${sa[@]}\"\)

  IFS=${oifs}
  if [[ "${globstatus}" = off ]]; then
    set +f
  fi
}

## @description Check if we are running with priv
## @description by default, this implementation looks for
## @description EUID=0. For OSes that have true priv
## @description separation, this should be something more complex
## @audience private
## @stability evolving
## @replaceable yes
## @return 1 = no priv
## @return 0 = priv
function hadoop_privilege_check
{
  [[ "${EUID}" = 0 ]]
}

## @description When running as root, execute a command via su
## @description as the given user, or fail if that user cannot
## @description be resolved. When not running as root, just run
## @description the command. (This is intended to be used by the
## @description start-*/stop-* scripts.)
## @audience private
## @stability evolving
## @replaceable yes
## @param user
## @param commandstring
## @return exitstatus
function hadoop_su
{
  declare user=$1
  shift

  if hadoop_privilege_check; then
    if hadoop_verify_user_resolves user; then
      su -l "${user}" -- "$@"
    else
      hadoop_error "ERROR: Refusing to run as root: ${user} account is not found. Aborting."
      return 1
    fi
  else
    "$@"
  fi
}

## @description Execute a command via su when running as root
## @description with extra support for commands that might
## @description legitimately start as root (e.g., datanode)
## @description (This is intended to
## @description be used by the start-*/stop-* scripts.)
## @audience private
## @stability evolving
## @replaceable no
## @param user
## @param commandstring
## @return exitstatus
function hadoop_uservar_su
{
  ## startup matrix:
  #
  # if $EUID != 0, then exec
  # if $EUID = 0 then
  #   if hdfs_subcmd_user is defined, call hadoop_su to exec
  #   if hdfs_subcmd_user is not defined, error
  #
  # For secure daemons, this means both the secure and insecure env vars need to be
  # defined. e.g., HDFS_DATANODE_USER=root HDFS_DATANODE_SECURE_USER=hdfs
  # This function will pick up the "normal" var, switch to that user, then
  # execute the command which will then pick up the "secure" version.
  #

  declare program=$1
  declare command=$2
  shift 2

  declare uprogram
  declare ucommand
  declare uvar
  declare svar

  if hadoop_privilege_check; then
    uvar=$(hadoop_build_custom_subcmd_var "${program}" "${command}" USER)

    svar=$(hadoop_build_custom_subcmd_var "${program}" "${command}" SECURE_USER)

    if [[ -n "${!uvar}" ]]; then
      hadoop_su "${!uvar}" "$@"
    elif [[ -n "${!svar}" ]]; then
      ## if we are here, then SECURE_USER with no USER defined
      ## we are already privileged, so just run the command and hope
      ## for the best
      "$@"
    else
      hadoop_error "ERROR: Attempting to operate on ${program} ${command} as root"
      hadoop_error "ERROR: but there is no ${uvar} defined. Aborting operation."
      return 1
    fi
  else
    "$@"
  fi
}
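
# Illustrative configuration (comment only; not part of the upstream script):
# with HDFS_DATANODE_USER=root and HDFS_DATANODE_SECURE_USER=hdfs set in
# hadoop-env.sh, a root invocation such as
#   hadoop_uservar_su hdfs datanode hdfs --daemon start datanode
# picks up the "normal" user (root) here, and the launched command then
# picks up the "secure" user (hdfs).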

## @description Add a subcommand to the usage output
## @audience private
## @stability evolving
## @replaceable no
## @param subcommand
## @param subcommandtype
## @param subcommanddesc
function hadoop_add_subcommand
{
  declare subcmd=$1
  declare subtype=$2
  declare text=$3

  hadoop_debug "${subcmd} as a ${subtype}"

  hadoop_add_array_param HADOOP_SUBCMD_USAGE_TYPES "${subtype}"

  # done in this order so that sort works later
  HADOOP_SUBCMD_USAGE[${HADOOP_SUBCMD_USAGE_COUNTER}]="${subcmd}@${subtype}@${text}"
  ((HADOOP_SUBCMD_USAGE_COUNTER=HADOOP_SUBCMD_USAGE_COUNTER+1))
}

## @description Add an option to the usage output
## @audience private
## @stability evolving
## @replaceable no
## @param subcommand
## @param subcommanddesc
function hadoop_add_option
{
  local option=$1
  local text=$2

  HADOOP_OPTION_USAGE[${HADOOP_OPTION_USAGE_COUNTER}]="${option}@${text}"
  ((HADOOP_OPTION_USAGE_COUNTER=HADOOP_OPTION_USAGE_COUNTER+1))
}
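
# Illustrative usage (comment only; not part of the upstream script):
#   hadoop_add_option "--debug" "turn on shell script debug mode"
#   hadoop_add_subcommand "version" client "print the version"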

## @description Reset the usage information to blank
## @audience private
## @stability evolving
## @replaceable no
function hadoop_reset_usage
{
  HADOOP_SUBCMD_USAGE=()
  HADOOP_OPTION_USAGE=()
  HADOOP_SUBCMD_USAGE_TYPES=()
  HADOOP_SUBCMD_USAGE_COUNTER=0
  HADOOP_OPTION_USAGE_COUNTER=0
}

## @description Print a screen-size aware two-column output
## @description if reqtype is not null, only print those requested
## @audience private
## @stability evolving
## @replaceable no
## @param reqtype
## @param array
function hadoop_generic_columnprinter
{
  declare reqtype=$1
  shift
  declare -a input=("$@")
  declare -i i=0
  declare -i counter=0
  declare line
  declare text
  declare option
  declare giventext
  declare -i maxoptsize
  declare -i foldsize
  declare -a tmpa
  declare numcols
  declare brup

  if [[ -n "${COLUMNS}" ]]; then
    numcols=${COLUMNS}
  else
    numcols=$(tput cols 2>/dev/null)
    COLUMNS=${numcols}
  fi

  if [[ -z "${numcols}"
     || ! "${numcols}" =~ ^[0-9]+$ ]]; then
    numcols=75
  else
    ((numcols=numcols-5))
  fi

  while read -r line; do
    tmpa[${counter}]=${line}
    ((counter=counter+1))
    IFS='@' read -ra brup <<< "${line}"
    option="${brup[0]}"
    if [[ ${#option} -gt ${maxoptsize} ]]; then
      maxoptsize=${#option}
    fi
  done < <(for text in "${input[@]}"; do
    echo "${text}"
  done | sort)

  i=0
  ((foldsize=numcols-maxoptsize))

  until [[ $i -eq ${#tmpa[@]} ]]; do
    IFS='@' read -ra brup <<< "${tmpa[$i]}"
    option="${brup[0]}"
    cmdtype="${brup[1]}"
    giventext="${brup[2]}"

    if [[ -n "${reqtype}" ]]; then
      if [[ "${cmdtype}" != "${reqtype}" ]]; then
        ((i=i+1))
        continue
      fi
    fi

    if [[ -z "${giventext}" ]]; then
      giventext=${cmdtype}
    fi

    while read -r line; do
      printf "%-${maxoptsize}s   %-s\n" "${option}" "${line}"
      option=" "
    done < <(echo "${giventext}" | fold -s -w ${foldsize})
    ((i=i+1))
  done
}

## @description generate standard usage output
## @description and optionally takes a class
## @audience private
## @stability evolving
## @replaceable no
## @param execname
## @param true|false
## @param [text to use in place of SUBCOMMAND]
function hadoop_generate_usage
{
  declare cmd=$1
  declare takesclass=$2
  declare subcmdtext=${3:-"SUBCOMMAND"}
  declare haveoptions
  declare optstring
  declare havesubs
  declare subcmdstring
  declare cmdtype

  cmd=${cmd##*/}

  if [[ -n "${HADOOP_OPTION_USAGE_COUNTER}"
     && "${HADOOP_OPTION_USAGE_COUNTER}" -gt 0 ]]; then
    haveoptions=true
    optstring=" [OPTIONS]"
  fi

  if [[ -n "${HADOOP_SUBCMD_USAGE_COUNTER}"
     && "${HADOOP_SUBCMD_USAGE_COUNTER}" -gt 0 ]]; then
    havesubs=true
    subcmdstring=" ${subcmdtext} [${subcmdtext} OPTIONS]"
  fi

  echo "Usage: ${cmd}${optstring}${subcmdstring}"
  if [[ ${takesclass} = true ]]; then
    echo " or    ${cmd}${optstring} CLASSNAME [CLASSNAME OPTIONS]"
    echo "  where CLASSNAME is a user-provided Java class"
  fi

  if [[ "${haveoptions}" = true ]]; then
    echo ""
    echo "  OPTIONS is none or any of:"
    echo ""

    hadoop_generic_columnprinter "" "${HADOOP_OPTION_USAGE[@]}"
  fi

  if [[ "${havesubs}" = true ]]; then
    echo ""
    echo "  ${subcmdtext} is one of:"
    echo ""

    if [[ "${#HADOOP_SUBCMD_USAGE_TYPES[@]}" -gt 0 ]]; then

      hadoop_sort_array HADOOP_SUBCMD_USAGE_TYPES
      for subtype in "${HADOOP_SUBCMD_USAGE_TYPES[@]}"; do
        #shellcheck disable=SC2086
        cmdtype="$(tr '[:lower:]' '[:upper:]' <<< ${subtype:0:1})${subtype:1}"
        printf "\n    %s Commands:\n\n" "${cmdtype}"
        hadoop_generic_columnprinter "${subtype}" "${HADOOP_SUBCMD_USAGE[@]}"
      done
    else
      hadoop_generic_columnprinter "" "${HADOOP_SUBCMD_USAGE[@]}"
    fi
    echo ""
    echo "${subcmdtext} may print help when invoked w/o parameters or with -h."
  fi
}

## @description Replace `oldvar` with `newvar` if `oldvar` exists.
## @audience public
## @stability stable
## @replaceable yes
## @param oldvar
## @param newvar
function hadoop_deprecate_envvar
{
  local oldvar=$1
  local newvar=$2
  local oldval=${!oldvar}
  local newval=${!newvar}

  if [[ -n "${oldval}" ]]; then
    hadoop_error "WARNING: ${oldvar} has been replaced by ${newvar}. Using value of ${oldvar}."
    # shellcheck disable=SC2086
    eval ${newvar}=\"${oldval}\"

    # shellcheck disable=SC2086
    newval=${oldval}

    # shellcheck disable=SC2086
    eval ${newvar}=\"${newval}\"
  fi
}
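
# Illustrative usage (comment only; not part of the upstream script):
#   hadoop_deprecate_envvar HADOOP_PREFIX HADOOP_HOME
# warns if the old variable is set and copies its value to the new one.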

## @description Declare `var` being used and print its value.
## @audience public
## @stability stable
## @replaceable yes
## @param var
function hadoop_using_envvar
{
  local var=$1
  local val=${!var}

  if [[ -n "${val}" ]]; then
    hadoop_debug "${var} = ${val}"
  fi
}

## @description Create the directory 'dir'.
## @audience public
## @stability stable
## @replaceable yes
## @param dir
function hadoop_mkdir
{
  local dir=$1

  if [[ ! -w "${dir}" ]] && [[ ! -d "${dir}" ]]; then
    hadoop_error "WARNING: ${dir} does not exist. Creating."
    if ! mkdir -p "${dir}"; then
      hadoop_error "ERROR: Unable to create ${dir}. Aborting."
      exit 1
    fi
  fi
}

## @description Bootstraps the Hadoop shell environment
## @audience private
## @stability evolving
## @replaceable no
function hadoop_bootstrap
{
  # the root of the Hadoop installation
  # See HADOOP-6255 for the expected directory structure layout

  if [[ -n "${DEFAULT_LIBEXEC_DIR}" ]]; then
    hadoop_error "WARNING: DEFAULT_LIBEXEC_DIR ignored. It has been replaced by HADOOP_DEFAULT_LIBEXEC_DIR."
  fi

  # By now, HADOOP_LIBEXEC_DIR should have been defined upstream
  # We can piggyback off of that to figure out where the default
  # HADOOP_HOME should be. This allows us to run without
  # HADOOP_HOME ever being defined by a human! As a consequence
  # HADOOP_LIBEXEC_DIR now becomes perhaps the single most powerful
  # env var within Hadoop.
  if [[ -z "${HADOOP_LIBEXEC_DIR}" ]]; then
    hadoop_error "HADOOP_LIBEXEC_DIR is not defined. Exiting."
    exit 1
  fi
  HADOOP_DEFAULT_PREFIX=$(cd -P -- "${HADOOP_LIBEXEC_DIR}/.." >/dev/null && pwd -P)
  HADOOP_HOME=${HADOOP_HOME:-$HADOOP_DEFAULT_PREFIX}
  export HADOOP_HOME

  #
  # short-cuts. vendors may redefine these as well, preferably
  # in hadoop-layouts.sh
  #
  HADOOP_COMMON_DIR=${HADOOP_COMMON_DIR:-"share/hadoop/common"}
  HADOOP_COMMON_LIB_JARS_DIR=${HADOOP_COMMON_LIB_JARS_DIR:-"share/hadoop/common/lib"}
  HADOOP_COMMON_LIB_NATIVE_DIR=${HADOOP_COMMON_LIB_NATIVE_DIR:-"lib/native"}
  HDFS_DIR=${HDFS_DIR:-"share/hadoop/hdfs"}
  HDFS_LIB_JARS_DIR=${HDFS_LIB_JARS_DIR:-"share/hadoop/hdfs/lib"}
  YARN_DIR=${YARN_DIR:-"share/hadoop/yarn"}
  YARN_LIB_JARS_DIR=${YARN_LIB_JARS_DIR:-"share/hadoop/yarn/lib"}
  MAPRED_DIR=${MAPRED_DIR:-"share/hadoop/mapreduce"}
  MAPRED_LIB_JARS_DIR=${MAPRED_LIB_JARS_DIR:-"share/hadoop/mapreduce/lib"}
  HADOOP_TOOLS_HOME=${HADOOP_TOOLS_HOME:-${HADOOP_HOME}}
  HADOOP_TOOLS_DIR=${HADOOP_TOOLS_DIR:-"share/hadoop/tools"}
  HADOOP_TOOLS_LIB_JARS_DIR=${HADOOP_TOOLS_LIB_JARS_DIR:-"${HADOOP_TOOLS_DIR}/lib"}

  # by default, whatever we are about to run doesn't support
  # daemonization
  HADOOP_SUBCMD_SUPPORTDAEMONIZATION=false

  # by default, we have not been self-re-execed
  HADOOP_REEXECED_CMD=false

  HADOOP_SUBCMD_SECURESERVICE=false

  # This is the default we claim in hadoop-env.sh
  JSVC_HOME=${JSVC_HOME:-"/usr/bin"}

  # usage output set to zero
  hadoop_reset_usage

  export HADOOP_OS_TYPE=${HADOOP_OS_TYPE:-$(uname -s)}

  # defaults
  export HADOOP_OPTS=${HADOOP_OPTS:-"-Djava.net.preferIPv4Stack=true"}
  hadoop_debug "Initial HADOOP_OPTS=${HADOOP_OPTS}"
}

## @description Locate Hadoop's configuration directory
## @audience private
## @stability evolving
## @replaceable no
function hadoop_find_confdir
{
  local conf_dir

  # An attempt at compatibility with some Hadoop 1.x
  # installs.
  if [[ -e "${HADOOP_HOME}/conf/hadoop-env.sh" ]]; then
    conf_dir="conf"
  else
    conf_dir="etc/hadoop"
  fi
  export HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-${HADOOP_HOME}/${conf_dir}}"

  hadoop_debug "HADOOP_CONF_DIR=${HADOOP_CONF_DIR}"
}

## @description Validate ${HADOOP_CONF_DIR}
## @audience public
## @stability stable
## @replaceable yes
## @return will exit on failure conditions
function hadoop_verify_confdir
{
  # Check only log4j.properties by default.
  # --loglevel does not work without logger settings in log4j.properties.
  if [[ ! -f "${HADOOP_CONF_DIR}/log4j.properties" ]]; then
    hadoop_error "WARNING: log4j.properties is not found. HADOOP_CONF_DIR may be incomplete."
  fi
}

## @description Import the hadoop-env.sh settings
## @audience private
## @stability evolving
## @replaceable no
function hadoop_exec_hadoopenv
{
  if [[ -z "${HADOOP_ENV_PROCESSED}" ]]; then
    if [[ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]]; then
      export HADOOP_ENV_PROCESSED=true
      # shellcheck source=./hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh
      . "${HADOOP_CONF_DIR}/hadoop-env.sh"
    fi
  fi
}

## @description Import the replaced functions
## @audience private
## @stability evolving
## @replaceable no
function hadoop_exec_userfuncs
{
  if [[ -e "${HADOOP_CONF_DIR}/hadoop-user-functions.sh" ]]; then
    # shellcheck disable=SC1090
    . "${HADOOP_CONF_DIR}/hadoop-user-functions.sh"
  fi
}

## @description Read the user's settings. This provides for users to
## @description override and/or append hadoop-env.sh. It is not meant
## @description as a complete system override.
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_exec_user_hadoopenv
{
  if [[ -f "${HOME}/.hadoop-env" ]]; then
    hadoop_debug "Applying the user's .hadoop-env"
    # shellcheck disable=SC1090
    . "${HOME}/.hadoop-env"
  fi
}

## @description Read the user's settings. This provides for users to
## @description run Hadoop Shell API after system bootstrap
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_exec_hadooprc
{
  if [[ -f "${HOME}/.hadooprc" ]]; then
    hadoop_debug "Applying the user's .hadooprc"
    # shellcheck disable=SC1090
    . "${HOME}/.hadooprc"
  fi
}

## @description Import shellprofile.d content
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_import_shellprofiles
{
  local i
  local files1
  local files2

  if [[ -d "${HADOOP_LIBEXEC_DIR}/shellprofile.d" ]]; then
    files1=(${HADOOP_LIBEXEC_DIR}/shellprofile.d/*.sh)
    hadoop_debug "shellprofiles: ${files1[*]}"
  else
    hadoop_error "WARNING: ${HADOOP_LIBEXEC_DIR}/shellprofile.d doesn't exist. Functionality may not work."
  fi

  if [[ -d "${HADOOP_CONF_DIR}/shellprofile.d" ]]; then
    files2=(${HADOOP_CONF_DIR}/shellprofile.d/*.sh)
  fi

  # enable bundled shellprofiles that come
  # from hadoop-tools. This converts the user-facing HADOOP_OPTIONAL_TOOLS
  # to the HADOOP_TOOLS_OPTIONS that the shell profiles expect.
  # See dist-tools-hooks-maker for how the example HADOOP_OPTIONAL_TOOLS
  # gets populated into hadoop-env.sh

  for i in ${HADOOP_OPTIONAL_TOOLS//,/ }; do
    hadoop_add_entry HADOOP_TOOLS_OPTIONS "${i}"
  done

  for i in "${files1[@]}" "${files2[@]}"
  do
    if [[ -n "${i}"
      && -f "${i}" ]]; then
      hadoop_debug "Profiles: importing ${i}"
      # shellcheck disable=SC1090
      . "${i}"
    fi
  done
}

## @description Initialize the registered shell profiles
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_shellprofiles_init
{
  local i

  for i in ${HADOOP_SHELL_PROFILES}
  do
    if declare -F _${i}_hadoop_init >/dev/null ; then
      hadoop_debug "Profiles: ${i} init"
      # shellcheck disable=SC2086
      _${i}_hadoop_init
    fi
  done
}

## @description Apply the shell profile classpath additions
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_shellprofiles_classpath
{
  local i

  for i in ${HADOOP_SHELL_PROFILES}
  do
    if declare -F _${i}_hadoop_classpath >/dev/null ; then
      hadoop_debug "Profiles: ${i} classpath"
      # shellcheck disable=SC2086
      _${i}_hadoop_classpath
    fi
  done
}

## @description Apply the shell profile native library additions
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_shellprofiles_nativelib
{
  local i

  for i in ${HADOOP_SHELL_PROFILES}
  do
    if declare -F _${i}_hadoop_nativelib >/dev/null ; then
      hadoop_debug "Profiles: ${i} nativelib"
      # shellcheck disable=SC2086
      _${i}_hadoop_nativelib
    fi
  done
}

## @description Apply the shell profile final configuration
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_shellprofiles_finalize
{
  local i

  for i in ${HADOOP_SHELL_PROFILES}
  do
    if declare -F _${i}_hadoop_finalize >/dev/null ; then
      hadoop_debug "Profiles: ${i} finalize"
      # shellcheck disable=SC2086
      _${i}_hadoop_finalize
    fi
  done
}
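
# Illustrative shellprofile skeleton (comment only; not part of the upstream
# script). A file such as shellprofile.d/example.sh registers itself with
# hadoop_add_profile and may then define any of the
# _<name>_hadoop_{init,classpath,nativelib,finalize} hooks dispatched above:
#
#   hadoop_add_profile example
#
#   function _example_hadoop_classpath
#   {
#     hadoop_add_classpath "${HADOOP_TOOLS_HOME}/${HADOOP_TOOLS_DIR}/example"'/*'
#   }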

## @description Initialize the Hadoop shell environment, now that
## @description user settings have been imported
## @audience private
## @stability evolving
## @replaceable no
function hadoop_basic_init
{
  # Some of these are also set in hadoop-env.sh.
  # we still set them here just in case hadoop-env.sh is
  # broken in some way, set up defaults, etc.
  #
  # but it is important to note that if you update these
  # you also need to update hadoop-env.sh as well!!!

  CLASSPATH=""
  hadoop_debug "Initialize CLASSPATH"

  if [[ -z "${HADOOP_COMMON_HOME}" ]] &&
     [[ -d "${HADOOP_HOME}/${HADOOP_COMMON_DIR}" ]]; then
    export HADOOP_COMMON_HOME="${HADOOP_HOME}"
  fi

  # default policy file for service-level authorization
  HADOOP_POLICYFILE=${HADOOP_POLICYFILE:-"hadoop-policy.xml"}

  # define HADOOP_HDFS_HOME
  if [[ -z "${HADOOP_HDFS_HOME}" ]] &&
     [[ -d "${HADOOP_HOME}/${HDFS_DIR}" ]]; then
    export HADOOP_HDFS_HOME="${HADOOP_HOME}"
  fi

  # define HADOOP_YARN_HOME
  if [[ -z "${HADOOP_YARN_HOME}" ]] &&
     [[ -d "${HADOOP_HOME}/${YARN_DIR}" ]]; then
    export HADOOP_YARN_HOME="${HADOOP_HOME}"
  fi

  # define HADOOP_MAPRED_HOME
  if [[ -z "${HADOOP_MAPRED_HOME}" ]] &&
     [[ -d "${HADOOP_HOME}/${MAPRED_DIR}" ]]; then
    export HADOOP_MAPRED_HOME="${HADOOP_HOME}"
  fi

  if [[ ! -d "${HADOOP_COMMON_HOME}" ]]; then
    hadoop_error "ERROR: Invalid HADOOP_COMMON_HOME"
    exit 1
  fi

  if [[ ! -d "${HADOOP_HDFS_HOME}" ]]; then
    hadoop_error "ERROR: Invalid HADOOP_HDFS_HOME"
    exit 1
  fi

  if [[ ! -d "${HADOOP_YARN_HOME}" ]]; then
    hadoop_error "ERROR: Invalid HADOOP_YARN_HOME"
    exit 1
  fi

  if [[ ! -d "${HADOOP_MAPRED_HOME}" ]]; then
    hadoop_error "ERROR: Invalid HADOOP_MAPRED_HOME"
    exit 1
  fi

  # if for some reason the shell doesn't have $USER defined
  # (e.g., ssh'd in to execute a command)
  # let's get the effective username and use that
  USER=${USER:-$(id -nu)}
  HADOOP_IDENT_STRING=${HADOOP_IDENT_STRING:-$USER}
  HADOOP_LOG_DIR=${HADOOP_LOG_DIR:-"${HADOOP_HOME}/logs"}
  HADOOP_LOGFILE=${HADOOP_LOGFILE:-hadoop.log}
  HADOOP_LOGLEVEL=${HADOOP_LOGLEVEL:-INFO}
  HADOOP_NICENESS=${HADOOP_NICENESS:-0}
  HADOOP_STOP_TIMEOUT=${HADOOP_STOP_TIMEOUT:-5}
  HADOOP_PID_DIR=${HADOOP_PID_DIR:-/tmp}
  HADOOP_ROOT_LOGGER=${HADOOP_ROOT_LOGGER:-${HADOOP_LOGLEVEL},console}
  HADOOP_DAEMON_ROOT_LOGGER=${HADOOP_DAEMON_ROOT_LOGGER:-${HADOOP_LOGLEVEL},RFA}
  HADOOP_SECURITY_LOGGER=${HADOOP_SECURITY_LOGGER:-INFO,NullAppender}
  HADOOP_SSH_OPTS=${HADOOP_SSH_OPTS-"-o BatchMode=yes -o StrictHostKeyChecking=no -o ConnectTimeout=10s"}
  HADOOP_SECURE_LOG_DIR=${HADOOP_SECURE_LOG_DIR:-${HADOOP_LOG_DIR}}
  HADOOP_SECURE_PID_DIR=${HADOOP_SECURE_PID_DIR:-${HADOOP_PID_DIR}}
  HADOOP_SSH_PARALLEL=${HADOOP_SSH_PARALLEL:-10}
}

## @description Set the worker support information to the contents
## @description of `filename`
## @audience public
## @stability stable
## @replaceable no
## @param filename
## @return will exit if file does not exist
function hadoop_populate_workers_file
{
  local workersfile=$1
  shift
  if [[ -f "${workersfile}" ]]; then
    HADOOP_WORKERS="${workersfile}"
  elif [[ -f "${HADOOP_CONF_DIR}/${workersfile}" ]]; then
    HADOOP_WORKERS="${HADOOP_CONF_DIR}/${workersfile}"
  else
    hadoop_error "ERROR: Cannot find hosts file \"${workersfile}\""
    hadoop_exit_with_usage 1
  fi
}

## @description Rotates the given `file` until `number` of
## @description files exist.
## @audience public
## @stability stable
## @replaceable no
## @param filename
## @param [number]
## @return $? will contain last mv's return value
function hadoop_rotate_log
{
  #
  # Users are likely to replace this one for something
  # that gzips or uses dates or who knows what.
  #
  # be aware that &1 and &2 might go through here
  # so don't do anything too crazy...
  #
  local log=$1;
  local num=${2:-5};

  if [[ -f "${log}" ]]; then # rotate logs
    while [[ ${num} -gt 1 ]]; do
      #shellcheck disable=SC2086
      let prev=${num}-1
      if [[ -f "${log}.${prev}" ]]; then
        mv "${log}.${prev}" "${log}.${num}"
      fi
      num=${prev}
    done
    mv "${log}" "${log}.${num}"
  fi
}
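
# Illustrative effect (comment only; not part of the upstream script):
#   hadoop_rotate_log /var/log/hadoop/namenode.log 3
# moves namenode.log.2 to namenode.log.3, namenode.log.1 to namenode.log.2,
# and finally namenode.log to namenode.log.1.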

## @description Via ssh, log into `hostname` and run `command`
## @audience private
## @stability evolving
## @replaceable yes
## @param hostname
## @param command
## @param [...]
function hadoop_actual_ssh
{
  # we are passing this function to xargs
  # should get hostname followed by rest of command line
  local worker=$1
  shift

  # shellcheck disable=SC2086
  ssh ${HADOOP_SSH_OPTS} ${worker} $"${@// /\\ }" 2>&1 | sed "s/^/$worker: /"
}

## @description Connect to ${HADOOP_WORKERS} or ${HADOOP_WORKER_NAMES}
## @description and execute command.
## @audience private
## @stability evolving
## @replaceable yes
## @param command
## @param [...]
function hadoop_connect_to_hosts
{
  # shellcheck disable=SC2124
  local params="$@"
  local worker_file
  local tmpslvnames

  #
  # ssh (or whatever) to a host
  #
  # User can specify hostnames or a file where the hostnames are (not both)
  if [[ -n "${HADOOP_WORKERS}" && -n "${HADOOP_WORKER_NAMES}" ]] ; then
    hadoop_error "ERROR: Both HADOOP_WORKERS and HADOOP_WORKER_NAMES were defined. Aborting."
    exit 1
  elif [[ -z "${HADOOP_WORKER_NAMES}" ]]; then
    if [[ -n "${HADOOP_WORKERS}" ]]; then
      worker_file=${HADOOP_WORKERS}
    elif [[ -f "${HADOOP_CONF_DIR}/workers" ]]; then
      worker_file=${HADOOP_CONF_DIR}/workers
    elif [[ -f "${HADOOP_CONF_DIR}/slaves" ]]; then
      hadoop_error "WARNING: 'slaves' file has been deprecated. Please use 'workers' file instead."
      worker_file=${HADOOP_CONF_DIR}/slaves
    fi
  fi

  # if pdsh is available, let's use it. otherwise default
  # to a loop around ssh. (ugh)
  if [[ -e '/usr/bin/pdsh' ]]; then
    if [[ -z "${HADOOP_WORKER_NAMES}" ]] ; then
      # if we were given a file, just let pdsh deal with it.
      # shellcheck disable=SC2086
      PDSH_SSH_ARGS_APPEND="${HADOOP_SSH_OPTS}" pdsh \
        -f "${HADOOP_SSH_PARALLEL}" -w ^"${worker_file}" $"${@// /\\ }" 2>&1
    else
      # no spaces allowed in the pdsh arg host list
      # shellcheck disable=SC2086
      tmpslvnames=$(echo ${HADOOP_WORKER_NAMES} | tr -s ' ' ,)
      PDSH_SSH_ARGS_APPEND="${HADOOP_SSH_OPTS}" pdsh \
        -f "${HADOOP_SSH_PARALLEL}" \
        -w "${tmpslvnames}" $"${@// /\\ }" 2>&1
    fi
  else
    if [[ -z "${HADOOP_WORKER_NAMES}" ]]; then
      HADOOP_WORKER_NAMES=$(sed 's/#.*$//;/^$/d' "${worker_file}")
    fi
    hadoop_connect_to_hosts_without_pdsh "${params}"
  fi
}

## @description Connect to ${HADOOP_WORKER_NAMES} and execute command
## @description under the environment which does not support pdsh.
## @audience private
## @stability evolving
## @replaceable yes
## @param command
## @param [...]
function hadoop_connect_to_hosts_without_pdsh
{
  # shellcheck disable=SC2124
  local params="$@"
  local workers=(${HADOOP_WORKER_NAMES})
  for (( i = 0; i < ${#workers[@]}; i++ ))
  do
    if (( i != 0 && i % HADOOP_SSH_PARALLEL == 0 )); then
      wait
    fi
    # shellcheck disable=SC2086
    hadoop_actual_ssh "${workers[$i]}" ${params} &
  done
  wait
}

## @description Utility routine to handle --workers mode
## @audience private
## @stability evolving
## @replaceable yes
## @param commandarray
function hadoop_common_worker_mode_execute
{
  #
  # input should be the command line as given by the user
  # in the form of an array
  #
  local argv=("$@")

  # if --workers is still on the command line, remove it
  # to prevent loops
  # Also remove --hostnames and --hosts along with arg values
  local argsSize=${#argv[@]};
  for (( i = 0; i < argsSize; i++ ))
  do
    if [[ "${argv[$i]}" =~ ^--workers$ ]]; then
      unset argv[$i]
    elif [[ "${argv[$i]}" =~ ^--hostnames$ ]] ||
      [[ "${argv[$i]}" =~ ^--hosts$ ]]; then
      unset argv[$i];
      let i++;
      unset argv[$i];
    fi
  done
  if [[ ${QATESTMODE} = true ]]; then
    echo "${argv[@]}"
    return
  fi
  hadoop_connect_to_hosts -- "${argv[@]}"
}

## @description Verify that a shell command was passed a valid
## @description class name
## @audience public
## @stability stable
## @replaceable yes
## @param classname
## @return 0 = success
## @return 1 = failure w/user message
function hadoop_validate_classname
{
  local class=$1
  shift 1

  if [[ ! ${class} =~ \. ]]; then
    # assuming the arg is a typo of a command if it does not contain ".".
    # a class belonging to no package is not allowed as a result.
    hadoop_error "ERROR: ${class} is not COMMAND nor fully qualified CLASSNAME."
    return 1
  fi
  return 0
}

## @description Append the `appendstring` if `checkstring` is not
## @description present in the given `envvar`
## @audience public
## @stability stable
## @replaceable yes
## @param envvar
## @param checkstring
## @param appendstring
function hadoop_add_param
{
  #
  # general param dedupe..
  # $1 is what we are adding to
  # $2 is the name of what we want to add (key)
  # $3 is the key+value of what we're adding
  #
  # doing it this way allows us to support all sorts of
  # different syntaxes, just so long as they are space
  # delimited
  #
  if [[ ! ${!1} =~ $2 ]] ; then
    #shellcheck disable=SC2140
    eval "$1"="'${!1} $3'"
    if [[ ${!1:0:1} = ' ' ]]; then
      #shellcheck disable=SC2140
      eval "$1"="'${!1# }'"
    fi
    hadoop_debug "$1 accepted $3"
  else
    hadoop_debug "$1 declined $3"
  fi
}
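
# Illustrative usage (comment only; not part of the upstream script):
#   hadoop_add_param HADOOP_OPTS Xmx "-Xmx2g"
# a later call with the same key (Xmx) is declined, so the JVM flag is not
# duplicated in HADOOP_OPTS.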

## @description Register the given `shellprofile` to the Hadoop
## @description shell subsystem
## @audience public
## @stability stable
## @replaceable yes
## @param shellprofile
function hadoop_add_profile
{
  # shellcheck disable=SC2086
  hadoop_add_param HADOOP_SHELL_PROFILES $1 $1
}

## @description Add a file system object (directory, file,
## @description wildcard, ...) to the classpath. Optionally provide
## @description a hint as to where in the classpath it should go.
## @audience public
## @stability stable
## @replaceable yes
## @param object
## @param [before|after]
## @return 0 = success (added or duplicate)
## @return 1 = failure (doesn't exist or some other reason)
function hadoop_add_classpath
{
  # However, with classpath (& JLP), we can do dedupe
  # along with some sanity checking (e.g., missing directories)
  # since we have a better idea of what is legal
  #
  # for wildcard at end, we can
  # at least check the dir exists
  if [[ $1 =~ ^.*\*$ ]]; then
    local mp
    mp=$(dirname "$1")
    if [[ ! -d "${mp}" ]]; then
      hadoop_debug "Rejected CLASSPATH: $1 (not a dir)"
      return 1
    fi

  # no wildcard in the middle, so check existence
  # (doesn't matter *what* it is)
  elif [[ ! $1 =~ ^.*\*.*$ ]] && [[ ! -e "$1" ]]; then
    hadoop_debug "Rejected CLASSPATH: $1 (does not exist)"
    return 1
  fi
  if [[ -z "${CLASSPATH}" ]]; then
    CLASSPATH=$1
    hadoop_debug "Initial CLASSPATH=$1"
  elif [[ ":${CLASSPATH}:" != *":$1:"* ]]; then
    if [[ "$2" = "before" ]]; then
      CLASSPATH="$1:${CLASSPATH}"
      hadoop_debug "Prepend CLASSPATH: $1"
    else
      CLASSPATH+=:$1
      hadoop_debug "Append CLASSPATH: $1"
    fi
  else
    hadoop_debug "Dupe CLASSPATH: $1"
  fi
  return 0
}
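
# Illustrative usage (comment only; not part of the upstream script):
#   hadoop_add_classpath "${HADOOP_CONF_DIR}" before
#   hadoop_add_classpath "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_DIR}"'/*'
# duplicates return success without re-adding; missing paths are rejected
# with status 1.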

## @description Add a file system object (directory, file,
## @description wildcard, ...) to the colonpath. Optionally provide
## @description a hint as to where in the colonpath it should go.
## @description Prior to adding, objects are checked for duplication
## @description and for existence. Many other functions use
## @description this function as their base implementation
## @description including `hadoop_add_javalibpath` and `hadoop_add_ldlibpath`.
## @audience public
## @stability stable
## @replaceable yes
## @param envvar
## @param object
## @param [before|after]
## @return 0 = success (added or duplicate)
## @return 1 = failure (doesn't exist or some other reason)
function hadoop_add_colonpath
{
  # this is CLASSPATH, JLP, etc but with dedupe but no
  # other checking
  if [[ -d "${2}" ]] && [[ ":${!1}:" != *":$2:"* ]]; then
    if [[ -z "${!1}" ]]; then
      # shellcheck disable=SC2086
      eval $1="'$2'"
      hadoop_debug "Initial colonpath($1): $2"
    elif [[ "$3" = "before" ]]; then
      # shellcheck disable=SC2086
      eval $1="'$2:${!1}'"
      hadoop_debug "Prepend colonpath($1): $2"
    else
      # shellcheck disable=SC2086
      eval $1+=":'$2'"
      hadoop_debug "Append colonpath($1): $2"
    fi
    return 0
  fi

  hadoop_debug "Rejected colonpath($1): $2"
  return 1
}

## @description Add a file system object (directory, file,
## @description wildcard, ...) to the Java JNI path. Optionally
## @description provide a hint as to where in the Java JNI path
## @description it should go.
## @audience public
## @stability stable
## @replaceable yes
## @param object
## @param [before|after]
## @return 0 = success (added or duplicate)
## @return 1 = failure (doesn't exist or some other reason)
function hadoop_add_javalibpath
{
  # specialized function for a common use case
  hadoop_add_colonpath JAVA_LIBRARY_PATH "$1" "$2"
}

## @description Add a file system object (directory, file,
## @description wildcard, ...) to the LD_LIBRARY_PATH. Optionally
## @description provide a hint as to where in the LD_LIBRARY_PATH
## @description it should go.
## @audience public
## @stability stable
## @replaceable yes
## @param object
## @param [before|after]
## @return 0 = success (added or duplicate)
## @return 1 = failure (doesn't exist or some other reason)
function hadoop_add_ldlibpath
{
  local status
  # specialized function for a common use case
  hadoop_add_colonpath LD_LIBRARY_PATH "$1" "$2"
  status=$?

  # note that we export this
  export LD_LIBRARY_PATH
  return ${status}
}

## @description Add the common/core Hadoop components to the
## @description environment
## @audience private
## @stability evolving
## @replaceable yes
## @returns 1 on failure, may exit
## @returns 0 on success
function hadoop_add_common_to_classpath
{
  #
  # get all of the common jars+config in the path
  #
  if [[ -z "${HADOOP_COMMON_HOME}"
    || -z "${HADOOP_COMMON_DIR}"
    || -z "${HADOOP_COMMON_LIB_JARS_DIR}" ]]; then
    hadoop_debug "COMMON_HOME=${HADOOP_COMMON_HOME}"
    hadoop_debug "COMMON_DIR=${HADOOP_COMMON_DIR}"
    hadoop_debug "COMMON_LIB_JARS_DIR=${HADOOP_COMMON_LIB_JARS_DIR}"
    hadoop_error "ERROR: HADOOP_COMMON_HOME or related vars are not configured."
    exit 1
  fi

  # developers
  if [[ -n "${HADOOP_ENABLE_BUILD_PATHS}" ]]; then
    hadoop_add_classpath "${HADOOP_COMMON_HOME}/hadoop-common/target/classes"
  fi

  hadoop_add_classpath "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_LIB_JARS_DIR}"'/*'
  hadoop_add_classpath "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_DIR}"'/*'
}

## @description Run libexec/tools/module.sh to add to the classpath
## @description environment
## @audience private
## @stability evolving
## @replaceable yes
## @param module
function hadoop_add_to_classpath_tools
{
  declare module=$1

  if [[ -f "${HADOOP_LIBEXEC_DIR}/tools/${module}.sh" ]]; then
    # shellcheck disable=SC1090
    . "${HADOOP_LIBEXEC_DIR}/tools/${module}.sh"
  else
    hadoop_error "ERROR: Tools helper ${HADOOP_LIBEXEC_DIR}/tools/${module}.sh was not found."
  fi

  if declare -f hadoop_classpath_tools_${module} >/dev/null 2>&1; then
    "hadoop_classpath_tools_${module}"
  fi
}

## @description Add the user's custom classpath settings to the
## @description environment
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_add_to_classpath_userpath
{
  # Add the user-specified HADOOP_CLASSPATH to the
  # official CLASSPATH env var if HADOOP_USE_CLIENT_CLASSLOADER
  # is not set.
  # Add it first or last depending on if user has
  # set env-var HADOOP_USER_CLASSPATH_FIRST
  # we'll also dedupe it, because we're cool like that.
  #
  declare -a array
  declare -i c=0
  declare -i j
  declare -i i
  declare idx

  if [[ -n "${HADOOP_CLASSPATH}" ]]; then
    # I wonder if Java runs on VMS.
    for idx in $(echo "${HADOOP_CLASSPATH}" | tr : '\n'); do
      array[${c}]=${idx}
      ((c=c+1))
    done

    # bats gets confused by j getting set to 0
    ((j=c-1)) || ${QATESTMODE}

    if [[ -z "${HADOOP_USE_CLIENT_CLASSLOADER}" ]]; then
      if [[ -z "${HADOOP_USER_CLASSPATH_FIRST}" ]]; then
        for ((i=0; i<=j; i++)); do
          hadoop_add_classpath "${array[$i]}" after
        done
      else
        for ((i=j; i>=0; i--)); do
          hadoop_add_classpath "${array[$i]}" before
        done
      fi
    fi
  fi
}

## @description Routine to configure any OS-specific settings.
## @audience public
## @stability stable
## @replaceable yes
## @return may exit on failure conditions
function hadoop_os_tricks
{
  local bindv6only

  HADOOP_IS_CYGWIN=false
  case ${HADOOP_OS_TYPE} in
    Darwin)
      if [[ -z "${JAVA_HOME}" ]]; then
        if [[ -x /usr/libexec/java_home ]]; then
          JAVA_HOME="$(/usr/libexec/java_home)"
          export JAVA_HOME
        else
          JAVA_HOME=/Library/Java/Home
          export JAVA_HOME
        fi
      fi
    ;;
    Linux)

      # Newer versions of glibc use an arena memory allocator that
      # causes virtual memory usage to explode. This interacts badly
      # with the many threads that we use in Hadoop. Tune the variable
      # down to prevent vmem explosion.
      export MALLOC_ARENA_MAX=${MALLOC_ARENA_MAX:-4}

      # skip the remaining (Linux-only) checks in QA test mode so that
      # non-Linux hosts can run the tests
      if [[ "${QATESTMODE}" = true ]]; then
        return
      fi

      # NOTE! HADOOP_ALLOW_IPV6 is a developer hook. We leave it
      # undocumented in hadoop-env.sh because we don't want users to
      # shoot themselves in the foot while devs make IPv6 work.

      bindv6only=$(/sbin/sysctl -n net.ipv6.bindv6only 2> /dev/null)
      if [[ -n "${bindv6only}" ]] &&
         [[ "${bindv6only}" -eq "1" ]] &&
         [[ "${HADOOP_ALLOW_IPV6}" != "yes" ]]; then
        hadoop_error "ERROR: \"net.ipv6.bindv6only\" is set to 1 "
        hadoop_error "ERROR: Hadoop networking could be broken. Aborting."
        hadoop_error "ERROR: For more info: http://wiki.apache.org/hadoop/HadoopIPv6"
        exit 1
      fi
    ;;
    CYGWIN*)
      # Flag that we're running on Cygwin to trigger path translation later.
      HADOOP_IS_CYGWIN=true
    ;;
  esac
}
  1296. ## @description Configure/verify ${JAVA_HOME}
  1297. ## @audience public
  1298. ## @stability stable
  1299. ## @replaceable yes
  1300. ## @return may exit on failure conditions
  1301. function hadoop_java_setup
  1302. {
  1303. # Bail if we did not detect it
  1304. if [[ -z "${JAVA_HOME}" ]]; then
  1305. hadoop_error "ERROR: JAVA_HOME is not set and could not be found."
  1306. exit 1
  1307. fi
  1308. if [[ ! -d "${JAVA_HOME}" ]]; then
  1309. hadoop_error "ERROR: JAVA_HOME ${JAVA_HOME} does not exist."
  1310. exit 1
  1311. fi
  1312. JAVA="${JAVA_HOME}/bin/java"
  1313. if [[ ! -x "$JAVA" ]]; then
  1314. hadoop_error "ERROR: $JAVA is not executable."
  1315. exit 1
  1316. fi
  1317. }

## @description Finish Java JNI paths prior to execution
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_finalize_libpaths
{
  if [[ -n "${JAVA_LIBRARY_PATH}" ]]; then
    hadoop_translate_cygwin_path JAVA_LIBRARY_PATH
    hadoop_add_param HADOOP_OPTS java.library.path \
      "-Djava.library.path=${JAVA_LIBRARY_PATH}"
    export LD_LIBRARY_PATH
  fi
}

## @description Finish Java heap parameters prior to execution
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_finalize_hadoop_heap
{
  if [[ -n "${HADOOP_HEAPSIZE_MAX}" ]]; then
    if [[ "${HADOOP_HEAPSIZE_MAX}" =~ ^[0-9]+$ ]]; then
      HADOOP_HEAPSIZE_MAX="${HADOOP_HEAPSIZE_MAX}m"
    fi
    hadoop_add_param HADOOP_OPTS Xmx "-Xmx${HADOOP_HEAPSIZE_MAX}"
  fi

  # backwards compatibility
  if [[ -n "${HADOOP_HEAPSIZE}" ]]; then
    if [[ "${HADOOP_HEAPSIZE}" =~ ^[0-9]+$ ]]; then
      HADOOP_HEAPSIZE="${HADOOP_HEAPSIZE}m"
    fi
    hadoop_add_param HADOOP_OPTS Xmx "-Xmx${HADOOP_HEAPSIZE}"
  fi

  if [[ -n "${HADOOP_HEAPSIZE_MIN}" ]]; then
    if [[ "${HADOOP_HEAPSIZE_MIN}" =~ ^[0-9]+$ ]]; then
      HADOOP_HEAPSIZE_MIN="${HADOOP_HEAPSIZE_MIN}m"
    fi
    hadoop_add_param HADOOP_OPTS Xms "-Xms${HADOOP_HEAPSIZE_MIN}"
  fi
}
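
# Worked example (values hypothetical): a bare number is treated as
# megabytes; anything with a unit suffix is passed through untouched:
#
#   HADOOP_HEAPSIZE_MAX=4096  ->  -Xmx4096m added to HADOOP_OPTS
#   HADOOP_HEAPSIZE_MAX=4g    ->  -Xmx4g added to HADOOP_OPTS
#   HADOOP_HEAPSIZE_MIN=1g    ->  -Xms1g added to HADOOP_OPTS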

## @description Converts the contents of the variable name
## @description `varnameref` into the equivalent Windows path.
## @description If the second parameter is true, then `varnameref`
## @description is treated as though it was a path list.
## @audience public
## @stability stable
## @replaceable yes
## @param varnameref
## @param [true]
function hadoop_translate_cygwin_path
{
  if [[ "${HADOOP_IS_CYGWIN}" = "true" ]]; then
    if [[ "$2" = "true" ]]; then
      #shellcheck disable=SC2016
      eval "$1"='$(cygpath -p -w "${!1}" 2>/dev/null)'
    else
      #shellcheck disable=SC2016
      eval "$1"='$(cygpath -w "${!1}" 2>/dev/null)'
    fi
  fi
}
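
# Usage sketch (only meaningful under Cygwin; paths hypothetical):
#
#   HADOOP_LOG_DIR=/var/log/hadoop
#   hadoop_translate_cygwin_path HADOOP_LOG_DIR    # single path
#   hadoop_translate_cygwin_path CLASSPATH true    # colon-separated list
#
# Note that the variable *name* is passed, not its value; the function
# rewrites the variable in place via eval and cygpath.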

## @description Adds the HADOOP_CLIENT_OPTS variable to
## @description HADOOP_OPTS if HADOOP_SUBCMD_SUPPORTDAEMONIZATION is false
## @audience public
## @stability stable
## @replaceable yes
function hadoop_add_client_opts
{
  if [[ "${HADOOP_SUBCMD_SUPPORTDAEMONIZATION}" = false
     || -z "${HADOOP_SUBCMD_SUPPORTDAEMONIZATION}" ]]; then
    hadoop_debug "Appending HADOOP_CLIENT_OPTS onto HADOOP_OPTS"
    HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
  fi
}

## @description Finish configuring Hadoop specific system properties
## @description prior to executing Java
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_finalize_hadoop_opts
{
  hadoop_translate_cygwin_path HADOOP_LOG_DIR
  hadoop_add_param HADOOP_OPTS hadoop.log.dir "-Dhadoop.log.dir=${HADOOP_LOG_DIR}"
  hadoop_add_param HADOOP_OPTS hadoop.log.file "-Dhadoop.log.file=${HADOOP_LOGFILE}"
  hadoop_translate_cygwin_path HADOOP_HOME
  export HADOOP_HOME
  hadoop_add_param HADOOP_OPTS hadoop.home.dir "-Dhadoop.home.dir=${HADOOP_HOME}"
  hadoop_add_param HADOOP_OPTS hadoop.id.str "-Dhadoop.id.str=${HADOOP_IDENT_STRING}"
  hadoop_add_param HADOOP_OPTS hadoop.root.logger "-Dhadoop.root.logger=${HADOOP_ROOT_LOGGER}"
  hadoop_add_param HADOOP_OPTS hadoop.policy.file "-Dhadoop.policy.file=${HADOOP_POLICYFILE}"
  hadoop_add_param HADOOP_OPTS hadoop.security.logger "-Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER}"
}

## @description Finish Java classpath prior to execution
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_finalize_classpath
{
  hadoop_add_classpath "${HADOOP_CONF_DIR}" before

  # user classpath gets added at the last minute. this allows
  # override of CONF dirs and more
  hadoop_add_to_classpath_userpath
  hadoop_translate_cygwin_path CLASSPATH true
}

## @description Finish all the remaining environment settings prior
## @description to executing Java. This is a wrapper that calls
## @description the other `finalize` routines.
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_finalize
{
  hadoop_shellprofiles_finalize

  hadoop_finalize_classpath
  hadoop_finalize_libpaths
  hadoop_finalize_hadoop_heap
  hadoop_finalize_hadoop_opts

  hadoop_translate_cygwin_path HADOOP_HOME
  hadoop_translate_cygwin_path HADOOP_CONF_DIR
  hadoop_translate_cygwin_path HADOOP_COMMON_HOME
  hadoop_translate_cygwin_path HADOOP_HDFS_HOME
  hadoop_translate_cygwin_path HADOOP_YARN_HOME
  hadoop_translate_cygwin_path HADOOP_MAPRED_HOME
}

## @description Print usage information and exit with the passed
## @description `exitcode`
## @audience public
## @stability stable
## @replaceable no
## @param exitcode
## @return This function will always exit.
function hadoop_exit_with_usage
{
  local exitcode=$1
  if [[ -z $exitcode ]]; then
    exitcode=1
  fi
  # shellcheck disable=SC2034
  if declare -F hadoop_usage >/dev/null ; then
    hadoop_usage
  elif [[ -x /usr/bin/cowsay ]]; then
    /usr/bin/cowsay -f elephant "Sorry, no help available."
  else
    hadoop_error "Sorry, no help available."
  fi
  exit $exitcode
}
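
# Minimal sketch of the hook this relies on: a front-end script is
# expected to define its own hadoop_usage before calling in, e.g.:
#
#   function hadoop_usage
#   {
#     hadoop_generate_usage "${HADOOP_SHELL_EXECNAME}" true
#   }
#
# (hadoop_generate_usage is an assumption here; any function that prints
# help text works. Otherwise the cowsay/error fallback fires.)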

## @description Verify that prerequisites have been met prior to
## @description executing a privileged program.
## @audience private
## @stability evolving
## @replaceable yes
## @return This routine may exit.
function hadoop_verify_secure_prereq
{
  # if you are on an OS like Illumos that has functional roles
  # and you are using pfexec, you'll probably want to change
  # this.
  if ! hadoop_privilege_check && [[ -z "${HADOOP_SECURE_COMMAND}" ]]; then
    hadoop_error "ERROR: You must be a privileged user in order to run a secure service."
    exit 1
  else
    return 0
  fi
}

## @audience private
## @stability evolving
## @replaceable yes
function hadoop_setup_secure_service
{
  # need a more complicated setup? replace me!
  HADOOP_PID_DIR=${HADOOP_SECURE_PID_DIR}
  HADOOP_LOG_DIR=${HADOOP_SECURE_LOG_DIR}
}

## @audience private
## @stability evolving
## @replaceable yes
function hadoop_verify_piddir
{
  if [[ -z "${HADOOP_PID_DIR}" ]]; then
    hadoop_error "No pid directory defined."
    exit 1
  fi
  hadoop_mkdir "${HADOOP_PID_DIR}"
  touch "${HADOOP_PID_DIR}/$$" >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Unable to write in ${HADOOP_PID_DIR}. Aborting."
    exit 1
  fi
  rm "${HADOOP_PID_DIR}/$$" >/dev/null 2>&1
}

## @audience private
## @stability evolving
## @replaceable yes
function hadoop_verify_logdir
{
  if [[ -z "${HADOOP_LOG_DIR}" ]]; then
    hadoop_error "No log directory defined."
    exit 1
  fi
  hadoop_mkdir "${HADOOP_LOG_DIR}"
  touch "${HADOOP_LOG_DIR}/$$" >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Unable to write in ${HADOOP_LOG_DIR}. Aborting."
    exit 1
  fi
  rm "${HADOOP_LOG_DIR}/$$" >/dev/null 2>&1
}

## @description Determine the status of the daemon referenced
## @description by `pidfile`
## @audience public
## @stability stable
## @replaceable yes
## @param pidfile
## @return (mostly) LSB 4.1.0 compatible status
function hadoop_status_daemon
{
  #
  # LSB 4.1.0 compatible status command (1)
  #
  # 0 = program is running
  # 1 = dead, but still a pid (2)
  # 2 = (not used by us)
  # 3 = not running
  #
  # 1 - this is not an endorsement of the LSB
  #
  # 2 - technically, the specification says /var/run/pid, so
  #     we should never return this value, but we're giving
  #     them the benefit of a doubt and returning 1 even if
  #     our pid is not in /var/run .
  #

  local pidfile=$1
  shift

  local pid
  local pspid

  if [[ -f "${pidfile}" ]]; then
    pid=$(cat "${pidfile}")
    if pspid=$(ps -o args= -p"${pid}" 2>/dev/null); then
      # this is to check that the running process we found is actually the same
      # daemon that we're interested in
      if [[ ${pspid} =~ -Dproc_${daemonname} ]]; then
        return 0
      fi
    fi
    return 1
  fi
  return 3
}
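
# Usage sketch: note that ${daemonname} is not a parameter; it is
# expected to be set in the caller's scope so the -Dproc_ marker can
# be matched against the process args. Hypothetical check:
#
#   daemonname=namenode
#   hadoop_status_daemon "${HADOOP_PID_DIR}/hadoop-${USER}-namenode.pid"
#   case $? in
#     0) echo "running" ;;
#     1) echo "dead, but pid file exists" ;;
#     3) echo "not running" ;;
#   esac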

## @description Execute the Java `class`, passing along any `options`.
## @description Additionally, set the Java property -Dproc_`command`.
## @audience public
## @stability stable
## @replaceable yes
## @param command
## @param class
## @param [options]
function hadoop_java_exec
{
  # run a java command. this is used for
  # non-daemons
  local command=$1
  local class=$2
  shift 2

  hadoop_debug "Final CLASSPATH: ${CLASSPATH}"
  hadoop_debug "Final HADOOP_OPTS: ${HADOOP_OPTS}"
  hadoop_debug "Final JAVA_HOME: ${JAVA_HOME}"
  hadoop_debug "java: ${JAVA}"
  hadoop_debug "Class name: ${class}"
  hadoop_debug "Command line options: $*"

  export CLASSPATH
  #shellcheck disable=SC2086
  exec "${JAVA}" "-Dproc_${command}" ${HADOOP_OPTS} "${class}" "$@"
}
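
# Illustrative call (the wiring is normally done by
# hadoop_generic_java_subcmd_handler rather than by hand):
#
#   hadoop_finalize
#   hadoop_java_exec version org.apache.hadoop.util.VersionInfo
#
# Because this uses exec, it replaces the shell; nothing after it runs.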

## @description Start a non-privileged daemon in the foreground.
## @audience private
## @stability evolving
## @replaceable yes
## @param command
## @param class
## @param pidfile
## @param [options]
function hadoop_start_daemon
{
  # this is our non-privileged daemon starter
  # that fires up a daemon in the *foreground*
  # so complex! so wow! much java!
  local command=$1
  local class=$2
  local pidfile=$3
  shift 3

  hadoop_debug "Final CLASSPATH: ${CLASSPATH}"
  hadoop_debug "Final HADOOP_OPTS: ${HADOOP_OPTS}"
  hadoop_debug "Final JAVA_HOME: ${JAVA_HOME}"
  hadoop_debug "java: ${JAVA}"
  hadoop_debug "Class name: ${class}"
  hadoop_debug "Command line options: $*"

  # this is for the non-daemon pid creation
  #shellcheck disable=SC2086
  echo $$ > "${pidfile}" 2>/dev/null
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot write ${command} pid ${pidfile}."
  fi

  export CLASSPATH
  #shellcheck disable=SC2086
  exec "${JAVA}" "-Dproc_${command}" ${HADOOP_OPTS} "${class}" "$@"
}

## @description Start a non-privileged daemon in the background.
## @audience private
## @stability evolving
## @replaceable yes
## @param command
## @param class
## @param pidfile
## @param outfile
## @param [options]
function hadoop_start_daemon_wrapper
{
  local daemonname=$1
  local class=$2
  local pidfile=$3
  local outfile=$4
  shift 4

  local counter

  hadoop_rotate_log "${outfile}"

  hadoop_start_daemon "${daemonname}" \
    "$class" \
    "${pidfile}" \
    "$@" >> "${outfile}" 2>&1 < /dev/null &

  # we need to avoid a race condition here
  # so let's wait for the fork to finish
  # before overriding with the daemonized pid
  (( counter=0 ))
  while [[ ! -f ${pidfile} && ${counter} -le 5 ]]; do
    sleep 1
    (( counter++ ))
  done

  # this is for daemon pid creation
  #shellcheck disable=SC2086
  echo $! > "${pidfile}" 2>/dev/null
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot write ${daemonname} pid ${pidfile}."
  fi

  # shellcheck disable=SC2086
  renice "${HADOOP_NICENESS}" $! >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot set priority of ${daemonname} process $!"
  fi

  # shellcheck disable=SC2086
  disown %+ >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot disconnect ${daemonname} process $!"
  fi
  sleep 1

  # capture the ulimit output
  ulimit -a >> "${outfile}" 2>&1

  # shellcheck disable=SC2086
  if ! ps -p $! >/dev/null 2>&1; then
    return 1
  fi
  return 0
}

## @description Start a privileged daemon in the foreground.
## @audience private
## @stability evolving
## @replaceable yes
## @param command
## @param class
## @param daemonpidfile
## @param daemonoutfile
## @param daemonerrfile
## @param wrapperpidfile
## @param [options]
function hadoop_start_secure_daemon
{
  # this is used to launch a secure daemon in the *foreground*
  #
  local daemonname=$1
  local class=$2

  # pid file to create for our daemon
  local daemonpidfile=$3

  # where to send stdout. jsvc has bad habits so this *may* be &1
  # which means you send it to stdout!
  local daemonoutfile=$4

  # where to send stderr. same thing, except &2 = stderr
  local daemonerrfile=$5
  local privpidfile=$6
  shift 6

  hadoop_rotate_log "${daemonoutfile}"
  hadoop_rotate_log "${daemonerrfile}"

  # shellcheck disable=SC2153
  jsvc="${JSVC_HOME}/jsvc"
  if [[ ! -f "${jsvc}" ]]; then
    hadoop_error "JSVC_HOME is not set or set incorrectly. jsvc is required to run secure"
    hadoop_error "or privileged daemons. Please download and install jsvc from"
    hadoop_error "http://archive.apache.org/dist/commons/daemon/binaries/"
    hadoop_error "and set JSVC_HOME to the directory containing the jsvc binary."
    exit 1
  fi

  # note that shellcheck will throw a
  # bogus for-our-use-case 2086 here.
  # it doesn't properly support multi-line situations

  hadoop_debug "Final CLASSPATH: ${CLASSPATH}"
  hadoop_debug "Final HADOOP_OPTS: ${HADOOP_OPTS}"
  hadoop_debug "Final JSVC_HOME: ${JSVC_HOME}"
  hadoop_debug "jsvc: ${jsvc}"
  hadoop_debug "Class name: ${class}"
  hadoop_debug "Command line options: $*"

  #shellcheck disable=SC2086
  echo $$ > "${privpidfile}" 2>/dev/null
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot write ${daemonname} pid ${privpidfile}."
  fi

  # shellcheck disable=SC2086
  exec "${jsvc}" \
    "-Dproc_${daemonname}" \
    -outfile "${daemonoutfile}" \
    -errfile "${daemonerrfile}" \
    -pidfile "${daemonpidfile}" \
    -nodetach \
    -user "${HADOOP_SECURE_USER}" \
    -cp "${CLASSPATH}" \
    ${HADOOP_OPTS} \
    "${class}" "$@"
}

## @description Start a privileged daemon in the background.
## @audience private
## @stability evolving
## @replaceable yes
## @param command
## @param class
## @param daemonpidfile
## @param daemonoutfile
## @param wrapperpidfile
## @param wrapperoutfile
## @param daemonerrfile
## @param [options]
function hadoop_start_secure_daemon_wrapper
{
  # this wraps hadoop_start_secure_daemon to take care
  # of the dirty work to launch a daemon in the background!
  local daemonname=$1
  local class=$2

  # same rules as hadoop_start_secure_daemon except we
  # have some additional parameters
  local daemonpidfile=$3

  local daemonoutfile=$4

  # the pid file of the subprocess that spawned our
  # secure launcher
  local jsvcpidfile=$5

  # the output of the subprocess that spawned our secure
  # launcher
  local jsvcoutfile=$6

  local daemonerrfile=$7
  shift 7

  local counter

  hadoop_rotate_log "${jsvcoutfile}"

  hadoop_start_secure_daemon \
    "${daemonname}" \
    "${class}" \
    "${daemonpidfile}" \
    "${daemonoutfile}" \
    "${daemonerrfile}" \
    "${jsvcpidfile}" "$@" >> "${jsvcoutfile}" 2>&1 < /dev/null &

  # we need to avoid a race condition here
  # so let's wait for the fork to finish
  # before overriding with the daemonized pid
  (( counter=0 ))
  while [[ ! -f ${daemonpidfile} && ${counter} -le 5 ]]; do
    sleep 1
    (( counter++ ))
  done

  #shellcheck disable=SC2086
  if ! echo $! > "${jsvcpidfile}"; then
    hadoop_error "ERROR: Cannot write ${daemonname} pid ${jsvcpidfile}."
  fi

  sleep 1
  #shellcheck disable=SC2086
  renice "${HADOOP_NICENESS}" $! >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot set priority of ${daemonname} process $!"
  fi
  if [[ -f "${daemonpidfile}" ]]; then
    #shellcheck disable=SC2046
    renice "${HADOOP_NICENESS}" $(cat "${daemonpidfile}" 2>/dev/null) >/dev/null 2>&1
    if [[ $? -gt 0 ]]; then
      hadoop_error "ERROR: Cannot set priority of ${daemonname} process $(cat "${daemonpidfile}" 2>/dev/null)"
    fi
  fi
  #shellcheck disable=SC2046
  disown %+ >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot disconnect ${daemonname} process $!"
  fi

  # capture the ulimit output
  su "${HADOOP_SECURE_USER}" -c 'bash -c "ulimit -a"' >> "${jsvcoutfile}" 2>&1

  #shellcheck disable=SC2086
  if ! ps -p $! >/dev/null 2>&1; then
    return 1
  fi
  return 0
}

## @description Wait until the process dies or the timeout expires
## @audience private
## @stability evolving
## @param pid
## @param timeout
function wait_process_to_die_or_timeout
{
  local pid=$1
  local timeout=$2

  # Normalize timeout
  # Round up or down
  timeout=$(printf "%.0f\n" "${timeout}")
  if [[ ${timeout} -lt 1 ]]; then
    # minimum 1 second
    timeout=1
  fi

  # Wait to see if it's still alive
  for (( i=0; i < "${timeout}"; i++ ))
  do
    if kill -0 "${pid}" > /dev/null 2>&1; then
      sleep 1
    else
      break
    fi
  done
}
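
# Hedged example: poll a pid (value hypothetical) for up to 30 seconds;
# fractional timeouts are rounded to the nearest whole second first:
#
#   wait_process_to_die_or_timeout 12345 30
#   wait_process_to_die_or_timeout 12345 0.4   # rounds to 0, clamps to 1s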

## @description Stop the non-privileged `command` daemon
## @description that is running at `pidfile`.
## @audience public
## @stability stable
## @replaceable yes
## @param command
## @param pidfile
function hadoop_stop_daemon
{
  local cmd=$1
  local pidfile=$2
  shift 2

  local pid
  local cur_pid

  if [[ -f "${pidfile}" ]]; then
    pid=$(cat "$pidfile")

    kill "${pid}" >/dev/null 2>&1

    wait_process_to_die_or_timeout "${pid}" "${HADOOP_STOP_TIMEOUT}"

    if kill -0 "${pid}" > /dev/null 2>&1; then
      hadoop_error "WARNING: ${cmd} did not stop gracefully after ${HADOOP_STOP_TIMEOUT} seconds: Trying to kill with kill -9"
      kill -9 "${pid}" >/dev/null 2>&1
    fi
    wait_process_to_die_or_timeout "${pid}" "${HADOOP_STOP_TIMEOUT}"
    if ps -p "${pid}" > /dev/null 2>&1; then
      hadoop_error "ERROR: Unable to kill ${pid}"
    else
      cur_pid=$(cat "$pidfile")
      if [[ "${pid}" = "${cur_pid}" ]]; then
        rm -f "${pidfile}" >/dev/null 2>&1
      else
        hadoop_error "WARNING: pid has changed for ${cmd}, skip deleting pid file"
      fi
    fi
  fi
}

## @description Stop the privileged `command` daemon that is
## @description running at `daemonpidfile` and launched with
## @description the wrapper at `wrapperpidfile`.
## @audience public
## @stability stable
## @replaceable yes
## @param command
## @param daemonpidfile
## @param wrapperpidfile
function hadoop_stop_secure_daemon
{
  local command=$1
  local daemonpidfile=$2
  local privpidfile=$3
  shift 3

  local ret
  local daemon_pid
  local priv_pid
  local cur_daemon_pid
  local cur_priv_pid

  daemon_pid=$(cat "$daemonpidfile")
  priv_pid=$(cat "$privpidfile")

  hadoop_stop_daemon "${command}" "${daemonpidfile}"
  ret=$?

  cur_daemon_pid=$(cat "$daemonpidfile")
  cur_priv_pid=$(cat "$privpidfile")

  if [[ "${daemon_pid}" = "${cur_daemon_pid}" ]]; then
    rm -f "${daemonpidfile}" >/dev/null 2>&1
  else
    hadoop_error "WARNING: daemon pid has changed for ${command}, skip deleting daemon pid file"
  fi

  if [[ "${priv_pid}" = "${cur_priv_pid}" ]]; then
    rm -f "${privpidfile}" >/dev/null 2>&1
  else
    hadoop_error "WARNING: priv pid has changed for ${command}, skip deleting priv pid file"
  fi
  return ${ret}
}

## @description Manage a non-privileged daemon.
## @audience private
## @stability evolving
## @replaceable yes
## @param [start|stop|status|default]
## @param command
## @param class
## @param daemonpidfile
## @param daemonoutfile
## @param [options]
function hadoop_daemon_handler
{
  local daemonmode=$1
  local daemonname=$2
  local class=$3
  local daemon_pidfile=$4
  local daemon_outfile=$5
  shift 5

  case ${daemonmode} in
    status)
      hadoop_status_daemon "${daemon_pidfile}"
      exit $?
    ;;
    stop)
      hadoop_stop_daemon "${daemonname}" "${daemon_pidfile}"
      exit $?
    ;;
    ##COMPAT -- older hadoops would also start daemons by default
    start|default)
      hadoop_verify_piddir
      hadoop_verify_logdir
      hadoop_status_daemon "${daemon_pidfile}"
      if [[ $? == 0 ]]; then
        hadoop_error "${daemonname} is running as process $(cat "${daemon_pidfile}"). Stop it first."
        exit 1
      else
        # stale pid file, so just remove it and continue on
        rm -f "${daemon_pidfile}" >/dev/null 2>&1
      fi
      ##COMPAT - differentiate between --daemon start and nothing
      # "nothing" shouldn't detach
      if [[ "$daemonmode" = "default" ]]; then
        hadoop_start_daemon "${daemonname}" "${class}" "${daemon_pidfile}" "$@"
      else
        hadoop_start_daemon_wrapper "${daemonname}" \
          "${class}" "${daemon_pidfile}" "${daemon_outfile}" "$@"
      fi
    ;;
  esac
}

## @description Manage a privileged daemon.
## @audience private
## @stability evolving
## @replaceable yes
## @param [start|stop|status|default]
## @param command
## @param class
## @param daemonpidfile
## @param daemonoutfile
## @param wrapperpidfile
## @param wrapperoutfile
## @param wrappererrfile
## @param [options]
function hadoop_secure_daemon_handler
{
  local daemonmode=$1
  local daemonname=$2
  local classname=$3
  local daemon_pidfile=$4
  local daemon_outfile=$5
  local priv_pidfile=$6
  local priv_outfile=$7
  local priv_errfile=$8
  shift 8

  case ${daemonmode} in
    status)
      hadoop_status_daemon "${daemon_pidfile}"
      exit $?
    ;;
    stop)
      hadoop_stop_secure_daemon "${daemonname}" \
        "${daemon_pidfile}" "${priv_pidfile}"
      exit $?
    ;;
    ##COMPAT -- older hadoops would also start daemons by default
    start|default)
      hadoop_verify_piddir
      hadoop_verify_logdir
      hadoop_status_daemon "${daemon_pidfile}"
      if [[ $? == 0 ]]; then
        hadoop_error "${daemonname} is running as process $(cat "${daemon_pidfile}"). Stop it first."
        exit 1
      else
        # stale pid file, so just remove it and continue on
        rm -f "${daemon_pidfile}" >/dev/null 2>&1
      fi
      ##COMPAT - differentiate between --daemon start and nothing
      # "nothing" shouldn't detach
      if [[ "${daemonmode}" = "default" ]]; then
        hadoop_start_secure_daemon "${daemonname}" "${classname}" \
          "${daemon_pidfile}" "${daemon_outfile}" \
          "${priv_errfile}" "${priv_pidfile}" "$@"
      else
        hadoop_start_secure_daemon_wrapper "${daemonname}" "${classname}" \
          "${daemon_pidfile}" "${daemon_outfile}" \
          "${priv_pidfile}" "${priv_outfile}" "${priv_errfile}" "$@"
      fi
    ;;
  esac
}

## @description autodetect whether this is a priv subcmd
## @description by whether or not a priv user var exists
## @description and if HADOOP_SECURE_CLASSNAME is defined
## @audience public
## @stability stable
## @replaceable yes
## @param command
## @param subcommand
## @return 1 = not priv
## @return 0 = priv
function hadoop_detect_priv_subcmd
{
  declare program=$1
  declare command=$2

  if [[ -z "${HADOOP_SECURE_CLASSNAME}" ]]; then
    hadoop_debug "No secure classname defined."
    return 1
  fi

  uvar=$(hadoop_build_custom_subcmd_var "${program}" "${command}" SECURE_USER)
  if [[ -z "${!uvar}" ]]; then
    hadoop_debug "No secure user defined."
    return 1
  fi
  return 0
}
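
# Hedged example: for program "hdfs" and subcommand "datanode", this
# checks HDFS_DATANODE_SECURE_USER. Both conditions must hold:
#
#   export HADOOP_SECURE_CLASSNAME=org.apache.hadoop.hdfs.server.datanode.SecureDataNodeStarter
#   export HDFS_DATANODE_SECURE_USER=hdfs
#   hadoop_detect_priv_subcmd hdfs datanode && echo "privileged"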

## @description Build custom subcommand var
## @audience public
## @stability stable
## @replaceable yes
## @param command
## @param subcommand
## @param customid
## @return string
function hadoop_build_custom_subcmd_var
{
  declare program=$1
  declare command=$2
  declare custom=$3
  declare uprogram
  declare ucommand

  if [[ -z "${BASH_VERSINFO[0]}" ]] \
     || [[ "${BASH_VERSINFO[0]}" -lt 4 ]]; then
    uprogram=$(echo "${program}" | tr '[:lower:]' '[:upper:]')
    ucommand=$(echo "${command}" | tr '[:lower:]' '[:upper:]')
  else
    uprogram=${program^^}
    ucommand=${command^^}
  fi

  echo "${uprogram}_${ucommand}_${custom}"
}
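
# Examples of the naming scheme this produces:
#
#   hadoop_build_custom_subcmd_var hdfs datanode SECURE_USER
#   -> HDFS_DATANODE_SECURE_USER
#   hadoop_build_custom_subcmd_var yarn nodemanager OPTS
#   -> YARN_NODEMANAGER_OPTS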

## @description Verify that the username stored in a var resolves
## @description to a user id
## @audience public
## @stability stable
## @replaceable yes
## @param userstring
## @return 0 for success
## @return 1 for failure
function hadoop_verify_user_resolves
{
  declare userstr=$1

  if [[ -z ${userstr} || -z ${!userstr} ]] ; then
    return 1
  fi

  id -u "${!userstr}" >/dev/null 2>&1
}

## @description Verify that ${USER} is allowed to execute the
## @description given subcommand.
## @audience public
## @stability stable
## @replaceable yes
## @param command
## @param subcommand
## @return return 0 on success
## @return exit 1 on failure
function hadoop_verify_user_perm
{
  declare program=$1
  declare command=$2
  declare uvar

  uvar=$(hadoop_build_custom_subcmd_var "${program}" "${command}" USER)

  if [[ -n ${!uvar} ]]; then
    if [[ ${!uvar} != "${USER}" ]]; then
      hadoop_error "ERROR: ${command} can only be executed by ${!uvar}."
      exit 1
    fi
  fi
  return 0
}

## @description Check whether the given subcommand needs to be
## @description re-executed as a different user.
## @audience public
## @stability stable
## @replaceable yes
## @param subcommand
## @return 1 on no re-exec needed
## @return 0 on need to re-exec
function hadoop_need_reexec
{
  declare program=$1
  declare command=$2
  declare uvar

  # we've already been re-execed, bail
  if [[ "${HADOOP_REEXECED_CMD}" = true ]]; then
    return 1
  fi

  # if we have privilege, and the _USER is defined, and _USER is
  # set to someone who isn't us, then yes, we should re-exec.
  # otherwise no, don't re-exec and let the system deal with it.

  if hadoop_privilege_check; then
    uvar=$(hadoop_build_custom_subcmd_var "${program}" "${command}" USER)
    if [[ -n ${!uvar} ]]; then
      if [[ ${!uvar} != "${USER}" ]]; then
        return 0
      fi
    fi
  fi
  return 1
}

## @description Add custom (program)_(command)_OPTS to HADOOP_OPTS.
## @description Also handles the deprecated cases from pre-3.x.
## @audience public
## @stability evolving
## @replaceable yes
## @param program
## @param subcommand
## @return will exit on failure conditions
function hadoop_subcommand_opts
{
  declare program=$1
  declare command=$2
  declare uvar
  declare depvar
  declare uprogram
  declare ucommand

  if [[ -z "${program}" || -z "${command}" ]]; then
    return 1
  fi

  # bash 4 and up have built-in ways to upper and lower
  # case the contents of vars. This is faster than
  # calling tr.

  ## We don't call hadoop_build_custom_subcmd_var here
  ## since we need to construct this for the deprecation
  ## cases. For Hadoop 4.x, this needs to get cleaned up.

  if [[ -z "${BASH_VERSINFO[0]}" ]] \
     || [[ "${BASH_VERSINFO[0]}" -lt 4 ]]; then
    uprogram=$(echo "${program}" | tr '[:lower:]' '[:upper:]')
    ucommand=$(echo "${command}" | tr '[:lower:]' '[:upper:]')
  else
    uprogram=${program^^}
    ucommand=${command^^}
  fi

  uvar="${uprogram}_${ucommand}_OPTS"

  # Let's handle all of the deprecation cases early
  # HADOOP_NAMENODE_OPTS -> HDFS_NAMENODE_OPTS

  depvar="HADOOP_${ucommand}_OPTS"

  if [[ "${depvar}" != "${uvar}" ]]; then
    if [[ -n "${!depvar}" ]]; then
      hadoop_deprecate_envvar "${depvar}" "${uvar}"
    fi
  fi

  if [[ -n ${!uvar} ]]; then
    hadoop_debug "Appending ${uvar} onto HADOOP_OPTS"
    HADOOP_OPTS="${HADOOP_OPTS} ${!uvar}"
    return 0
  fi
}
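
# Worked example (variable values hypothetical): for program "hdfs"
# and subcommand "namenode",
#
#   export HDFS_NAMENODE_OPTS="-Xmx8g"
#   hadoop_subcommand_opts hdfs namenode
#
# appends -Xmx8g to HADOOP_OPTS. If only the deprecated
# HADOOP_NAMENODE_OPTS were set, hadoop_deprecate_envvar would warn
# and migrate its value to HDFS_NAMENODE_OPTS first.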

## @description Add custom (program)_(command)_SECURE_EXTRA_OPTS to HADOOP_OPTS.
## @description This *does not* handle the pre-3.x deprecated cases
## @audience public
## @stability stable
## @replaceable yes
## @param program
## @param subcommand
## @return will exit on failure conditions
function hadoop_subcommand_secure_opts
{
  declare program=$1
  declare command=$2
  declare uvar
  declare uprogram
  declare ucommand

  if [[ -z "${program}" || -z "${command}" ]]; then
    return 1
  fi

  # HDFS_DATANODE_SECURE_EXTRA_OPTS
  # HDFS_NFS3_SECURE_EXTRA_OPTS
  # ...
  uvar=$(hadoop_build_custom_subcmd_var "${program}" "${command}" SECURE_EXTRA_OPTS)

  if [[ -n ${!uvar} ]]; then
    hadoop_debug "Appending ${uvar} onto HADOOP_OPTS"
    HADOOP_OPTS="${HADOOP_OPTS} ${!uvar}"
    return 0
  fi
}

## @description Perform the 'hadoop classpath', etc subcommand with the given
## @description parameters
## @audience private
## @stability evolving
## @replaceable yes
## @param [parameters]
## @return will print & exit with no params
function hadoop_do_classpath_subcommand
{
  if [[ "$#" -gt 1 ]]; then
    eval "$1"=org.apache.hadoop.util.Classpath
  else
    hadoop_finalize
    echo "${CLASSPATH}"
    exit 0
  fi
}

## @description generic shell script option parser. sets
## @description HADOOP_PARSE_COUNTER to the number of arguments
## @description the caller should shift
## @audience private
## @stability evolving
## @replaceable yes
## @param [parameters, typically "$@"]
function hadoop_parse_args
{
  HADOOP_DAEMON_MODE="default"
  HADOOP_PARSE_COUNTER=0

  # not all of the options supported here are supported by all commands
  # however these are:
  hadoop_add_option "--config dir" "Hadoop config directory"
  hadoop_add_option "--debug" "turn on shell script debug mode"
  hadoop_add_option "--help" "usage information"

  while true; do
    hadoop_debug "hadoop_parse_args: processing $1"
    case $1 in
      --buildpaths)
        HADOOP_ENABLE_BUILD_PATHS=true
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+1))
      ;;
      --config)
        shift
        confdir=$1
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
        if [[ -d "${confdir}" ]]; then
          HADOOP_CONF_DIR="${confdir}"
        elif [[ -z "${confdir}" ]]; then
          hadoop_error "ERROR: No parameter provided for --config"
          hadoop_exit_with_usage 1
        else
          hadoop_error "ERROR: Cannot find configuration directory \"${confdir}\""
          hadoop_exit_with_usage 1
        fi
      ;;
      --daemon)
        shift
        HADOOP_DAEMON_MODE=$1
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
        if [[ -z "${HADOOP_DAEMON_MODE}" || \
          ! "${HADOOP_DAEMON_MODE}" =~ ^st(art|op|atus)$ ]]; then
          hadoop_error "ERROR: --daemon must be followed by either \"start\", \"stop\", or \"status\"."
          hadoop_exit_with_usage 1
        fi
      ;;
      --debug)
        shift
        HADOOP_SHELL_SCRIPT_DEBUG=true
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+1))
      ;;
      --help|-help|-h|help|--h|--\?|-\?|\?)
        hadoop_exit_with_usage 0
      ;;
      --hostnames)
        shift
        HADOOP_WORKER_NAMES="$1"
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
      ;;
      --hosts)
        shift
        hadoop_populate_workers_file "$1"
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
      ;;
      --loglevel)
        shift
        # shellcheck disable=SC2034
        HADOOP_LOGLEVEL="$1"
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
      ;;
      --reexec)
        shift
        if [[ "${HADOOP_REEXECED_CMD}" = true ]]; then
          hadoop_error "ERROR: re-exec fork bomb prevention: --reexec already called"
          exit 1
        fi
        HADOOP_REEXECED_CMD=true
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+1))
      ;;
      --workers)
        shift
        # shellcheck disable=SC2034
        HADOOP_WORKER_MODE=true
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+1))
      ;;
      *)
        break
      ;;
    esac
  done

  hadoop_debug "hadoop_parse: asking caller to skip ${HADOOP_PARSE_COUNTER}"
}
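
# Typical calling convention (a sketch of how the bin/ scripts use this):
#
#   hadoop_parse_args "$@"
#   shift "${HADOOP_PARSE_COUNTER}"
#
# i.e. the caller consumes the generic options this parser recognized
# and is left with the subcommand and its arguments.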

## @description Handle subcommands from main program entries
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_generic_java_subcmd_handler
{
  declare priv_outfile
  declare priv_errfile
  declare priv_pidfile
  declare daemon_outfile
  declare daemon_pidfile
  declare secureuser

  # The default/expected way to determine if a daemon is going to run in secure
  # mode is defined by hadoop_detect_priv_subcmd. If this returns true
  # then set up the secure user var and tell the world we're in secure mode

  if hadoop_detect_priv_subcmd "${HADOOP_SHELL_EXECNAME}" "${HADOOP_SUBCMD}"; then
    HADOOP_SUBCMD_SECURESERVICE=true
    secureuser=$(hadoop_build_custom_subcmd_var "${HADOOP_SHELL_EXECNAME}" "${HADOOP_SUBCMD}" SECURE_USER)

    if ! hadoop_verify_user_resolves "${secureuser}"; then
      hadoop_error "ERROR: User defined in ${secureuser} (${!secureuser}) does not exist. Aborting."
      exit 1
    fi

    HADOOP_SECURE_USER="${!secureuser}"
  fi

  # check if we're running in secure mode.
  # breaking this up from the above lets 3rd parties
  # do things a bit differently
  #
  # secure services require some extra setup
  # if yes, then we need to define all of the priv and daemon stuff
  # if not, then we just need to define daemon stuff.
  # note the daemon vars are purposefully different between the two

  if [[ "${HADOOP_SUBCMD_SECURESERVICE}" = true ]]; then

    hadoop_subcommand_secure_opts "${HADOOP_SHELL_EXECNAME}" "${HADOOP_SUBCMD}"

    hadoop_verify_secure_prereq
    hadoop_setup_secure_service
    priv_outfile="${HADOOP_LOG_DIR}/privileged-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.out"
    priv_errfile="${HADOOP_LOG_DIR}/privileged-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.err"
    priv_pidfile="${HADOOP_PID_DIR}/privileged-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}.pid"
    daemon_outfile="${HADOOP_LOG_DIR}/hadoop-${HADOOP_SECURE_USER}-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.out"
    daemon_pidfile="${HADOOP_PID_DIR}/hadoop-${HADOOP_SECURE_USER}-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}.pid"
  else
    daemon_outfile="${HADOOP_LOG_DIR}/hadoop-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.out"
    daemon_pidfile="${HADOOP_PID_DIR}/hadoop-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}.pid"
  fi

  # are we actually in daemon mode?
  # if yes, use the daemon logger and the appropriate log file.
  if [[ "${HADOOP_DAEMON_MODE}" != "default" ]]; then
    HADOOP_ROOT_LOGGER="${HADOOP_DAEMON_ROOT_LOGGER}"
    if [[ "${HADOOP_SUBCMD_SECURESERVICE}" = true ]]; then
      HADOOP_LOGFILE="hadoop-${HADOOP_SECURE_USER}-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.log"
    else
      HADOOP_LOGFILE="hadoop-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.log"
    fi
  fi

  # finish defining the environment: system properties, env vars, class paths, etc.
  hadoop_finalize

  # do the hard work of launching a daemon or just executing our interactive
  # java class
  if [[ "${HADOOP_SUBCMD_SUPPORTDAEMONIZATION}" = true ]]; then
    if [[ "${HADOOP_SUBCMD_SECURESERVICE}" = true ]]; then
      hadoop_secure_daemon_handler \
        "${HADOOP_DAEMON_MODE}" \
        "${HADOOP_SUBCMD}" \
        "${HADOOP_SECURE_CLASSNAME}" \
        "${daemon_pidfile}" \
        "${daemon_outfile}" \
        "${priv_pidfile}" \
        "${priv_outfile}" \
        "${priv_errfile}" \
        "${HADOOP_SUBCMD_ARGS[@]}"
    else
      hadoop_daemon_handler \
        "${HADOOP_DAEMON_MODE}" \
        "${HADOOP_SUBCMD}" \
        "${HADOOP_CLASSNAME}" \
        "${daemon_pidfile}" \
        "${daemon_outfile}" \
        "${HADOOP_SUBCMD_ARGS[@]}"
    fi
    exit $?
  else
    hadoop_java_exec "${HADOOP_SUBCMD}" "${HADOOP_CLASSNAME}" "${HADOOP_SUBCMD_ARGS[@]}"
  fi
}