#!/usr/bin/env bash

# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# we need to declare this globally as an array, which can only
# be done outside of a function
declare -a HADOOP_SUBCMD_USAGE
declare -a HADOOP_OPTION_USAGE
declare -a HADOOP_SUBCMD_USAGE_TYPES

## @description  Print a message to stderr
## @audience     public
## @stability    stable
## @replaceable  no
## @param        string
function hadoop_error
{
  echo "$*" 1>&2
}

## @description  Print a message to stderr if --debug is turned on
## @audience     public
## @stability    stable
## @replaceable  no
## @param        string
function hadoop_debug
{
  if [[ -n "${HADOOP_SHELL_SCRIPT_DEBUG}" ]]; then
    echo "DEBUG: $*" 1>&2
  fi
}
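
# Example (illustrative): passing --debug to a hadoop command sets
# HADOOP_SHELL_SCRIPT_DEBUG, after which calls such as
#
#   hadoop_debug "scanning ${HADOOP_CONF_DIR}"
#
# print "DEBUG: scanning ..." on stderr; with it unset they print nothing.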

## @description  Given a filename or dir, return the absolute version of it
## @description  This works as an alternative to readlink, which isn't
## @description  portable.
## @audience     public
## @stability    stable
## @param        fsobj
## @replaceable  no
## @return       0 success
## @return       1 failure
## @return       stdout abspath
function hadoop_abs
{
  declare obj=$1
  declare dir
  declare fn
  declare dirret

  if [[ ! -e ${obj} ]]; then
    return 1
  elif [[ -d ${obj} ]]; then
    dir=${obj}
  else
    dir=$(dirname -- "${obj}")
    fn=$(basename -- "${obj}")
    fn="/${fn}"
  fi

  dir=$(cd -P -- "${dir}" >/dev/null 2>/dev/null && pwd -P)
  dirret=$?
  if [[ ${dirret} = 0 ]]; then
    echo "${dir}${fn}"
    return 0
  fi
  return 1
}
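
# Example (illustrative):
#
#   jarpath=$(hadoop_abs "../lib/example.jar") || hadoop_error "ERROR: no such jar"
#
# prints the absolute path on stdout and returns 0 on success.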

## @description  Given variable $1 delete $2 from it
## @audience     public
## @stability    stable
## @replaceable  no
function hadoop_delete_entry
{
  if [[ ${!1} =~ \ ${2}\  ]] ; then
    hadoop_debug "Removing ${2} from ${1}"
    eval "${1}"=\""${!1// ${2} }"\"
  fi
}

## @description  Given variable $1 add $2 to it
## @audience     public
## @stability    stable
## @replaceable  no
function hadoop_add_entry
{
  if [[ ! ${!1} =~ \ ${2}\  ]] ; then
    hadoop_debug "Adding ${2} to ${1}"
    #shellcheck disable=SC2140
    eval "${1}"=\""${!1} ${2} "\"
  fi
}

## @description  Given variable $1 determine if $2 is in it
## @audience     public
## @stability    stable
## @replaceable  no
## @return       0 = yes, 1 = no
function hadoop_verify_entry
{
  # this unfortunately can't really be tested by bats. :(
  # so if this changes, be aware that unit tests effectively
  # do this function in them
  [[ ${!1} =~ \ ${2}\  ]]
}
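
# Example (illustrative): these functions maintain space-delimited
# membership lists, e.g.:
#
#   hadoop_add_entry HADOOP_TOOLS_OPTIONS "hadoop-aws"
#   hadoop_verify_entry HADOOP_TOOLS_OPTIONS "hadoop-aws" && echo "present"
#   hadoop_delete_entry HADOOP_TOOLS_OPTIONS "hadoop-aws"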

## @description  Check if an array has a given value
## @audience     public
## @stability    stable
## @replaceable  yes
## @param        element
## @param        array
## @returns      0 = yes
## @returns      1 = no
function hadoop_array_contains
{
  declare element=$1
  shift
  declare val

  if [[ "$#" -eq 0 ]]; then
    return 1
  fi

  for val in "${@}"; do
    if [[ "${val}" == "${element}" ]]; then
      return 0
    fi
  done
  return 1
}

## @description  Add the `appendstring` if `checkstring` is not
## @description  present in the given array
## @audience     public
## @stability    stable
## @replaceable  yes
## @param        envvar
## @param        appendstring
function hadoop_add_array_param
{
  declare arrname=$1
  declare add=$2

  declare arrref="${arrname}[@]"
  declare array=("${!arrref}")

  if ! hadoop_array_contains "${add}" "${array[@]}"; then
    #shellcheck disable=SC1083,SC2086
    eval ${arrname}=\(\"\${array[@]}\" \"${add}\" \)
    hadoop_debug "$1 accepted $2"
  else
    hadoop_debug "$1 declined $2"
  fi
}
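
# Example (illustrative): hadoop_add_subcommand below relies on this for
# de-duplication:
#
#   hadoop_add_array_param HADOOP_SUBCMD_USAGE_TYPES "daemon"
#
# appends "daemon" only if it is not already in the array.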

## @description  Sort an array (must not contain regexps)
## @description  in place
## @audience     public
## @stability    stable
## @replaceable  yes
## @param        arrayvar
function hadoop_sort_array
{
  declare arrname=$1
  declare arrref="${arrname}[@]"
  declare array=("${!arrref}")
  declare oifs
  declare globstatus
  declare -a sa

  globstatus=$(set -o | grep noglob | awk '{print $NF}')

  set -f
  oifs=${IFS}

  # shellcheck disable=SC2034
  IFS=$'\n' sa=($(sort <<<"${array[*]}"))

  # shellcheck disable=SC1083
  eval "${arrname}"=\(\"\${sa[@]}\"\)

  IFS=${oifs}
  if [[ "${globstatus}" = off ]]; then
    set +f
  fi
}

## @description  Check if we are running with privilege.
## @description  By default, this implementation looks for
## @description  EUID=0. For OSes that have true privilege
## @description  separation, this should be something more complex
## @audience     private
## @stability    evolving
## @replaceable  yes
## @return       1 = no priv
## @return       0 = priv
function hadoop_privilege_check
{
  [[ "${EUID}" = 0 ]]
}

## @description  Execute a command via sudo when running as root,
## @description  exiting with failure if the given user cannot be
## @description  resolved; when not root, just run the command.
## @description  (This is intended to be used by the
## @description  start-*/stop-* scripts.)
## @audience     private
## @stability    evolving
## @replaceable  yes
## @param        user
## @param        commandstring
## @return       exitstatus
function hadoop_sudo
{
  declare user=$1
  shift

  if hadoop_privilege_check; then
    if hadoop_verify_user_resolves user; then
      sudo -u "${user}" -- "$@"
    else
      hadoop_error "ERROR: Refusing to run as root: ${user} account is not found. Aborting."
      return 1
    fi
  else
    "$@"
  fi
}

## @description  Execute a command via sudo when running as root
## @description  with extra support for commands that might
## @description  legitimately start as root (e.g., datanode)
## @description  (This is intended to
## @description  be used by the start-*/stop-* scripts.)
## @audience     private
## @stability    evolving
## @replaceable  no
## @param        user
## @param        commandstring
## @return       exitstatus
function hadoop_uservar_su
{
  ## startup matrix:
  #
  # if $EUID != 0, then exec
  # if $EUID = 0 then
  #    if hdfs_subcmd_user is defined, call hadoop_sudo to exec
  #    if hdfs_subcmd_user is not defined, error
  #
  # For secure daemons, this means both the secure and insecure env vars need to be
  # defined. e.g., HDFS_DATANODE_USER=root HDFS_DATANODE_SECURE_USER=hdfs
  # This function will pick up the "normal" var, switch to that user, then
  # execute the command which will then pick up the "secure" version.
  #

  declare program=$1
  declare command=$2
  shift 2

  declare uprogram
  declare ucommand
  declare uvar
  declare svar

  if hadoop_privilege_check; then
    uvar=$(hadoop_build_custom_subcmd_var "${program}" "${command}" USER)

    svar=$(hadoop_build_custom_subcmd_var "${program}" "${command}" SECURE_USER)

    if [[ -n "${!uvar}" ]]; then
      hadoop_sudo "${!uvar}" "$@"
    elif [[ -n "${!svar}" ]]; then
      ## if we are here, then SECURE_USER with no USER defined
      ## we are already privileged, so just run the command and hope
      ## for the best
      "$@"
    else
      hadoop_error "ERROR: Attempting to operate on ${program} ${command} as root"
      hadoop_error "ERROR: but there is no ${uvar} defined. Aborting operation."
      return 1
    fi
  else
    "$@"
  fi
}
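
# Example (illustrative): when run as root with
# HDFS_DATANODE_USER=root and HDFS_DATANODE_SECURE_USER=hdfs set,
#
#   hadoop_uservar_su hdfs datanode "${HADOOP_HDFS_HOME}/bin/hdfs" --daemon start datanode
#
# consults HDFS_DATANODE_USER and re-executes the command via hadoop_sudo
# as that user; a non-root caller just runs the command directly.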

## @description  Add a subcommand to the usage output
## @audience     private
## @stability    evolving
## @replaceable  no
## @param        subcommand
## @param        subcommandtype
## @param        subcommanddesc
function hadoop_add_subcommand
{
  declare subcmd=$1
  declare subtype=$2
  declare text=$3

  hadoop_debug "${subcmd} as a ${subtype}"

  hadoop_add_array_param HADOOP_SUBCMD_USAGE_TYPES "${subtype}"

  # done in this order so that sort works later
  HADOOP_SUBCMD_USAGE[${HADOOP_SUBCMD_USAGE_COUNTER}]="${subcmd}@${subtype}@${text}"
  ((HADOOP_SUBCMD_USAGE_COUNTER=HADOOP_SUBCMD_USAGE_COUNTER+1))
}
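
# Example (illustrative): a command script might register
#
#   hadoop_add_subcommand "fs" client "run a generic filesystem user client"
#
# so that hadoop_generate_usage can later group it under "Client Commands".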

## @description  Add an option to the usage output
## @audience     private
## @stability    evolving
## @replaceable  no
## @param        subcommand
## @param        subcommanddesc
function hadoop_add_option
{
  local option=$1
  local text=$2

  HADOOP_OPTION_USAGE[${HADOOP_OPTION_USAGE_COUNTER}]="${option}@${text}"
  ((HADOOP_OPTION_USAGE_COUNTER=HADOOP_OPTION_USAGE_COUNTER+1))
}

## @description  Reset the usage information to blank
## @audience     private
## @stability    evolving
## @replaceable  no
function hadoop_reset_usage
{
  HADOOP_SUBCMD_USAGE=()
  HADOOP_OPTION_USAGE=()
  HADOOP_SUBCMD_USAGE_TYPES=()
  HADOOP_SUBCMD_USAGE_COUNTER=0
  HADOOP_OPTION_USAGE_COUNTER=0
}

## @description  Print a screen-size aware two-column output
## @description  if reqtype is not null, only print those requested
## @audience     private
## @stability    evolving
## @replaceable  no
## @param        reqtype
## @param        array
function hadoop_generic_columnprinter
{
  declare reqtype=$1
  shift
  declare -a input=("$@")
  declare -i i=0
  declare -i counter=0
  declare line
  declare text
  declare option
  declare giventext
  declare -i maxoptsize
  declare -i foldsize
  declare -a tmpa
  declare numcols
  declare brup

  if [[ -n "${COLUMNS}" ]]; then
    numcols=${COLUMNS}
  else
    numcols=$(tput cols 2>/dev/null)
    COLUMNS=${numcols}
  fi

  if [[ -z "${numcols}"
     || ! "${numcols}" =~ ^[0-9]+$ ]]; then
    numcols=75
  else
    ((numcols=numcols-5))
  fi

  while read -r line; do
    tmpa[${counter}]=${line}
    ((counter=counter+1))
    IFS='@' read -ra brup <<< "${line}"
    option="${brup[0]}"
    if [[ ${#option} -gt ${maxoptsize} ]]; then
      maxoptsize=${#option}
    fi
  done < <(for text in "${input[@]}"; do
    echo "${text}"
  done | sort)

  i=0
  ((foldsize=numcols-maxoptsize))

  until [[ $i -eq ${#tmpa[@]} ]]; do
    IFS='@' read -ra brup <<< "${tmpa[$i]}"

    option="${brup[0]}"
    cmdtype="${brup[1]}"
    giventext="${brup[2]}"

    if [[ -n "${reqtype}" ]]; then
      if [[ "${cmdtype}" != "${reqtype}" ]]; then
        ((i=i+1))
        continue
      fi
    fi

    if [[ -z "${giventext}" ]]; then
      giventext=${cmdtype}
    fi

    while read -r line; do
      printf "%-${maxoptsize}s   %-s\n" "${option}" "${line}"
      option=" "
    done < <(echo "${giventext}" | fold -s -w ${foldsize})
    ((i=i+1))
  done
}

## @description  generate standard usage output
## @description  and optionally takes a class
## @audience     private
## @stability    evolving
## @replaceable  no
## @param        execname
## @param        true|false
## @param        [text to use in place of SUBCOMMAND]
function hadoop_generate_usage
{
  declare cmd=$1
  declare takesclass=$2
  declare subcmdtext=${3:-"SUBCOMMAND"}
  declare haveoptions
  declare optstring
  declare havesubs
  declare subcmdstring
  declare cmdtype

  cmd=${cmd##*/}

  if [[ -n "${HADOOP_OPTION_USAGE_COUNTER}"
     && "${HADOOP_OPTION_USAGE_COUNTER}" -gt 0 ]]; then
    haveoptions=true
    optstring=" [OPTIONS]"
  fi

  if [[ -n "${HADOOP_SUBCMD_USAGE_COUNTER}"
     && "${HADOOP_SUBCMD_USAGE_COUNTER}" -gt 0 ]]; then
    havesubs=true
    subcmdstring=" ${subcmdtext} [${subcmdtext} OPTIONS]"
  fi

  echo "Usage: ${cmd}${optstring}${subcmdstring}"
  if [[ ${takesclass} = true ]]; then
    echo " or    ${cmd}${optstring} CLASSNAME [CLASSNAME OPTIONS]"
    echo "  where CLASSNAME is a user-provided Java class"
  fi

  if [[ "${haveoptions}" = true ]]; then
    echo ""
    echo "  OPTIONS is none or any of:"
    echo ""
    hadoop_generic_columnprinter "" "${HADOOP_OPTION_USAGE[@]}"
  fi

  if [[ "${havesubs}" = true ]]; then
    echo ""
    echo "  ${subcmdtext} is one of:"
    echo ""

    if [[ "${#HADOOP_SUBCMD_USAGE_TYPES[@]}" -gt 0 ]]; then

      hadoop_sort_array HADOOP_SUBCMD_USAGE_TYPES
      for subtype in "${HADOOP_SUBCMD_USAGE_TYPES[@]}"; do
        #shellcheck disable=SC2086
        cmdtype="$(tr '[:lower:]' '[:upper:]' <<< ${subtype:0:1})${subtype:1}"
        printf "\n    %s Commands:\n\n" "${cmdtype}"
        hadoop_generic_columnprinter "${subtype}" "${HADOOP_SUBCMD_USAGE[@]}"
      done
    else
      hadoop_generic_columnprinter "" "${HADOOP_SUBCMD_USAGE[@]}"
    fi
    echo ""
    echo "${subcmdtext} may print help when invoked w/o parameters or with -h."
  fi
}

## @description  Replace `oldvar` with `newvar` if `oldvar` exists.
## @audience     public
## @stability    stable
## @replaceable  yes
## @param        oldvar
## @param        newvar
function hadoop_deprecate_envvar
{
  local oldvar=$1
  local newvar=$2
  local oldval=${!oldvar}

  if [[ -n "${oldval}" ]]; then
    hadoop_error "WARNING: ${oldvar} has been replaced by ${newvar}. Using value of ${oldvar}."
    # shellcheck disable=SC2086
    eval ${newvar}=\"${oldval}\"
  fi
}
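
# Example (illustrative):
#
#   hadoop_deprecate_envvar HADOOP_SLAVES HADOOP_WORKERS
#
# warns and copies the old value into HADOOP_WORKERS if HADOOP_SLAVES is set.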

## @description  Declare `var` being used and print its value.
## @audience     public
## @stability    stable
## @replaceable  yes
## @param        var
function hadoop_using_envvar
{
  local var=$1
  local val=${!var}

  if [[ -n "${val}" ]]; then
    hadoop_debug "${var} = ${val}"
  fi
}

## @description  Create the directory 'dir'.
## @audience     public
## @stability    stable
## @replaceable  yes
## @param        dir
function hadoop_mkdir
{
  local dir=$1

  if [[ ! -w "${dir}" ]] && [[ ! -d "${dir}" ]]; then
    hadoop_error "WARNING: ${dir} does not exist. Creating."
    if ! mkdir -p "${dir}"; then
      hadoop_error "ERROR: Unable to create ${dir}. Aborting."
      exit 1
    fi
  fi
}

## @description  Bootstraps the Hadoop shell environment
## @audience     private
## @stability    evolving
## @replaceable  no
function hadoop_bootstrap
{
  # the root of the Hadoop installation
  # See HADOOP-6255 for the expected directory structure layout

  if [[ -n "${DEFAULT_LIBEXEC_DIR}" ]]; then
    hadoop_error "WARNING: DEFAULT_LIBEXEC_DIR ignored. It has been replaced by HADOOP_DEFAULT_LIBEXEC_DIR."
  fi

  # By now, HADOOP_LIBEXEC_DIR should have been defined upstream
  # We can piggyback off of that to figure out where the default
  # HADOOP_HOME should be. This allows us to run without
  # HADOOP_HOME ever being defined by a human! As a consequence,
  # HADOOP_LIBEXEC_DIR now becomes perhaps the single most powerful
  # env var within Hadoop.
  if [[ -z "${HADOOP_LIBEXEC_DIR}" ]]; then
    hadoop_error "HADOOP_LIBEXEC_DIR is not defined. Exiting."
    exit 1
  fi

  HADOOP_DEFAULT_PREFIX=$(cd -P -- "${HADOOP_LIBEXEC_DIR}/.." >/dev/null && pwd -P)
  HADOOP_HOME=${HADOOP_HOME:-$HADOOP_DEFAULT_PREFIX}
  export HADOOP_HOME

  #
  # short-cuts. vendors may redefine these as well, preferably
  # in hadoop-layout.sh
  #
  HADOOP_COMMON_DIR=${HADOOP_COMMON_DIR:-"share/hadoop/common"}
  HADOOP_COMMON_LIB_JARS_DIR=${HADOOP_COMMON_LIB_JARS_DIR:-"share/hadoop/common/lib"}
  HADOOP_COMMON_LIB_NATIVE_DIR=${HADOOP_COMMON_LIB_NATIVE_DIR:-"lib/native"}
  HDFS_DIR=${HDFS_DIR:-"share/hadoop/hdfs"}
  HDFS_LIB_JARS_DIR=${HDFS_LIB_JARS_DIR:-"share/hadoop/hdfs/lib"}
  YARN_DIR=${YARN_DIR:-"share/hadoop/yarn"}
  YARN_LIB_JARS_DIR=${YARN_LIB_JARS_DIR:-"share/hadoop/yarn/lib"}
  MAPRED_DIR=${MAPRED_DIR:-"share/hadoop/mapreduce"}
  MAPRED_LIB_JARS_DIR=${MAPRED_LIB_JARS_DIR:-"share/hadoop/mapreduce/lib"}

  HADOOP_TOOLS_HOME=${HADOOP_TOOLS_HOME:-${HADOOP_HOME}}
  HADOOP_TOOLS_DIR=${HADOOP_TOOLS_DIR:-"share/hadoop/tools"}
  HADOOP_TOOLS_LIB_JARS_DIR=${HADOOP_TOOLS_LIB_JARS_DIR:-"${HADOOP_TOOLS_DIR}/lib"}

  # by default, whatever we are about to run doesn't support
  # daemonization
  HADOOP_SUBCMD_SUPPORTDAEMONIZATION=false

  # by default, we have not been self-re-execed
  HADOOP_REEXECED_CMD=false

  HADOOP_SUBCMD_SECURESERVICE=false

  # This is the default we claim in hadoop-env.sh
  JSVC_HOME=${JSVC_HOME:-"/usr/bin"}

  # reset the usage output
  hadoop_reset_usage

  export HADOOP_OS_TYPE=${HADOOP_OS_TYPE:-$(uname -s)}

  # defaults
  # shellcheck disable=SC2154
  if [[ "${HADOOP_ALLOW_IPV6}" != "yes" ]]; then
    export HADOOP_OPTS=${HADOOP_OPTS:-"-Djava.net.preferIPv4Stack=true"}
  else
    export HADOOP_OPTS=${HADOOP_OPTS:-""}
  fi
  hadoop_debug "Initial HADOOP_OPTS=${HADOOP_OPTS}"
}

## @description  Locate Hadoop's configuration directory
## @audience     private
## @stability    evolving
## @replaceable  no
function hadoop_find_confdir
{
  local conf_dir

  # An attempt at compatibility with some Hadoop 1.x
  # installs.
  if [[ -e "${HADOOP_HOME}/conf/hadoop-env.sh" ]]; then
    conf_dir="conf"
  else
    conf_dir="etc/hadoop"
  fi
  export HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-${HADOOP_HOME}/${conf_dir}}"

  hadoop_debug "HADOOP_CONF_DIR=${HADOOP_CONF_DIR}"
}

## @description  Validate ${HADOOP_CONF_DIR}
## @audience     public
## @stability    stable
## @replaceable  yes
## @return       will exit on failure conditions
function hadoop_verify_confdir
{
  # Check only log4j.properties by default.
  # --loglevel does not work without logger settings in log4j.properties.
  if [[ ! -f "${HADOOP_CONF_DIR}/log4j.properties" ]]; then
    hadoop_error "WARNING: log4j.properties is not found. HADOOP_CONF_DIR may be incomplete."
  fi
}

## @description  Import the hadoop-env.sh settings
## @audience     private
## @stability    evolving
## @replaceable  no
function hadoop_exec_hadoopenv
{
  if [[ -z "${HADOOP_ENV_PROCESSED}" ]]; then
    if [[ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]]; then
      export HADOOP_ENV_PROCESSED=true
      # shellcheck source=./hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh
      . "${HADOOP_CONF_DIR}/hadoop-env.sh"
    fi
  fi
}

## @description  Import the replaced functions
## @audience     private
## @stability    evolving
## @replaceable  no
function hadoop_exec_userfuncs
{
  if [[ -e "${HADOOP_CONF_DIR}/hadoop-user-functions.sh" ]]; then
    # shellcheck disable=SC1090
    . "${HADOOP_CONF_DIR}/hadoop-user-functions.sh"
  fi
}

## @description  Read the user's settings. This provides for users to
## @description  override and/or append hadoop-env.sh. It is not meant
## @description  as a complete system override.
## @audience     private
## @stability    evolving
## @replaceable  yes
function hadoop_exec_user_hadoopenv
{
  if [[ -f "${HOME}/.hadoop-env" ]]; then
    hadoop_debug "Applying the user's .hadoop-env"
    # shellcheck disable=SC1090
    . "${HOME}/.hadoop-env"
  fi
}

## @description  Read the user's settings. This provides for users to
## @description  run Hadoop Shell API after system bootstrap
## @audience     private
## @stability    evolving
## @replaceable  yes
function hadoop_exec_hadooprc
{
  if [[ -f "${HOME}/.hadooprc" ]]; then
    hadoop_debug "Applying the user's .hadooprc"
    # shellcheck disable=SC1090
    . "${HOME}/.hadooprc"
  fi
}
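
# Example (illustrative) ~/.hadooprc: since it is sourced after the Shell
# API is available, it may call functions from this file, e.g.:
#
#   hadoop_add_classpath "${HOME}/extra-jars/*"
#   hadoop_add_profile mytools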

## @description  Import shellprofile.d content
## @audience     private
## @stability    evolving
## @replaceable  yes
function hadoop_import_shellprofiles
{
  local i
  local files1
  local files2

  if [[ -d "${HADOOP_LIBEXEC_DIR}/shellprofile.d" ]]; then
    files1=(${HADOOP_LIBEXEC_DIR}/shellprofile.d/*.sh)
    hadoop_debug "shellprofiles: ${files1[*]}"
  else
    hadoop_error "WARNING: ${HADOOP_LIBEXEC_DIR}/shellprofile.d doesn't exist. Functionality may not work."
  fi

  if [[ -d "${HADOOP_CONF_DIR}/shellprofile.d" ]]; then
    files2=(${HADOOP_CONF_DIR}/shellprofile.d/*.sh)
  fi

  # enable bundled shellprofiles that come
  # from hadoop-tools. This converts the user-facing HADOOP_OPTIONAL_TOOLS
  # to the HADOOP_TOOLS_OPTIONS that the shell profiles expect.
  # See dist-tools-hooks-maker for how the example HADOOP_OPTIONAL_TOOLS
  # gets populated into hadoop-env.sh

  for i in ${HADOOP_OPTIONAL_TOOLS//,/ }; do
    hadoop_add_entry HADOOP_TOOLS_OPTIONS "${i}"
  done

  for i in "${files1[@]}" "${files2[@]}"
  do
    if [[ -n "${i}"
      && -f "${i}" ]]; then
      hadoop_debug "Profiles: importing ${i}"
      # shellcheck disable=SC1090
      . "${i}"
    fi
  done
}

## @description  Initialize the registered shell profiles
## @audience     private
## @stability    evolving
## @replaceable  yes
function hadoop_shellprofiles_init
{
  local i

  for i in ${HADOOP_SHELL_PROFILES}
  do
    if declare -F _${i}_hadoop_init >/dev/null ; then
      hadoop_debug "Profiles: ${i} init"
      # shellcheck disable=SC2086
      _${i}_hadoop_init
    fi
  done
}

## @description  Apply the shell profile classpath additions
## @audience     private
## @stability    evolving
## @replaceable  yes
function hadoop_shellprofiles_classpath
{
  local i

  for i in ${HADOOP_SHELL_PROFILES}
  do
    if declare -F _${i}_hadoop_classpath >/dev/null ; then
      hadoop_debug "Profiles: ${i} classpath"
      # shellcheck disable=SC2086
      _${i}_hadoop_classpath
    fi
  done
}

## @description  Apply the shell profile native library additions
## @audience     private
## @stability    evolving
## @replaceable  yes
function hadoop_shellprofiles_nativelib
{
  local i

  for i in ${HADOOP_SHELL_PROFILES}
  do
    if declare -F _${i}_hadoop_nativelib >/dev/null ; then
      hadoop_debug "Profiles: ${i} nativelib"
      # shellcheck disable=SC2086
      _${i}_hadoop_nativelib
    fi
  done
}

## @description  Apply the shell profile final configuration
## @audience     private
## @stability    evolving
## @replaceable  yes
function hadoop_shellprofiles_finalize
{
  local i

  for i in ${HADOOP_SHELL_PROFILES}
  do
    if declare -F _${i}_hadoop_finalize >/dev/null ; then
      hadoop_debug "Profiles: ${i} finalize"
      # shellcheck disable=SC2086
      _${i}_hadoop_finalize
    fi
  done
}
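
# Example (illustrative) shell profile sketch: a file such as
# shellprofile.d/example.sh registers itself and supplies hook functions
# that the dispatchers above discover by name:
#
#   hadoop_add_profile example
#
#   function _example_hadoop_init
#   {
#     # one-time setup
#   }
#
#   function _example_hadoop_classpath
#   {
#     hadoop_add_classpath "/opt/example/lib/*"
#   }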

## @description  Initialize the Hadoop shell environment, now that
## @description  user settings have been imported
## @audience     private
## @stability    evolving
## @replaceable  no
function hadoop_basic_init
{
  # Some of these are also set in hadoop-env.sh.
  # we still set them here just in case hadoop-env.sh is
  # broken in some way, set up defaults, etc.
  #
  # but it is important to note that if you update these
  # you also need to update hadoop-env.sh as well!!!

  CLASSPATH=""
  hadoop_debug "Initialize CLASSPATH"

  if [[ -z "${HADOOP_COMMON_HOME}" ]] &&
     [[ -d "${HADOOP_HOME}/${HADOOP_COMMON_DIR}" ]]; then
    export HADOOP_COMMON_HOME="${HADOOP_HOME}"
  fi

  # default policy file for service-level authorization
  HADOOP_POLICYFILE=${HADOOP_POLICYFILE:-"hadoop-policy.xml"}

  # define HADOOP_HDFS_HOME
  if [[ -z "${HADOOP_HDFS_HOME}" ]] &&
     [[ -d "${HADOOP_HOME}/${HDFS_DIR}" ]]; then
    export HADOOP_HDFS_HOME="${HADOOP_HOME}"
  fi

  # define HADOOP_YARN_HOME
  if [[ -z "${HADOOP_YARN_HOME}" ]] &&
     [[ -d "${HADOOP_HOME}/${YARN_DIR}" ]]; then
    export HADOOP_YARN_HOME="${HADOOP_HOME}"
  fi

  # define HADOOP_MAPRED_HOME
  if [[ -z "${HADOOP_MAPRED_HOME}" ]] &&
     [[ -d "${HADOOP_HOME}/${MAPRED_DIR}" ]]; then
    export HADOOP_MAPRED_HOME="${HADOOP_HOME}"
  fi

  if [[ ! -d "${HADOOP_COMMON_HOME}" ]]; then
    hadoop_error "ERROR: Invalid HADOOP_COMMON_HOME"
    exit 1
  fi

  if [[ ! -d "${HADOOP_HDFS_HOME}" ]]; then
    hadoop_error "ERROR: Invalid HADOOP_HDFS_HOME"
    exit 1
  fi

  if [[ ! -d "${HADOOP_YARN_HOME}" ]]; then
    hadoop_error "ERROR: Invalid HADOOP_YARN_HOME"
    exit 1
  fi

  if [[ ! -d "${HADOOP_MAPRED_HOME}" ]]; then
    hadoop_error "ERROR: Invalid HADOOP_MAPRED_HOME"
    exit 1
  fi

  # if for some reason the shell doesn't have $USER defined
  # (e.g., ssh'd in to execute a command)
  # let's get the effective username and use that
  USER=${USER:-$(id -nu)}
  HADOOP_IDENT_STRING=${HADOOP_IDENT_STRING:-$USER}
  HADOOP_LOG_DIR=${HADOOP_LOG_DIR:-"${HADOOP_HOME}/logs"}
  HADOOP_LOGFILE=${HADOOP_LOGFILE:-hadoop.log}
  HADOOP_LOGLEVEL=${HADOOP_LOGLEVEL:-INFO}
  HADOOP_NICENESS=${HADOOP_NICENESS:-0}
  HADOOP_STOP_TIMEOUT=${HADOOP_STOP_TIMEOUT:-5}
  HADOOP_PID_DIR=${HADOOP_PID_DIR:-/tmp}
  HADOOP_ROOT_LOGGER=${HADOOP_ROOT_LOGGER:-${HADOOP_LOGLEVEL},console}
  HADOOP_DAEMON_ROOT_LOGGER=${HADOOP_DAEMON_ROOT_LOGGER:-${HADOOP_LOGLEVEL},RFA}
  HADOOP_SECURITY_LOGGER=${HADOOP_SECURITY_LOGGER:-INFO,NullAppender}
  HADOOP_SSH_OPTS=${HADOOP_SSH_OPTS-"-o BatchMode=yes -o StrictHostKeyChecking=no -o ConnectTimeout=10"}
  HADOOP_SECURE_LOG_DIR=${HADOOP_SECURE_LOG_DIR:-${HADOOP_LOG_DIR}}
  HADOOP_SECURE_PID_DIR=${HADOOP_SECURE_PID_DIR:-${HADOOP_PID_DIR}}
  HADOOP_SSH_PARALLEL=${HADOOP_SSH_PARALLEL:-10}
}

## @description  Set the worker support information to the contents
## @description  of `filename`
## @audience     public
## @stability    stable
## @replaceable  no
## @param        filename
## @return       will exit if file does not exist
function hadoop_populate_workers_file
{
  local workersfile=$1
  shift
  if [[ -f "${workersfile}" ]]; then
    HADOOP_WORKERS="${workersfile}"
  elif [[ -f "${HADOOP_CONF_DIR}/${workersfile}" ]]; then
    HADOOP_WORKERS="${HADOOP_CONF_DIR}/${workersfile}"
  else
    hadoop_error "ERROR: Cannot find hosts file \"${workersfile}\""
    hadoop_exit_with_usage 1
  fi
}

## @description  Rotates the given `file` until `number` of
## @description  files exist.
## @audience     public
## @stability    stable
## @replaceable  no
## @param        filename
## @param        [number]
## @return       $? will contain last mv's return value
function hadoop_rotate_log
{
  #
  # Users are likely to replace this one for something
  # that gzips or uses dates or who knows what.
  #
  # be aware that &1 and &2 might go through here
  # so don't do anything too crazy...
  #
  local log=$1;
  local num=${2:-5};

  if [[ -f "${log}" ]]; then # rotate logs
    while [[ ${num} -gt 1 ]]; do
      #shellcheck disable=SC2086
      let prev=${num}-1
      if [[ -f "${log}.${prev}" ]]; then
        mv "${log}.${prev}" "${log}.${num}"
      fi
      num=${prev}
    done
    mv "${log}" "${log}.${num}"
  fi
}
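
# Example (illustrative): with the default of 5,
#
#   hadoop_rotate_log "${HADOOP_LOG_DIR}/hadoop.log"
#
# renames hadoop.log.4 -> hadoop.log.5, ..., hadoop.log.1 -> hadoop.log.2,
# and finally hadoop.log -> hadoop.log.1.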

## @description  Via ssh, log into `hostname` and run `command`
## @audience     private
## @stability    evolving
## @replaceable  yes
## @param        hostname
## @param        command
## @param        [...]
function hadoop_actual_ssh
{
  # we are passing this function to xargs
  # should get hostname followed by rest of command line
  local worker=$1
  shift

  # shellcheck disable=SC2086
  ssh ${HADOOP_SSH_OPTS} ${worker} $"${@// /\\ }" 2>&1 | sed "s/^/$worker: /"
}

## @description  Connect to ${HADOOP_WORKERS} or ${HADOOP_WORKER_NAMES}
## @description  and execute command.
## @audience     private
## @stability    evolving
## @replaceable  yes
## @param        command
## @param        [...]
function hadoop_connect_to_hosts
{
  # shellcheck disable=SC2124
  local params="$@"
  local worker_file
  local tmpslvnames

  #
  # ssh (or whatever) to a host
  #
  # User can specify hostnames or a file where the hostnames are (not both)
  if [[ -n "${HADOOP_WORKERS}" && -n "${HADOOP_WORKER_NAMES}" ]] ; then
    hadoop_error "ERROR: Both HADOOP_WORKERS and HADOOP_WORKER_NAMES were defined. Aborting."
    exit 1
  elif [[ -z "${HADOOP_WORKER_NAMES}" ]]; then
    if [[ -n "${HADOOP_WORKERS}" ]]; then
      worker_file=${HADOOP_WORKERS}
    elif [[ -f "${HADOOP_CONF_DIR}/workers" ]]; then
      worker_file=${HADOOP_CONF_DIR}/workers
    elif [[ -f "${HADOOP_CONF_DIR}/slaves" ]]; then
      hadoop_error "WARNING: 'slaves' file has been deprecated. Please use 'workers' file instead."
      worker_file=${HADOOP_CONF_DIR}/slaves
    fi
  fi

  # if pdsh is available, let's use it. otherwise default
  # to a loop around ssh. (ugh)
  if [[ -e '/usr/bin/pdsh' ]]; then
    if [[ -z "${HADOOP_WORKER_NAMES}" ]] ; then
      # if we were given a file, just let pdsh deal with it.
      # shellcheck disable=SC2086
      PDSH_SSH_ARGS_APPEND="${HADOOP_SSH_OPTS}" pdsh \
        -f "${HADOOP_SSH_PARALLEL}" -w ^"${worker_file}" $"${@// /\\ }" 2>&1
    else
      # no spaces allowed in the pdsh arg host list
      # shellcheck disable=SC2086
      tmpslvnames=$(echo ${HADOOP_WORKER_NAMES} | tr -s ' ' ,)
      PDSH_SSH_ARGS_APPEND="${HADOOP_SSH_OPTS}" pdsh \
        -f "${HADOOP_SSH_PARALLEL}" \
        -w "${tmpslvnames}" $"${@// /\\ }" 2>&1
    fi
  else
    if [[ -z "${HADOOP_WORKER_NAMES}" ]]; then
      HADOOP_WORKER_NAMES=$(sed 's/#.*$//;/^$/d' "${worker_file}")
    fi
    hadoop_connect_to_hosts_without_pdsh "${params}"
  fi
}

## @description  Connect to ${HADOOP_WORKER_NAMES} and execute command
## @description  under the environment which does not support pdsh.
## @audience     private
## @stability    evolving
## @replaceable  yes
## @param        command
## @param        [...]
function hadoop_connect_to_hosts_without_pdsh
{
  # shellcheck disable=SC2124
  local params="$@"
  local workers=(${HADOOP_WORKER_NAMES})
  for (( i = 0; i < ${#workers[@]}; i++ ))
  do
    if (( i != 0 && i % HADOOP_SSH_PARALLEL == 0 )); then
      wait
    fi
    # shellcheck disable=SC2086
    hadoop_actual_ssh "${workers[$i]}" ${params} &
  done
  wait
}

## @description  Utility routine to handle --workers mode
## @audience     private
## @stability    evolving
## @replaceable  yes
## @param        commandarray
function hadoop_common_worker_mode_execute
{
  #
  # input should be the command line as given by the user
  # in the form of an array
  #
  local argv=("$@")

  # if --workers is still on the command line, remove it
  # to prevent loops
  # Also remove --hostnames and --hosts along with arg values
  local argsSize=${#argv[@]};
  for (( i = 0; i < argsSize; i++ ))
  do
    if [[ "${argv[$i]}" =~ ^--workers$ ]]; then
      unset argv[$i]
    elif [[ "${argv[$i]}" =~ ^--hostnames$ ]] ||
      [[ "${argv[$i]}" =~ ^--hosts$ ]]; then
      unset argv[$i];
      let i++;
      unset argv[$i];
    fi
  done
  if [[ ${QATESTMODE} = true ]]; then
    echo "${argv[@]}"
    return
  fi
  hadoop_connect_to_hosts -- "${argv[@]}"
}

## @description  Verify that a shell command was passed a valid
## @description  class name
## @audience     public
## @stability    stable
## @replaceable  yes
## @param        classname
## @return       0 = success
## @return       1 = failure w/user message
function hadoop_validate_classname
{
  local class=$1
  shift 1

  if [[ ! ${class} =~ \. ]]; then
    # assuming the arg is a typo of a command if it does not contain ".".
    # a class belonging to no package is not allowed as a result.
    hadoop_error "ERROR: ${class} is not COMMAND nor fully qualified CLASSNAME."
    return 1
  fi
  return 0
}

## @description  Append the `appendstring` if `checkstring` is not
## @description  present in the given `envvar`
## @audience     public
## @stability    stable
## @replaceable  yes
## @param        envvar
## @param        checkstring
## @param        appendstring
function hadoop_add_param
{
  #
  # general param dedupe..
  # $1 is what we are adding to
  # $2 is the name of what we want to add (key)
  # $3 is the key+value of what we're adding
  #
  # doing it this way allows us to support all sorts of
  # different syntaxes, just so long as they are space
  # delimited
  #
  if [[ ! ${!1} =~ $2 ]] ; then
    #shellcheck disable=SC2140
    eval "$1"="'${!1} $3'"
    if [[ ${!1:0:1} = ' ' ]]; then
      #shellcheck disable=SC2140
      eval "$1"="'${!1# }'"
    fi
    hadoop_debug "$1 accepted $3"
  else
    hadoop_debug "$1 declined $3"
  fi
}
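
# Example (illustrative): hadoop_finalize_hadoop_heap below uses this to
# avoid duplicate JVM flags:
#
#   hadoop_add_param HADOOP_OPTS Xmx "-Xmx4g"
#
# appends -Xmx4g to HADOOP_OPTS only if no Xmx setting is already present.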

## @description  Register the given `shellprofile` to the Hadoop
## @description  shell subsystem
## @audience     public
## @stability    stable
## @replaceable  yes
## @param        shellprofile
function hadoop_add_profile
{
  # shellcheck disable=SC2086
  hadoop_add_param HADOOP_SHELL_PROFILES $1 $1
}

## @description  Add a file system object (directory, file,
## @description  wildcard, ...) to the classpath. Optionally provide
## @description  a hint as to where in the classpath it should go.
## @audience     public
## @stability    stable
## @replaceable  yes
## @param        object
## @param        [before|after]
## @return       0 = success (added or duplicate)
## @return       1 = failure (doesn't exist or some other reason)
function hadoop_add_classpath
{
  # However, with classpath (& JLP), we can do dedupe
  # along with some sanity checking (e.g., missing directories)
  # since we have a better idea of what is legal
  #
  # for wildcard at end, we can
  # at least check the dir exists
  if [[ $1 =~ ^.*\*$ ]]; then
    local mp
    mp=$(dirname "$1")
    if [[ ! -d "${mp}" ]]; then
      hadoop_debug "Rejected CLASSPATH: $1 (not a dir)"
      return 1
    fi

  # no wildcard in the middle, so check existence
  # (doesn't matter *what* it is)
  elif [[ ! $1 =~ ^.*\*.*$ ]] && [[ ! -e "$1" ]]; then
    hadoop_debug "Rejected CLASSPATH: $1 (does not exist)"
    return 1
  fi
  if [[ -z "${CLASSPATH}" ]]; then
    CLASSPATH=$1
    hadoop_debug "Initial CLASSPATH=$1"
  elif [[ ":${CLASSPATH}:" != *":$1:"* ]]; then
    if [[ "$2" = "before" ]]; then
      CLASSPATH="$1:${CLASSPATH}"
      hadoop_debug "Prepend CLASSPATH: $1"
    else
      CLASSPATH+=:$1
      hadoop_debug "Append CLASSPATH: $1"
    fi
  else
    hadoop_debug "Dupe CLASSPATH: $1"
  fi
  return 0
}
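
# Example (illustrative):
#
#   hadoop_add_classpath "${HADOOP_CONF_DIR}" before
#   hadoop_add_classpath "/opt/extra/lib/*"    # trailing wildcard: the dir must exist
#
# duplicates are silently accepted; missing objects return 1.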

## @description  Add a file system object (directory, file,
## @description  wildcard, ...) to the colonpath. Optionally provide
## @description  a hint as to where in the colonpath it should go.
## @description  Prior to adding, objects are checked for duplication
## @description  and existence. Many other functions use this
## @description  function as their base implementation,
## @description  including `hadoop_add_javalibpath` and `hadoop_add_ldlibpath`.
## @audience     public
## @stability    stable
## @replaceable  yes
## @param        envvar
## @param        object
## @param        [before|after]
## @return       0 = success (added or duplicate)
## @return       1 = failure (doesn't exist or some other reason)
function hadoop_add_colonpath
{
  # this is CLASSPATH, JLP, etc but with dedupe but no
  # other checking
  if [[ -d "${2}" ]] && [[ ":${!1}:" != *":$2:"* ]]; then
    if [[ -z "${!1}" ]]; then
      # shellcheck disable=SC2086
      eval $1="'$2'"
      hadoop_debug "Initial colonpath($1): $2"
    elif [[ "$3" = "before" ]]; then
      # shellcheck disable=SC2086
      eval $1="'$2:${!1}'"
      hadoop_debug "Prepend colonpath($1): $2"
    else
      # shellcheck disable=SC2086
      eval $1+=":'$2'"
      hadoop_debug "Append colonpath($1): $2"
    fi
    return 0
  fi
  hadoop_debug "Rejected colonpath($1): $2"
  return 1
}

## @description  Add a file system object (directory, file,
## @description  wildcard, ...) to the Java JNI path. Optionally
## @description  provide a hint as to where in the Java JNI path
## @description  it should go.
## @audience     public
## @stability    stable
## @replaceable  yes
## @param        object
## @param        [before|after]
## @return       0 = success (added or duplicate)
## @return       1 = failure (doesn't exist or some other reason)
function hadoop_add_javalibpath
{
  # specialized function for a common use case
  hadoop_add_colonpath JAVA_LIBRARY_PATH "$1" "$2"
}

## @description  Add a file system object (directory, file,
## @description  wildcard, ...) to the LD_LIBRARY_PATH. Optionally
## @description  provide a hint as to where in the LD_LIBRARY_PATH
## @description  it should go.
## @audience     public
## @stability    stable
## @replaceable  yes
## @param        object
## @param        [before|after]
## @return       0 = success (added or duplicate)
## @return       1 = failure (doesn't exist or some other reason)
function hadoop_add_ldlibpath
{
  local status
  # specialized function for a common use case
  hadoop_add_colonpath LD_LIBRARY_PATH "$1" "$2"
  status=$?

  # note that we export this
  export LD_LIBRARY_PATH
  return ${status}
}

## @description  Add the common/core Hadoop components to the
## @description  environment
## @audience     private
## @stability    evolving
## @replaceable  yes
## @returns      1 on failure, may exit
## @returns      0 on success
function hadoop_add_common_to_classpath
{
  #
  # get all of the common jars+config in the path
  #
  if [[ -z "${HADOOP_COMMON_HOME}"
    || -z "${HADOOP_COMMON_DIR}"
    || -z "${HADOOP_COMMON_LIB_JARS_DIR}" ]]; then
    hadoop_debug "COMMON_HOME=${HADOOP_COMMON_HOME}"
    hadoop_debug "COMMON_DIR=${HADOOP_COMMON_DIR}"
    hadoop_debug "COMMON_LIB_JARS_DIR=${HADOOP_COMMON_LIB_JARS_DIR}"
    hadoop_error "ERROR: HADOOP_COMMON_HOME or related vars are not configured."
    exit 1
  fi

  # developers
  if [[ -n "${HADOOP_ENABLE_BUILD_PATHS}" ]]; then
    hadoop_add_classpath "${HADOOP_COMMON_HOME}/hadoop-common/target/classes"
  fi

  hadoop_add_classpath "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_LIB_JARS_DIR}"'/*'
  hadoop_add_classpath "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_DIR}"'/*'
}

## @description  Run libexec/tools/module.sh to add to the classpath
## @description  environment
## @audience     private
## @stability    evolving
## @replaceable  yes
## @param        module
function hadoop_add_to_classpath_tools
{
  declare module=$1

  if [[ -f "${HADOOP_LIBEXEC_DIR}/tools/${module}.sh" ]]; then
    # shellcheck disable=SC1090
    . "${HADOOP_LIBEXEC_DIR}/tools/${module}.sh"
  else
    hadoop_debug "Tools helper ${HADOOP_LIBEXEC_DIR}/tools/${module}.sh was not found."
  fi

  if declare -f hadoop_classpath_tools_${module} >/dev/null 2>&1; then
    "hadoop_classpath_tools_${module}"
  fi
}

## @description  Add the user's custom classpath settings to the
## @description  environment
## @audience     private
## @stability    evolving
## @replaceable  yes
function hadoop_add_to_classpath_userpath
{
  # Add the user-specified HADOOP_CLASSPATH to the
  # official CLASSPATH env var if HADOOP_USE_CLIENT_CLASSLOADER
  # is not set.
  # Add it first or last depending on if user has
  # set env-var HADOOP_USER_CLASSPATH_FIRST
  # we'll also dedupe it, because we're cool like that.
  #
  declare -a array
  declare -i c=0
  declare -i j
  declare -i i
  declare idx

  if [[ -n "${HADOOP_CLASSPATH}" ]]; then
    # I wonder if Java runs on VMS.
    for idx in $(echo "${HADOOP_CLASSPATH}" | tr : '\n'); do
      array[${c}]=${idx}
      ((c=c+1))
    done

    # bats gets confused by j getting set to 0
    ((j=c-1)) || ${QATESTMODE}

    if [[ -z "${HADOOP_USE_CLIENT_CLASSLOADER}" ]]; then
      if [[ -z "${HADOOP_USER_CLASSPATH_FIRST}" ]]; then
        for ((i=0; i<=j; i++)); do
          hadoop_add_classpath "${array[$i]}" after
        done
      else
        for ((i=j; i>=0; i--)); do
          hadoop_add_classpath "${array[$i]}" before
        done
      fi
    fi
  fi
}

## @description  Routine to configure any OS-specific settings.
## @audience     public
## @stability    stable
## @replaceable  yes
## @return       may exit on failure conditions
function hadoop_os_tricks
{
  local bindv6only

  HADOOP_IS_CYGWIN=false
  case ${HADOOP_OS_TYPE} in
    Darwin)
      if [[ -z "${JAVA_HOME}" ]]; then
        if [[ -x /usr/libexec/java_home ]]; then
          JAVA_HOME="$(/usr/libexec/java_home)"
          export JAVA_HOME
        else
          JAVA_HOME=/Library/Java/Home
          export JAVA_HOME
        fi
      fi
    ;;
    Linux)
      # Newer versions of glibc use an arena memory allocator that
      # causes virtual memory usage to explode. This interacts badly
      # with the many threads that we use in Hadoop. Tune the variable
      # down to prevent vmem explosion.
      export MALLOC_ARENA_MAX=${MALLOC_ARENA_MAX:-4}
      # we put this in QA test mode off so that non-Linux can test
      if [[ "${QATESTMODE}" = true ]]; then
        return
      fi

      # NOTE! HADOOP_ALLOW_IPV6 is a developer hook. We leave it
      # undocumented in hadoop-env.sh because we don't want users to
      # shoot themselves in the foot while devs make IPv6 work.
      bindv6only=$(/sbin/sysctl -n net.ipv6.bindv6only 2> /dev/null)
      if [[ -n "${bindv6only}" ]] &&
         [[ "${bindv6only}" -eq "1" ]] &&
         [[ "${HADOOP_ALLOW_IPV6}" != "yes" ]]; then
        hadoop_error "ERROR: \"net.ipv6.bindv6only\" is set to 1"
        hadoop_error "ERROR: Hadoop networking could be broken. Aborting."
        hadoop_error "ERROR: For more info: http://wiki.apache.org/hadoop/HadoopIPv6"
        exit 1
      fi
    ;;
    CYGWIN*)
      # Flag that we're running on Cygwin to trigger path translation later.
      HADOOP_IS_CYGWIN=true
    ;;
  esac
}

## @description  Configure/verify ${JAVA_HOME}
## @audience     public
## @stability    stable
## @replaceable  yes
## @return       may exit on failure conditions
function hadoop_java_setup
{
  # Bail if we did not detect it
  if [[ -z "${JAVA_HOME}" ]]; then
    hadoop_error "ERROR: JAVA_HOME is not set and could not be found."
    exit 1
  fi

  if [[ ! -d "${JAVA_HOME}" ]]; then
    hadoop_error "ERROR: JAVA_HOME ${JAVA_HOME} does not exist."
    exit 1
  fi

  JAVA="${JAVA_HOME}/bin/java"

  if [[ ! -x "$JAVA" ]]; then
    hadoop_error "ERROR: $JAVA is not executable."
    exit 1
  fi
}
  1323. ## @description Finish Java JNI paths prior to execution
  1324. ## @audience private
  1325. ## @stability evolving
  1326. ## @replaceable yes
  1327. function hadoop_finalize_libpaths
  1328. {
  1329. if [[ -n "${JAVA_LIBRARY_PATH}" ]]; then
  1330. hadoop_translate_cygwin_path JAVA_LIBRARY_PATH
  1331. hadoop_add_param HADOOP_OPTS java.library.path \
  1332. "-Djava.library.path=${JAVA_LIBRARY_PATH}"
  1333. export LD_LIBRARY_PATH
  1334. fi
  1335. }
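
# Sketch for hadoop_finalize_libpaths (hypothetical native-library path):
#
#   JAVA_LIBRARY_PATH="/usr/local/hadoop/lib/native"
#   hadoop_finalize_libpaths
#   # HADOOP_OPTS now carries
#   #   -Djava.library.path=/usr/local/hadoop/lib/native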

## @description Finish Java heap parameters prior to execution
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_finalize_hadoop_heap
{
  if [[ -n "${HADOOP_HEAPSIZE_MAX}" ]]; then
    if [[ "${HADOOP_HEAPSIZE_MAX}" =~ ^[0-9]+$ ]]; then
      HADOOP_HEAPSIZE_MAX="${HADOOP_HEAPSIZE_MAX}m"
    fi
    hadoop_add_param HADOOP_OPTS Xmx "-Xmx${HADOOP_HEAPSIZE_MAX}"
  fi

  # backwards compatibility
  if [[ -n "${HADOOP_HEAPSIZE}" ]]; then
    if [[ "${HADOOP_HEAPSIZE}" =~ ^[0-9]+$ ]]; then
      HADOOP_HEAPSIZE="${HADOOP_HEAPSIZE}m"
    fi
    hadoop_add_param HADOOP_OPTS Xmx "-Xmx${HADOOP_HEAPSIZE}"
  fi

  if [[ -n "${HADOOP_HEAPSIZE_MIN}" ]]; then
    if [[ "${HADOOP_HEAPSIZE_MIN}" =~ ^[0-9]+$ ]]; then
      HADOOP_HEAPSIZE_MIN="${HADOOP_HEAPSIZE_MIN}m"
    fi
    hadoop_add_param HADOOP_OPTS Xms "-Xms${HADOOP_HEAPSIZE_MIN}"
  fi
}
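
# Example of the unit handling above (illustrative values): a bare number
# is treated as megabytes, while values that already carry a unit pass
# through unchanged.
#
#   HADOOP_HEAPSIZE_MAX=4096     # becomes "4096m" -> -Xmx4096m
#   HADOOP_HEAPSIZE_MIN="1g"     # already has a unit -> -Xms1g
#   hadoop_finalize_hadoop_heap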

## @description Converts the contents of the variable name
## @description `varnameref` into the equivalent Windows path.
## @description If the second parameter is true, then `varnameref`
## @description is treated as though it was a path list.
## @audience public
## @stability stable
## @replaceable yes
## @param varnameref
## @param [true]
function hadoop_translate_cygwin_path
{
  if [[ "${HADOOP_IS_CYGWIN}" = "true" ]]; then
    if [[ "$2" = "true" ]]; then
      #shellcheck disable=SC2016
      eval "$1"='$(cygpath -p -w "${!1}" 2>/dev/null)'
    else
      #shellcheck disable=SC2016
      eval "$1"='$(cygpath -w "${!1}" 2>/dev/null)'
    fi
  fi
}
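
# Sketch for hadoop_translate_cygwin_path (hypothetical paths; a no-op
# unless HADOOP_IS_CYGWIN=true):
#
#   CLASSPATH="/cygdrive/c/hadoop/conf:/cygdrive/c/hadoop/lib"
#   hadoop_translate_cygwin_path CLASSPATH true
#   # CLASSPATH becomes something like 'C:\hadoop\conf;C:\hadoop\lib'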

## @description Adds the HADOOP_CLIENT_OPTS variable to
## @description HADOOP_OPTS if HADOOP_SUBCMD_SUPPORTDAEMONIZATION
## @description is false or unset
## @audience public
## @stability stable
## @replaceable yes
function hadoop_add_client_opts
{
  if [[ "${HADOOP_SUBCMD_SUPPORTDAEMONIZATION}" = false
     || -z "${HADOOP_SUBCMD_SUPPORTDAEMONIZATION}" ]]; then
    hadoop_debug "Appending HADOOP_CLIENT_OPTS onto HADOOP_OPTS"
    HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
  fi
}
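
# Example for hadoop_add_client_opts (illustrative values):
#
#   HADOOP_CLIENT_OPTS="-Xmx512m"
#   HADOOP_SUBCMD_SUPPORTDAEMONIZATION=false
#   hadoop_add_client_opts      # HADOOP_OPTS now ends with " -Xmx512m"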

## @description Finish configuring Hadoop specific system properties
## @description prior to executing Java
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_finalize_hadoop_opts
{
  hadoop_translate_cygwin_path HADOOP_LOG_DIR
  hadoop_add_param HADOOP_OPTS hadoop.log.dir "-Dhadoop.log.dir=${HADOOP_LOG_DIR}"
  hadoop_add_param HADOOP_OPTS hadoop.log.file "-Dhadoop.log.file=${HADOOP_LOGFILE}"
  hadoop_translate_cygwin_path HADOOP_HOME
  export HADOOP_HOME
  hadoop_add_param HADOOP_OPTS hadoop.home.dir "-Dhadoop.home.dir=${HADOOP_HOME}"
  hadoop_add_param HADOOP_OPTS hadoop.id.str "-Dhadoop.id.str=${HADOOP_IDENT_STRING}"
  hadoop_add_param HADOOP_OPTS hadoop.root.logger "-Dhadoop.root.logger=${HADOOP_ROOT_LOGGER}"
  hadoop_add_param HADOOP_OPTS hadoop.policy.file "-Dhadoop.policy.file=${HADOOP_POLICYFILE}"
  hadoop_add_param HADOOP_OPTS hadoop.security.logger "-Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER}"
}

## @description Finish Java classpath prior to execution
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_finalize_classpath
{
  hadoop_add_classpath "${HADOOP_CONF_DIR}" before

  # user classpath gets added at the last minute. this allows
  # override of CONF dirs and more
  hadoop_add_to_classpath_userpath
  hadoop_translate_cygwin_path CLASSPATH true
}

## @description Finish all the remaining environment settings prior
## @description to executing Java. This is a wrapper that calls
## @description the other `finalize` routines.
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_finalize
{
  hadoop_shellprofiles_finalize

  hadoop_finalize_classpath
  hadoop_finalize_libpaths
  hadoop_finalize_hadoop_heap
  hadoop_finalize_hadoop_opts

  hadoop_translate_cygwin_path HADOOP_HOME
  hadoop_translate_cygwin_path HADOOP_CONF_DIR
  hadoop_translate_cygwin_path HADOOP_COMMON_HOME
  hadoop_translate_cygwin_path HADOOP_HDFS_HOME
  hadoop_translate_cygwin_path HADOOP_YARN_HOME
  hadoop_translate_cygwin_path HADOOP_MAPRED_HOME
}

## @description Print usage information and exit with the passed
## @description `exitcode`
## @audience public
## @stability stable
## @replaceable no
## @param exitcode
## @return This function will always exit.
function hadoop_exit_with_usage
{
  local exitcode=$1
  if [[ -z $exitcode ]]; then
    exitcode=1
  fi
  # shellcheck disable=SC2034
  if declare -F hadoop_usage >/dev/null ; then
    hadoop_usage
  elif [[ -x /usr/bin/cowsay ]]; then
    /usr/bin/cowsay -f elephant "Sorry, no help available."
  else
    hadoop_error "Sorry, no help available."
  fi
  exit $exitcode
}

## @description Verify that prerequisites have been met prior to
## @description executing a privileged program.
## @audience private
## @stability evolving
## @replaceable yes
## @return This routine may exit.
function hadoop_verify_secure_prereq
{
  # if you are on an OS like Illumos that has functional roles
  # and you are using pfexec, you'll probably want to change
  # this.
  if ! hadoop_privilege_check && [[ -z "${HADOOP_SECURE_COMMAND}" ]]; then
    hadoop_error "ERROR: You must be a privileged user in order to run a secure service."
    exit 1
  else
    return 0
  fi
}

## @audience private
## @stability evolving
## @replaceable yes
function hadoop_setup_secure_service
{
  # need a more complicated setup? replace me!
  HADOOP_PID_DIR=${HADOOP_SECURE_PID_DIR}
  HADOOP_LOG_DIR=${HADOOP_SECURE_LOG_DIR}
}

## @audience private
## @stability evolving
## @replaceable yes
function hadoop_verify_piddir
{
  if [[ -z "${HADOOP_PID_DIR}" ]]; then
    hadoop_error "No pid directory defined."
    exit 1
  fi
  hadoop_mkdir "${HADOOP_PID_DIR}"
  touch "${HADOOP_PID_DIR}/$$" >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Unable to write in ${HADOOP_PID_DIR}. Aborting."
    exit 1
  fi
  rm "${HADOOP_PID_DIR}/$$" >/dev/null 2>&1
}

## @audience private
## @stability evolving
## @replaceable yes
function hadoop_verify_logdir
{
  if [[ -z "${HADOOP_LOG_DIR}" ]]; then
    hadoop_error "No log directory defined."
    exit 1
  fi
  hadoop_mkdir "${HADOOP_LOG_DIR}"
  touch "${HADOOP_LOG_DIR}/$$" >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Unable to write in ${HADOOP_LOG_DIR}. Aborting."
    exit 1
  fi
  rm "${HADOOP_LOG_DIR}/$$" >/dev/null 2>&1
}

## @description Determine the status of the daemon referenced
## @description by `pidfile`
## @audience public
## @stability stable
## @replaceable yes
## @param pidfile
## @return (mostly) LSB 4.1.0 compatible status
function hadoop_status_daemon
{
  #
  # LSB 4.1.0 compatible status command (1)
  #
  # 0 = program is running
  # 1 = dead, but still a pid (2)
  # 2 = (not used by us)
  # 3 = not running
  #
  # 1 - this is not an endorsement of the LSB
  #
  # 2 - technically, the specification says /var/run/pid, so
  #     we should never return this value, but we're giving
  #     them the benefit of the doubt and returning 1 even if
  #     our pid is not in /var/run.
  #

  local pidfile=$1
  shift

  local pid
  local pspid

  if [[ -f "${pidfile}" ]]; then
    pid=$(cat "${pidfile}")
    if pspid=$(ps -o args= -p"${pid}" 2>/dev/null); then
      # this is to check that the running process we found is actually the same
      # daemon that we're interested in
      if [[ ${pspid} =~ -Dproc_${daemonname} ]]; then
        return 0
      fi
    fi
    return 1
  fi
  return 3
}
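
# Usage sketch for hadoop_status_daemon (hypothetical pid file; note that
# ${daemonname} above is resolved dynamically from the caller's scope,
# e.g. the locals set in hadoop_daemon_handler):
#
#   hadoop_status_daemon "${HADOOP_PID_DIR}/hadoop-hdfs-namenode.pid"
#   case $? in
#     0) echo "running" ;;
#     1) echo "dead, but pid file exists" ;;
#     3) echo "not running" ;;
#   esac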

## @description Execute the Java `class`, passing along any `options`.
## @description Additionally, set the Java property -Dproc_`command`.
## @audience public
## @stability stable
## @replaceable yes
## @param command
## @param class
## @param [options]
function hadoop_java_exec
{
  # run a java command. this is used for
  # non-daemons
  local command=$1
  local class=$2
  shift 2

  hadoop_debug "Final CLASSPATH: ${CLASSPATH}"
  hadoop_debug "Final HADOOP_OPTS: ${HADOOP_OPTS}"
  hadoop_debug "Final JAVA_HOME: ${JAVA_HOME}"
  hadoop_debug "java: ${JAVA}"
  hadoop_debug "Class name: ${class}"
  hadoop_debug "Command line options: $*"

  export CLASSPATH
  #shellcheck disable=SC2086
  exec "${JAVA}" "-Dproc_${command}" ${HADOOP_OPTS} "${class}" "$@"
}
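
# Example for hadoop_java_exec (illustrative; roughly what the `hadoop fs`
# entry point does after option parsing):
#
#   hadoop_java_exec fs org.apache.hadoop.fs.FsShell -ls /
#   # execs: java -Dproc_fs ${HADOOP_OPTS} org.apache.hadoop.fs.FsShell -ls /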

## @description Start a non-privileged daemon in the foreground.
## @audience private
## @stability evolving
## @replaceable yes
## @param command
## @param class
## @param pidfile
## @param [options]
function hadoop_start_daemon
{
  # this is our non-privileged daemon starter
  # that fires up a daemon in the *foreground*
  # so complex! so wow! much java!
  local command=$1
  local class=$2
  local pidfile=$3
  shift 3

  hadoop_debug "Final CLASSPATH: ${CLASSPATH}"
  hadoop_debug "Final HADOOP_OPTS: ${HADOOP_OPTS}"
  hadoop_debug "Final JAVA_HOME: ${JAVA_HOME}"
  hadoop_debug "java: ${JAVA}"
  hadoop_debug "Class name: ${class}"
  hadoop_debug "Command line options: $*"

  # this is for the non-daemon pid creation
  #shellcheck disable=SC2086
  echo $$ > "${pidfile}" 2>/dev/null
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot write ${command} pid ${pidfile}."
  fi

  export CLASSPATH
  #shellcheck disable=SC2086
  exec "${JAVA}" "-Dproc_${command}" ${HADOOP_OPTS} "${class}" "$@"
}

## @description Start a non-privileged daemon in the background.
## @audience private
## @stability evolving
## @replaceable yes
## @param command
## @param class
## @param pidfile
## @param outfile
## @param [options]
function hadoop_start_daemon_wrapper
{
  local daemonname=$1
  local class=$2
  local pidfile=$3
  local outfile=$4
  shift 4

  local counter

  hadoop_rotate_log "${outfile}"

  hadoop_start_daemon "${daemonname}" \
    "$class" \
    "${pidfile}" \
    "$@" >> "${outfile}" 2>&1 < /dev/null &

  # we need to avoid a race condition here
  # so let's wait for the fork to finish
  # before overriding with the daemonized pid
  (( counter=0 ))
  while [[ ! -f ${pidfile} && ${counter} -le 5 ]]; do
    sleep 1
    (( counter++ ))
  done

  # this is for daemon pid creation
  #shellcheck disable=SC2086
  echo $! > "${pidfile}" 2>/dev/null
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot write ${daemonname} pid ${pidfile}."
  fi

  # shellcheck disable=SC2086
  renice "${HADOOP_NICENESS}" $! >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot set priority of ${daemonname} process $!"
  fi

  # shellcheck disable=SC2086
  disown %+ >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot disconnect ${daemonname} process $!"
  fi
  sleep 1

  # capture the ulimit output
  ulimit -a >> "${outfile}" 2>&1

  # shellcheck disable=SC2086
  if ! ps -p $! >/dev/null 2>&1; then
    return 1
  fi
  return 0
}

## @description Start a privileged daemon in the foreground.
## @audience private
## @stability evolving
## @replaceable yes
## @param command
## @param class
## @param daemonpidfile
## @param daemonoutfile
## @param daemonerrfile
## @param wrapperpidfile
## @param [options]
function hadoop_start_secure_daemon
{
  # this is used to launch a secure daemon in the *foreground*
  #
  local daemonname=$1
  local class=$2

  # pid file to create for our daemon
  local daemonpidfile=$3

  # where to send stdout. jsvc has bad habits so this *may* be &1
  # which means you send it to stdout!
  local daemonoutfile=$4

  # where to send stderr. same thing, except &2 = stderr
  local daemonerrfile=$5
  local privpidfile=$6
  shift 6

  hadoop_rotate_log "${daemonoutfile}"
  hadoop_rotate_log "${daemonerrfile}"

  # shellcheck disable=SC2153
  jsvc="${JSVC_HOME}/jsvc"
  if [[ ! -f "${jsvc}" ]]; then
    hadoop_error "JSVC_HOME is not set or set incorrectly. jsvc is required to run secure"
    hadoop_error "or privileged daemons. Please download and install jsvc from "
    hadoop_error "http://archive.apache.org/dist/commons/daemon/binaries/ "
    hadoop_error "and set JSVC_HOME to the directory containing the jsvc binary."
    exit 1
  fi

  if [[ -z "${HADOOP_DAEMON_JSVC_EXTRA_OPTS}" ]]; then
    # If HADOOP_DAEMON_JSVC_EXTRA_OPTS is not set,
    # check whether jsvc -help has an entry for the -cwd option
    if ${jsvc} -help | grep -q "\-cwd"; then
      hadoop_debug "Your jsvc supports -cwd option." \
        "Adding option '-cwd .'. See HADOOP-16276 for details."
      HADOOP_DAEMON_JSVC_EXTRA_OPTS="-cwd ."
    else
      hadoop_debug "Your jsvc doesn't support -cwd option." \
        "No need to add option '-cwd .'. See HADOOP-16276 for details."
    fi
  else
    hadoop_debug "HADOOP_DAEMON_JSVC_EXTRA_OPTS is set." \
      "Ignoring jsvc -cwd option detection and addition."
  fi

  # note that shellcheck will throw a SC2086 warning here that is
  # bogus for our use case; it doesn't properly support multi-line
  # situations

  hadoop_debug "Final CLASSPATH: ${CLASSPATH}"
  hadoop_debug "Final HADOOP_OPTS: ${HADOOP_OPTS}"
  hadoop_debug "Final JSVC_HOME: ${JSVC_HOME}"
  hadoop_debug "jsvc: ${jsvc}"
  hadoop_debug "Final HADOOP_DAEMON_JSVC_EXTRA_OPTS: ${HADOOP_DAEMON_JSVC_EXTRA_OPTS}"
  hadoop_debug "Class name: ${class}"
  hadoop_debug "Command line options: $*"

  #shellcheck disable=SC2086
  echo $$ > "${privpidfile}" 2>/dev/null
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot write ${daemonname} pid ${privpidfile}."
  fi

  # shellcheck disable=SC2086
  exec "${jsvc}" \
    "-Dproc_${daemonname}" \
    ${HADOOP_DAEMON_JSVC_EXTRA_OPTS} \
    -outfile "${daemonoutfile}" \
    -errfile "${daemonerrfile}" \
    -pidfile "${daemonpidfile}" \
    -nodetach \
    -user "${HADOOP_SECURE_USER}" \
    -cp "${CLASSPATH}" \
    ${HADOOP_OPTS} \
    "${class}" "$@"
}

## @description Start a privileged daemon in the background.
## @audience private
## @stability evolving
## @replaceable yes
## @param command
## @param class
## @param daemonpidfile
## @param daemonoutfile
## @param wrapperpidfile
## @param wrapperoutfile
## @param daemonerrfile
## @param [options]
function hadoop_start_secure_daemon_wrapper
{
  # this wraps hadoop_start_secure_daemon to take care
  # of the dirty work to launch a daemon in the background!
  local daemonname=$1
  local class=$2

  # same rules as hadoop_start_secure_daemon except we
  # have some additional parameters
  local daemonpidfile=$3
  local daemonoutfile=$4

  # the pid file of the subprocess that spawned our
  # secure launcher
  local jsvcpidfile=$5

  # the output of the subprocess that spawned our secure
  # launcher
  local jsvcoutfile=$6
  local daemonerrfile=$7
  shift 7

  local counter

  hadoop_rotate_log "${jsvcoutfile}"

  hadoop_start_secure_daemon \
    "${daemonname}" \
    "${class}" \
    "${daemonpidfile}" \
    "${daemonoutfile}" \
    "${daemonerrfile}" \
    "${jsvcpidfile}" "$@" >> "${jsvcoutfile}" 2>&1 < /dev/null &

  # we need to avoid a race condition here
  # so let's wait for the fork to finish
  # before overriding with the daemonized pid
  (( counter=0 ))
  while [[ ! -f ${daemonpidfile} && ${counter} -le 5 ]]; do
    sleep 1
    (( counter++ ))
  done

  #shellcheck disable=SC2086
  if ! echo $! > "${jsvcpidfile}"; then
    hadoop_error "ERROR: Cannot write ${daemonname} pid ${jsvcpidfile}."
  fi

  sleep 1
  #shellcheck disable=SC2086
  renice "${HADOOP_NICENESS}" $! >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot set priority of ${daemonname} process $!"
  fi
  if [[ -f "${daemonpidfile}" ]]; then
    #shellcheck disable=SC2046
    renice "${HADOOP_NICENESS}" $(cat "${daemonpidfile}" 2>/dev/null) >/dev/null 2>&1
    if [[ $? -gt 0 ]]; then
      hadoop_error "ERROR: Cannot set priority of ${daemonname} process $(cat "${daemonpidfile}" 2>/dev/null)"
    fi
  fi
  #shellcheck disable=SC2046
  disown %+ >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot disconnect ${daemonname} process $!"
  fi

  # capture the ulimit output
  #shellcheck disable=SC2024
  sudo -u "${HADOOP_SECURE_USER}" bash -c "ulimit -a" >> "${jsvcoutfile}" 2>&1
  #shellcheck disable=SC2086
  if ! ps -p $! >/dev/null 2>&1; then
    return 1
  fi
  return 0
}

## @description Wait until the process dies or the timeout expires
## @audience private
## @stability evolving
## @param pid
## @param timeout
function wait_process_to_die_or_timeout
{
  local pid=$1
  local timeout=$2

  # Normalize timeout by rounding to the nearest integer,
  # with a minimum of 1 second
  timeout=$(printf "%.0f\n" "${timeout}")
  if [[ ${timeout} -lt 1 ]]; then
    timeout=1
  fi

  # Wait to see if it's still alive
  for (( i=0; i < "${timeout}"; i++ ))
  do
    if kill -0 "${pid}" > /dev/null 2>&1; then
      sleep 1
    else
      break
    fi
  done
}
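
# Example for wait_process_to_die_or_timeout (illustrative pid): give
# process 12345 up to 10 seconds to exit, as hadoop_stop_daemon does
# before escalating to kill -9:
#
#   wait_process_to_die_or_timeout 12345 10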

## @description Stop the non-privileged `command` daemon
## @description that is running at `pidfile`.
## @audience public
## @stability stable
## @replaceable yes
## @param command
## @param pidfile
function hadoop_stop_daemon
{
  local cmd=$1
  local pidfile=$2
  shift 2

  local pid
  local cur_pid

  if [[ -f "${pidfile}" ]]; then
    pid=$(cat "$pidfile")

    kill "${pid}" >/dev/null 2>&1

    wait_process_to_die_or_timeout "${pid}" "${HADOOP_STOP_TIMEOUT}"

    if kill -0 "${pid}" > /dev/null 2>&1; then
      hadoop_error "WARNING: ${cmd} did not stop gracefully after ${HADOOP_STOP_TIMEOUT} seconds: Trying to kill with kill -9"
      kill -9 "${pid}" >/dev/null 2>&1
    fi
    wait_process_to_die_or_timeout "${pid}" "${HADOOP_STOP_TIMEOUT}"
    if ps -p "${pid}" > /dev/null 2>&1; then
      hadoop_error "ERROR: Unable to kill ${pid}"
    else
      cur_pid=$(cat "$pidfile")
      if [[ "${pid}" = "${cur_pid}" ]]; then
        rm -f "${pidfile}" >/dev/null 2>&1
      else
        hadoop_error "WARNING: pid has changed for ${cmd}, skip deleting pid file"
      fi
    fi
  fi
}

## @description Stop the privileged `command` daemon
## @description that is running at `daemonpidfile` and launched with
## @description the wrapper at `wrapperpidfile`.
## @audience public
## @stability stable
## @replaceable yes
## @param command
## @param daemonpidfile
## @param wrapperpidfile
function hadoop_stop_secure_daemon
{
  local command=$1
  local daemonpidfile=$2
  local privpidfile=$3
  shift 3
  local ret

  local daemon_pid
  local priv_pid
  local cur_daemon_pid
  local cur_priv_pid

  daemon_pid=$(cat "$daemonpidfile")
  priv_pid=$(cat "$privpidfile")

  hadoop_stop_daemon "${command}" "${daemonpidfile}"
  ret=$?

  cur_daemon_pid=$(cat "$daemonpidfile")
  cur_priv_pid=$(cat "$privpidfile")

  if [[ "${daemon_pid}" = "${cur_daemon_pid}" ]]; then
    rm -f "${daemonpidfile}" >/dev/null 2>&1
  else
    hadoop_error "WARNING: daemon pid has changed for ${command}, skip deleting daemon pid file"
  fi

  if [[ "${priv_pid}" = "${cur_priv_pid}" ]]; then
    rm -f "${privpidfile}" >/dev/null 2>&1
  else
    hadoop_error "WARNING: priv pid has changed for ${command}, skip deleting priv pid file"
  fi
  return ${ret}
}

## @description Manage a non-privileged daemon.
## @audience private
## @stability evolving
## @replaceable yes
## @param [start|stop|status|default]
## @param command
## @param class
## @param daemonpidfile
## @param daemonoutfile
## @param [options]
function hadoop_daemon_handler
{
  local daemonmode=$1
  local daemonname=$2
  local class=$3
  local daemon_pidfile=$4
  local daemon_outfile=$5
  shift 5

  case ${daemonmode} in
    status)
      hadoop_status_daemon "${daemon_pidfile}"
      exit $?
    ;;
    stop)
      hadoop_stop_daemon "${daemonname}" "${daemon_pidfile}"
      exit $?
    ;;
    ##COMPAT -- older hadoops would also start daemons by default
    start|default)
      hadoop_verify_piddir
      hadoop_verify_logdir
      hadoop_status_daemon "${daemon_pidfile}"
      if [[ $? == 0 ]]; then
        hadoop_error "${daemonname} is running as process $(cat "${daemon_pidfile}"). Stop it first and ensure ${daemon_pidfile} file is empty before retry."
        exit 1
      else
        # stale pid file, so just remove it and continue on
        rm -f "${daemon_pidfile}" >/dev/null 2>&1
      fi

      ##COMPAT - differentiate between --daemon start and nothing
      # "nothing" shouldn't detach
      if [[ "$daemonmode" = "default" ]]; then
        hadoop_start_daemon "${daemonname}" "${class}" "${daemon_pidfile}" "$@"
      else
        hadoop_start_daemon_wrapper "${daemonname}" \
          "${class}" "${daemon_pidfile}" "${daemon_outfile}" "$@"
      fi
    ;;
  esac
}

## @description Manage a privileged daemon.
## @audience private
## @stability evolving
## @replaceable yes
## @param [start|stop|status|default]
## @param command
## @param class
## @param daemonpidfile
## @param daemonoutfile
## @param wrapperpidfile
## @param wrapperoutfile
## @param wrappererrfile
## @param [options]
function hadoop_secure_daemon_handler
{
  local daemonmode=$1
  local daemonname=$2
  local classname=$3
  local daemon_pidfile=$4
  local daemon_outfile=$5
  local priv_pidfile=$6
  local priv_outfile=$7
  local priv_errfile=$8
  shift 8

  case ${daemonmode} in
    status)
      hadoop_status_daemon "${daemon_pidfile}"
      exit $?
    ;;
    stop)
      hadoop_stop_secure_daemon "${daemonname}" \
        "${daemon_pidfile}" "${priv_pidfile}"
      exit $?
    ;;
    ##COMPAT -- older hadoops would also start daemons by default
    start|default)
      hadoop_verify_piddir
      hadoop_verify_logdir
      hadoop_status_daemon "${daemon_pidfile}"
      if [[ $? == 0 ]]; then
        hadoop_error "${daemonname} is running as process $(cat "${daemon_pidfile}"). Stop it first and ensure ${daemon_pidfile} file is empty before retry."
        exit 1
      else
        # stale pid file, so just remove it and continue on
        rm -f "${daemon_pidfile}" >/dev/null 2>&1
      fi

      ##COMPAT - differentiate between --daemon start and nothing
      # "nothing" shouldn't detach
      if [[ "${daemonmode}" = "default" ]]; then
        hadoop_start_secure_daemon "${daemonname}" "${classname}" \
          "${daemon_pidfile}" "${daemon_outfile}" \
          "${priv_errfile}" "${priv_pidfile}" "$@"
      else
        hadoop_start_secure_daemon_wrapper "${daemonname}" "${classname}" \
          "${daemon_pidfile}" "${daemon_outfile}" \
          "${priv_pidfile}" "${priv_outfile}" "${priv_errfile}" "$@"
      fi
    ;;
  esac
}

## @description Autodetect whether this is a privileged subcommand,
## @description based on whether a privileged user var exists
## @description and HADOOP_SECURE_CLASSNAME is defined
## @audience public
## @stability stable
## @replaceable yes
## @param command
## @param subcommand
## @return 1 = not priv
## @return 0 = priv
function hadoop_detect_priv_subcmd
{
  declare program=$1
  declare command=$2

  if [[ -z "${HADOOP_SECURE_CLASSNAME}" ]]; then
    hadoop_debug "No secure classname defined."
    return 1
  fi

  uvar=$(hadoop_build_custom_subcmd_var "${program}" "${command}" SECURE_USER)
  if [[ -z "${!uvar}" ]]; then
    hadoop_debug "No secure user defined."
    return 1
  fi
  return 0
}
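
# Sketch for hadoop_detect_priv_subcmd (hypothetical env; the class name
# below is a stand-in, not a real Hadoop class):
#
#   export HADOOP_SECURE_CLASSNAME="org.example.SecureStarter"
#   export HDFS_DATANODE_SECURE_USER=hdfs
#   hadoop_detect_priv_subcmd hdfs datanode && echo "privileged"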

## @description Build custom subcommand var
## @audience public
## @stability stable
## @replaceable yes
## @param command
## @param subcommand
## @param customid
## @return string
function hadoop_build_custom_subcmd_var
{
  declare program=$1
  declare command=$2
  declare custom=$3
  declare uprogram
  declare ucommand

  if [[ -z "${BASH_VERSINFO[0]}" ]] \
     || [[ "${BASH_VERSINFO[0]}" -lt 4 ]]; then
    uprogram=$(echo "${program}" | tr '[:lower:]' '[:upper:]')
    ucommand=$(echo "${command}" | tr '[:lower:]' '[:upper:]')
  else
    uprogram=${program^^}
    ucommand=${command^^}
  fi

  echo "${uprogram}_${ucommand}_${custom}"
}
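
# Example for hadoop_build_custom_subcmd_var (values illustrative):
#
#   hadoop_build_custom_subcmd_var hdfs datanode SECURE_USER
#   # prints: HDFS_DATANODE_SECURE_USER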

## @description Verify that the username in a var resolves to a user id
## @audience public
## @stability stable
## @replaceable yes
## @param userstring
## @return 0 for success
## @return 1 for failure
function hadoop_verify_user_resolves
{
  declare userstr=$1

  if [[ -z ${userstr} || -z ${!userstr} ]] ; then
    return 1
  fi

  id -u "${!userstr}" >/dev/null 2>&1
}
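
# Example for hadoop_verify_user_resolves (hypothetical user):
#
#   HDFS_DATANODE_SECURE_USER=hdfs
#   if hadoop_verify_user_resolves HDFS_DATANODE_SECURE_USER; then
#     echo "user resolves to a uid"
#   fi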

## @description Verify that ${USER} is allowed to execute the
## @description given subcommand.
## @audience public
## @stability stable
## @replaceable yes
## @param command
## @param subcommand
## @return return 0 on success
## @return exit 1 on failure
function hadoop_verify_user_perm
{
  declare program=$1
  declare command=$2
  declare uvar

  if [[ ${command} =~ \. ]]; then
    return 1
  fi

  uvar=$(hadoop_build_custom_subcmd_var "${program}" "${command}" USER)

  if [[ -n ${!uvar} ]]; then
    if [[ ${!uvar} != "${USER}" ]]; then
      hadoop_error "ERROR: ${command} can only be executed by ${!uvar}."
      exit 1
    fi
  fi
  return 0
}
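
# Example for hadoop_verify_user_perm (hypothetical restriction):
#
#   export HDFS_NAMENODE_USER=hdfs
#   hadoop_verify_user_perm hdfs namenode
#   # exits 1 with an error unless ${USER} is "hdfs"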

## @description Check whether the given subcommand needs to be
## @description re-executed as the configured subcommand user.
## @audience public
## @stability stable
## @replaceable yes
## @param subcommand
## @return 1 on no re-exec needed
## @return 0 on need to re-exec
function hadoop_need_reexec
{
  declare program=$1
  declare command=$2
  declare uvar

  # we've already been re-execed, bail
  if [[ "${HADOOP_REEXECED_CMD}" = true ]]; then
    return 1
  fi

  if [[ ${command} =~ \. ]]; then
    return 1
  fi

  # if we have privilege, and the _USER is defined, and _USER is
  # set to someone who isn't us, then yes, we should re-exec.
  # otherwise no, don't re-exec and let the system deal with it.
  if hadoop_privilege_check; then
    uvar=$(hadoop_build_custom_subcmd_var "${program}" "${command}" USER)
    if [[ -n ${!uvar} ]]; then
      if [[ ${!uvar} != "${USER}" ]]; then
        return 0
      fi
    fi
  fi
  return 1
}

## @description Add custom (program)_(command)_OPTS to HADOOP_OPTS.
## @description Also handles the deprecated cases from pre-3.x.
## @audience public
## @stability evolving
## @replaceable yes
## @param program
## @param subcommand
## @return will exit on failure conditions
function hadoop_subcommand_opts
{
  declare program=$1
  declare command=$2
  declare uvar
  declare depvar
  declare uprogram
  declare ucommand

  if [[ -z "${program}" || -z "${command}" ]]; then
    return 1
  fi

  if [[ ${command} =~ \. ]]; then
    return 1
  fi

  # bash 4 and up have built-in ways to upper and lower
  # case the contents of vars. This is faster than
  # calling tr.

  ## We don't call hadoop_build_custom_subcmd_var here
  ## since we need to construct this for the deprecation
  ## cases. For Hadoop 4.x, this needs to get cleaned up.
  if [[ -z "${BASH_VERSINFO[0]}" ]] \
     || [[ "${BASH_VERSINFO[0]}" -lt 4 ]]; then
    uprogram=$(echo "${program}" | tr '[:lower:]' '[:upper:]')
    ucommand=$(echo "${command}" | tr '[:lower:]' '[:upper:]')
  else
    uprogram=${program^^}
    ucommand=${command^^}
  fi

  uvar="${uprogram}_${ucommand}_OPTS"

  # Let's handle all of the deprecation cases early
  # HADOOP_NAMENODE_OPTS -> HDFS_NAMENODE_OPTS
  depvar="HADOOP_${ucommand}_OPTS"

  if [[ "${depvar}" != "${uvar}" ]]; then
    if [[ -n "${!depvar}" ]]; then
      hadoop_deprecate_envvar "${depvar}" "${uvar}"
    fi
  fi

  if [[ -n ${!uvar} ]]; then
    hadoop_debug "Appending ${uvar} onto HADOOP_OPTS"
    HADOOP_OPTS="${HADOOP_OPTS} ${!uvar}"
    return 0
  fi
}
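
# Example for hadoop_subcommand_opts (hypothetical values; mirrors the
# deprecation note above):
#
#   export HADOOP_NAMENODE_OPTS="-Xmx2g"      # pre-3.x name
#   hadoop_subcommand_opts hdfs namenode
#   # hadoop_deprecate_envvar warns and maps it to HDFS_NAMENODE_OPTS,
#   # which is then appended onto HADOOP_OPTS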

## @description Add custom (program)_(command)_SECURE_EXTRA_OPTS to HADOOP_OPTS.
## @description This *does not* handle the pre-3.x deprecated cases
## @audience public
## @stability stable
## @replaceable yes
## @param program
## @param subcommand
## @return will exit on failure conditions
function hadoop_subcommand_secure_opts
{
  declare program=$1
  declare command=$2
  declare uvar
  declare uprogram
  declare ucommand

  if [[ -z "${program}" || -z "${command}" ]]; then
    return 1
  fi

  # HDFS_DATANODE_SECURE_EXTRA_OPTS
  # HDFS_NFS3_SECURE_EXTRA_OPTS
  # ...
  uvar=$(hadoop_build_custom_subcmd_var "${program}" "${command}" SECURE_EXTRA_OPTS)

  if [[ -n ${!uvar} ]]; then
    hadoop_debug "Appending ${uvar} onto HADOOP_OPTS"
    HADOOP_OPTS="${HADOOP_OPTS} ${!uvar}"
    return 0
  fi
}

## @description Perform the 'hadoop classpath', etc subcommand with the given
## @description parameters
## @audience private
## @stability evolving
## @replaceable yes
## @param [parameters]
## @return will print the classpath and exit if no parameters are given
function hadoop_do_classpath_subcommand
{
  if [[ "$#" -gt 1 ]]; then
    eval "$1"=org.apache.hadoop.util.Classpath
  else
    hadoop_finalize
    echo "${CLASSPATH}"
    exit 0
  fi
}
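
# Usage sketch for hadoop_do_classpath_subcommand (hypothetical call site;
# HADOOP_CLASSNAME is the variable named by $1):
#
#   hadoop_do_classpath_subcommand HADOOP_CLASSNAME "$@"
#   # with no extra args: prints the finalized CLASSPATH and exits;
#   # with extra args: sets HADOOP_CLASSNAME=org.apache.hadoop.util.Classpath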

## @description Generic shell script option parser. Sets
## @description HADOOP_PARSE_COUNTER to the number of arguments
## @description the caller should shift.
## @audience private
## @stability evolving
## @replaceable yes
## @param [parameters, typically "$@"]
function hadoop_parse_args
{
  HADOOP_DAEMON_MODE="default"
  HADOOP_PARSE_COUNTER=0

  # not all of the options supported here are supported by all commands
  # however these are:
  hadoop_add_option "--config dir" "Hadoop config directory"
  hadoop_add_option "--debug" "turn on shell script debug mode"
  hadoop_add_option "--help" "usage information"

  while true; do
    hadoop_debug "hadoop_parse_args: processing $1"
    case $1 in
      --buildpaths)
        HADOOP_ENABLE_BUILD_PATHS=true
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+1))
      ;;
      --config)
        shift
        confdir=$1
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
        if [[ -d "${confdir}" ]]; then
          HADOOP_CONF_DIR="${confdir}"
        elif [[ -z "${confdir}" ]]; then
          hadoop_error "ERROR: No parameter provided for --config"
          hadoop_exit_with_usage 1
        else
          hadoop_error "ERROR: Cannot find configuration directory \"${confdir}\""
          hadoop_exit_with_usage 1
        fi
      ;;
      --daemon)
        shift
        HADOOP_DAEMON_MODE=$1
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
        if [[ -z "${HADOOP_DAEMON_MODE}" || \
          ! "${HADOOP_DAEMON_MODE}" =~ ^st(art|op|atus)$ ]]; then
          hadoop_error "ERROR: --daemon must be followed by either \"start\", \"stop\", or \"status\"."
          hadoop_exit_with_usage 1
        fi
      ;;
      --debug)
        shift
        HADOOP_SHELL_SCRIPT_DEBUG=true
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+1))
      ;;
      --help|-help|-h|help|--h|--\?|-\?|\?)
        hadoop_exit_with_usage 0
      ;;
      --hostnames)
        shift
        HADOOP_WORKER_NAMES="$1"
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
      ;;
      --hosts)
        shift
        hadoop_populate_workers_file "$1"
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
      ;;
      --loglevel)
        shift
        # shellcheck disable=SC2034
        HADOOP_LOGLEVEL="$1"
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
      ;;
      --reexec)
        shift
        if [[ "${HADOOP_REEXECED_CMD}" = true ]]; then
          hadoop_error "ERROR: re-exec fork bomb prevention: --reexec already called"
          exit 1
        fi
        HADOOP_REEXECED_CMD=true
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+1))
      ;;
      --workers)
        shift
        # shellcheck disable=SC2034
        HADOOP_WORKER_MODE=true
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+1))
      ;;
      *)
        break
      ;;
    esac
  done

  hadoop_debug "hadoop_parse: asking caller to skip ${HADOOP_PARSE_COUNTER}"
}
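
# Usage sketch for hadoop_parse_args (this is the caller-side contract
# described above):
#
#   hadoop_parse_args "$@"
#   shift "${HADOOP_PARSE_COUNTER}"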

## @description Handle subcommands from main program entries
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_generic_java_subcmd_handler
{
  declare priv_outfile
  declare priv_errfile
  declare priv_pidfile
  declare daemon_outfile
  declare daemon_pidfile
  declare secureuser

  # The default/expected way to determine if a daemon is going to run in secure
  # mode is defined by hadoop_detect_priv_subcmd. If this returns true
  # then set up the secure user var and tell the world we're in secure mode
  if hadoop_detect_priv_subcmd "${HADOOP_SHELL_EXECNAME}" "${HADOOP_SUBCMD}"; then
    HADOOP_SUBCMD_SECURESERVICE=true
    secureuser=$(hadoop_build_custom_subcmd_var "${HADOOP_SHELL_EXECNAME}" "${HADOOP_SUBCMD}" SECURE_USER)

    if ! hadoop_verify_user_resolves "${secureuser}"; then
      hadoop_error "ERROR: User defined in ${secureuser} (${!secureuser}) does not exist. Aborting."
      exit 1
    fi

    HADOOP_SECURE_USER="${!secureuser}"
  fi

  # check if we're running in secure mode.
  # breaking this up from the above lets 3rd parties
  # do things a bit differently
  #
  # secure services require some extra setup
  # if yes, then we need to define all of the priv and daemon stuff
  # if not, then we just need to define daemon stuff.
  # note the daemon vars are purposefully different between the two
  if [[ "${HADOOP_SUBCMD_SECURESERVICE}" = true ]]; then

    hadoop_subcommand_secure_opts "${HADOOP_SHELL_EXECNAME}" "${HADOOP_SUBCMD}"

    hadoop_verify_secure_prereq
    hadoop_setup_secure_service
    priv_outfile="${HADOOP_LOG_DIR}/privileged-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.out"
    priv_errfile="${HADOOP_LOG_DIR}/privileged-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.err"
    priv_pidfile="${HADOOP_PID_DIR}/privileged-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}.pid"
    daemon_outfile="${HADOOP_LOG_DIR}/hadoop-${HADOOP_SECURE_USER}-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.out"
    daemon_pidfile="${HADOOP_PID_DIR}/hadoop-${HADOOP_SECURE_USER}-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}.pid"
  else
    daemon_outfile="${HADOOP_LOG_DIR}/hadoop-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.out"
    daemon_pidfile="${HADOOP_PID_DIR}/hadoop-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}.pid"
  fi

  # are we actually in daemon mode?
  # if yes, use the daemon logger and the appropriate log file.
  if [[ "${HADOOP_DAEMON_MODE}" != "default" ]]; then
    HADOOP_ROOT_LOGGER="${HADOOP_DAEMON_ROOT_LOGGER}"
    if [[ "${HADOOP_SUBCMD_SECURESERVICE}" = true ]]; then
      HADOOP_LOGFILE="hadoop-${HADOOP_SECURE_USER}-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.log"
    else
      HADOOP_LOGFILE="hadoop-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.log"
    fi
  fi

  # finish defining the environment: system properties, env vars, class paths, etc.
  hadoop_finalize

  # do the hard work of launching a daemon or just executing our interactive
  # java class
  if [[ "${HADOOP_SUBCMD_SUPPORTDAEMONIZATION}" = true ]]; then
    if [[ "${HADOOP_SUBCMD_SECURESERVICE}" = true ]]; then
      hadoop_secure_daemon_handler \
        "${HADOOP_DAEMON_MODE}" \
        "${HADOOP_SUBCMD}" \
        "${HADOOP_SECURE_CLASSNAME}" \
        "${daemon_pidfile}" \
        "${daemon_outfile}" \
        "${priv_pidfile}" \
        "${priv_outfile}" \
        "${priv_errfile}" \
        "${HADOOP_SUBCMD_ARGS[@]}"
    else
      hadoop_daemon_handler \
        "${HADOOP_DAEMON_MODE}" \
        "${HADOOP_SUBCMD}" \
        "${HADOOP_CLASSNAME}" \
        "${daemon_pidfile}" \
        "${daemon_outfile}" \
        "${HADOOP_SUBCMD_ARGS[@]}"
    fi
    exit $?
  else
    hadoop_java_exec "${HADOOP_SUBCMD}" "${HADOOP_CLASSNAME}" "${HADOOP_SUBCMD_ARGS[@]}"
  fi
}