#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# we need to declare this globally as an array, which can only
# be done outside of a function
declare -a HADOOP_SUBCMD_USAGE
declare -a HADOOP_OPTION_USAGE

## @description Print a message to stderr
## @audience public
## @stability stable
## @replaceable no
## @param string
function hadoop_error
{
  echo "$*" 1>&2
}

## @description Print a message to stderr if --debug is turned on
## @audience public
## @stability stable
## @replaceable no
## @param string
function hadoop_debug
{
  if [[ -n "${HADOOP_SHELL_SCRIPT_DEBUG}" ]]; then
    echo "DEBUG: $*" 1>&2
  fi
}

## @description Given variable $1 delete $2 from it
## @audience public
## @stability stable
## @replaceable no
function hadoop_delete_entry
{
  if [[ ${!1} =~ \ ${2}\  ]] ; then
    hadoop_debug "Removing ${2} from ${1}"
    eval "${1}"=\""${!1// ${2} }"\"
  fi
}

## @description Given variable $1 add $2 to it
## @audience public
## @stability stable
## @replaceable no
function hadoop_add_entry
{
  if [[ ! ${!1} =~ \ ${2}\  ]] ; then
    hadoop_debug "Adding ${2} to ${1}"
    #shellcheck disable=SC2140
    eval "${1}"=\""${!1} ${2} "\"
  fi
}

## @description Given variable $1 determine if $2 is in it
## @audience public
## @stability stable
## @replaceable no
## @return 0 = yes, 1 = no
function hadoop_verify_entry
{
  # this unfortunately can't really be tested by bats. :(
  # so if this changes, be aware that unit tests effectively
  # do this function in them
  [[ ${!1} =~ \ ${2}\  ]]
}
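
## Illustrative sketch (not part of upstream): the entry helpers treat the
## named variable as a space-delimited set, so entries are stored with
## surrounding spaces. Assuming MYLIST starts unset:
#
#   hadoop_add_entry MYLIST alpha      # MYLIST now contains " alpha "
#   hadoop_add_entry MYLIST alpha      # no-op: already present
#   hadoop_verify_entry MYLIST alpha   # returns 0
#   hadoop_delete_entry MYLIST alpha   # " alpha " is removed
#   hadoop_verify_entry MYLIST alpha   # returns 1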

## @description Add a subcommand to the usage output
## @audience private
## @stability evolving
## @replaceable no
## @param subcommand
## @param subcommanddesc
function hadoop_add_subcommand
{
  local subcmd=$1
  local text=$2

  HADOOP_SUBCMD_USAGE[${HADOOP_SUBCMD_USAGE_COUNTER}]="${subcmd}@${text}"
  ((HADOOP_SUBCMD_USAGE_COUNTER=HADOOP_SUBCMD_USAGE_COUNTER+1))
}

## @description Add an option to the usage output
## @audience private
## @stability evolving
## @replaceable no
## @param option
## @param optiondesc
function hadoop_add_option
{
  local option=$1
  local text=$2

  HADOOP_OPTION_USAGE[${HADOOP_OPTION_USAGE_COUNTER}]="${option}@${text}"
  ((HADOOP_OPTION_USAGE_COUNTER=HADOOP_OPTION_USAGE_COUNTER+1))
}

## @description Reset the usage information to blank
## @audience private
## @stability evolving
## @replaceable no
function hadoop_reset_usage
{
  HADOOP_SUBCMD_USAGE=()
  HADOOP_OPTION_USAGE=()
  HADOOP_SUBCMD_USAGE_COUNTER=0
  HADOOP_OPTION_USAGE_COUNTER=0
}

## @description Print a screen-size aware two-column output
## @audience private
## @stability evolving
## @replaceable no
## @param array
function hadoop_generic_columnprinter
{
  declare -a input=("$@")
  declare -i i=0
  declare -i counter=0
  declare line
  declare text
  declare option
  declare giventext
  declare -i maxoptsize
  declare -i foldsize
  declare -a tmpa
  declare numcols

  if [[ -n "${COLUMNS}" ]]; then
    numcols=${COLUMNS}
  else
    numcols=$(tput cols 2>/dev/null)
  fi

  if [[ -z "${numcols}"
     || ! "${numcols}" =~ ^[0-9]+$ ]]; then
    numcols=75
  else
    ((numcols=numcols-5))
  fi

  while read -r line; do
    tmpa[${counter}]=${line}
    ((counter=counter+1))
    option=$(echo "${line}" | cut -f1 -d'@')
    if [[ ${#option} -gt ${maxoptsize} ]]; then
      maxoptsize=${#option}
    fi
  done < <(for text in "${input[@]}"; do
    echo "${text}"
  done | sort)

  i=0
  ((foldsize=numcols-maxoptsize))

  until [[ $i -eq ${#tmpa[@]} ]]; do
    option=$(echo "${tmpa[$i]}" | cut -f1 -d'@')
    giventext=$(echo "${tmpa[$i]}" | cut -f2 -d'@')

    while read -r line; do
      printf "%-${maxoptsize}s %-s\n" "${option}" "${line}"
      option=" "
    done < <(echo "${giventext}"| fold -s -w ${foldsize})
    ((i=i+1))
  done
}
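
## Illustrative sketch (not part of upstream): each array element is an
## "option@description" pair; the description column is folded to fit the
## detected terminal width.
#
#   declare -a demo=("--debug@turn on shell script debug mode"
#                    "--hosts filename@list of hosts to use in worker mode")
#   hadoop_generic_columnprinter "${demo[@]}"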

## @description generate standard usage output
## @description and optionally takes a class
## @audience private
## @stability evolving
## @replaceable no
## @param execname
## @param true|false
## @param [text to use in place of SUBCOMMAND]
function hadoop_generate_usage
{
  local cmd=$1
  local takesclass=$2
  local subcmdtext=${3:-"SUBCOMMAND"}
  local haveoptions
  local optstring
  local havesubs
  local subcmdstring

  cmd=${cmd##*/}

  if [[ -n "${HADOOP_OPTION_USAGE_COUNTER}"
     && "${HADOOP_OPTION_USAGE_COUNTER}" -gt 0 ]]; then
    haveoptions=true
    optstring=" [OPTIONS]"
  fi

  if [[ -n "${HADOOP_SUBCMD_USAGE_COUNTER}"
     && "${HADOOP_SUBCMD_USAGE_COUNTER}" -gt 0 ]]; then
    havesubs=true
    subcmdstring=" ${subcmdtext} [${subcmdtext} OPTIONS]"
  fi

  echo "Usage: ${cmd}${optstring}${subcmdstring}"
  if [[ ${takesclass} = true ]]; then
    echo " or    ${cmd}${optstring} CLASSNAME [CLASSNAME OPTIONS]"
    echo "  where CLASSNAME is a user-provided Java class"
  fi

  if [[ "${haveoptions}" = true ]]; then
    echo ""
    echo "  OPTIONS is none or any of:"
    echo ""
    hadoop_generic_columnprinter "${HADOOP_OPTION_USAGE[@]}"
  fi

  if [[ "${havesubs}" = true ]]; then
    echo ""
    echo "  ${subcmdtext} is one of:"
    echo ""
    hadoop_generic_columnprinter "${HADOOP_SUBCMD_USAGE[@]}"
    echo ""
    echo "${subcmdtext} may print help when invoked w/o parameters or with -h."
  fi
}
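
## Illustrative sketch (not part of upstream): a command script typically
## resets the usage state, registers its options and subcommands, and then
## generates the two-column usage text.
#
#   hadoop_reset_usage
#   hadoop_add_option "--debug" "turn on shell script debug mode"
#   hadoop_add_subcommand "classpath" "prints the class path"
#   hadoop_generate_usage "mycmd" true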

## @description Replace `oldvar` with `newvar` if `oldvar` exists.
## @audience public
## @stability stable
## @replaceable yes
## @param oldvar
## @param newvar
function hadoop_deprecate_envvar
{
  local oldvar=$1
  local newvar=$2
  local oldval=${!oldvar}
  local newval=${!newvar}

  if [[ -n "${oldval}" ]]; then
    hadoop_error "WARNING: ${oldvar} has been replaced by ${newvar}. Using value of ${oldvar}."
    # shellcheck disable=SC2086
    eval ${newvar}=\"${oldval}\"

    # shellcheck disable=SC2086
    newval=${oldval}

    # shellcheck disable=SC2086
    eval ${newvar}=\"${newval}\"
  fi
}
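
## Illustrative sketch (not part of upstream; the variable names are
## hypothetical): when the deprecated name is set, its value overrides the
## new variable and a warning goes to stderr.
#
#   export HADOOP_SLAVES=/etc/hadoop/workers
#   hadoop_deprecate_envvar HADOOP_SLAVES HADOOP_WORKERS
#   # HADOOP_WORKERS is now /etc/hadoop/workers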

## @description Bootstraps the Hadoop shell environment
## @audience private
## @stability evolving
## @replaceable no
function hadoop_bootstrap
{
  # the root of the Hadoop installation
  # See HADOOP-6255 for the expected directory structure layout

  if [[ -n "${DEFAULT_LIBEXEC_DIR}" ]]; then
    hadoop_error "WARNING: DEFAULT_LIBEXEC_DIR ignored. It has been replaced by HADOOP_DEFAULT_LIBEXEC_DIR."
  fi

  # By now, HADOOP_LIBEXEC_DIR should have been defined upstream.
  # We can piggyback off of that to figure out where the default
  # HADOOP_HOME should be. This allows us to run without
  # HADOOP_HOME ever being defined by a human! As a consequence,
  # HADOOP_LIBEXEC_DIR now becomes perhaps the single most powerful
  # env var within Hadoop.
  if [[ -z "${HADOOP_LIBEXEC_DIR}" ]]; then
    hadoop_error "HADOOP_LIBEXEC_DIR is not defined. Exiting."
    exit 1
  fi
  HADOOP_DEFAULT_PREFIX=$(cd -P -- "${HADOOP_LIBEXEC_DIR}/.." >/dev/null && pwd -P)
  HADOOP_HOME=${HADOOP_HOME:-$HADOOP_DEFAULT_PREFIX}
  export HADOOP_HOME

  #
  # short-cuts. vendors may redefine these as well, preferably
  # in hadoop-layout.sh
  #
  HADOOP_COMMON_DIR=${HADOOP_COMMON_DIR:-"share/hadoop/common"}
  HADOOP_COMMON_LIB_JARS_DIR=${HADOOP_COMMON_LIB_JARS_DIR:-"share/hadoop/common/lib"}
  HADOOP_COMMON_LIB_NATIVE_DIR=${HADOOP_COMMON_LIB_NATIVE_DIR:-"lib/native"}
  HDFS_DIR=${HDFS_DIR:-"share/hadoop/hdfs"}
  HDFS_LIB_JARS_DIR=${HDFS_LIB_JARS_DIR:-"share/hadoop/hdfs/lib"}
  YARN_DIR=${YARN_DIR:-"share/hadoop/yarn"}
  YARN_LIB_JARS_DIR=${YARN_LIB_JARS_DIR:-"share/hadoop/yarn/lib"}
  MAPRED_DIR=${MAPRED_DIR:-"share/hadoop/mapreduce"}
  MAPRED_LIB_JARS_DIR=${MAPRED_LIB_JARS_DIR:-"share/hadoop/mapreduce/lib"}
  HADOOP_TOOLS_HOME=${HADOOP_TOOLS_HOME:-${HADOOP_HOME}}
  HADOOP_TOOLS_DIR=${HADOOP_TOOLS_DIR:-"share/hadoop/tools"}
  HADOOP_TOOLS_LIB_JARS_DIR=${HADOOP_TOOLS_LIB_JARS_DIR:-"${HADOOP_TOOLS_DIR}/lib"}

  # usage output set to zero
  hadoop_reset_usage

  export HADOOP_OS_TYPE=${HADOOP_OS_TYPE:-$(uname -s)}

  # defaults
  export HADOOP_OPTS=${HADOOP_OPTS:-"-Djava.net.preferIPv4Stack=true"}
  hadoop_debug "Initial HADOOP_OPTS=${HADOOP_OPTS}"
}

## @description Locate Hadoop's configuration directory
## @audience private
## @stability evolving
## @replaceable no
function hadoop_find_confdir
{
  local conf_dir

  # An attempt at compatibility with some Hadoop 1.x
  # installs.
  if [[ -e "${HADOOP_HOME}/conf/hadoop-env.sh" ]]; then
    conf_dir="conf"
  else
    conf_dir="etc/hadoop"
  fi
  export HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-${HADOOP_HOME}/${conf_dir}}"

  hadoop_debug "HADOOP_CONF_DIR=${HADOOP_CONF_DIR}"
}

## @description Validate ${HADOOP_CONF_DIR}
## @audience public
## @stability stable
## @replaceable yes
## @return will exit on failure conditions
function hadoop_verify_confdir
{
  # Check only log4j.properties by default.
  # --loglevel does not work without logger settings in log4j.properties.
  if [[ ! -f "${HADOOP_CONF_DIR}/log4j.properties" ]]; then
    hadoop_error "WARNING: log4j.properties is not found. HADOOP_CONF_DIR may be incomplete."
  fi
}

## @description Import the hadoop-env.sh settings
## @audience private
## @stability evolving
## @replaceable no
function hadoop_exec_hadoopenv
{
  if [[ -z "${HADOOP_ENV_PROCESSED}" ]]; then
    if [[ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]]; then
      export HADOOP_ENV_PROCESSED=true
      # shellcheck disable=SC1090
      . "${HADOOP_CONF_DIR}/hadoop-env.sh"
    fi
  fi
}

## @description Import the replaced functions
## @audience private
## @stability evolving
## @replaceable no
function hadoop_exec_userfuncs
{
  if [[ -e "${HADOOP_CONF_DIR}/hadoop-user-functions.sh" ]]; then
    # shellcheck disable=SC1090
    . "${HADOOP_CONF_DIR}/hadoop-user-functions.sh"
  fi
}

## @description Read the user's settings. This provides for users to
## @description override and/or append hadoop-env.sh. It is not meant
## @description as a complete system override.
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_exec_user_hadoopenv
{
  if [[ -f "${HOME}/.hadoop-env" ]]; then
    hadoop_debug "Applying the user's .hadoop-env"
    # shellcheck disable=SC1090
    . "${HOME}/.hadoop-env"
  fi
}

## @description Read the user's settings. This provides for users to
## @description run Hadoop Shell API after system bootstrap
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_exec_hadooprc
{
  if [[ -f "${HOME}/.hadooprc" ]]; then
    hadoop_debug "Applying the user's .hadooprc"
    # shellcheck disable=SC1090
    . "${HOME}/.hadooprc"
  fi
}

## @description Import shellprofile.d content
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_import_shellprofiles
{
  local i
  local files1
  local files2

  if [[ -d "${HADOOP_LIBEXEC_DIR}/shellprofile.d" ]]; then
    files1=(${HADOOP_LIBEXEC_DIR}/shellprofile.d/*.sh)
    hadoop_debug "shellprofiles: ${files1[*]}"
  else
    hadoop_error "WARNING: ${HADOOP_LIBEXEC_DIR}/shellprofile.d doesn't exist. Functionality may not work."
  fi

  if [[ -d "${HADOOP_CONF_DIR}/shellprofile.d" ]]; then
    files2=(${HADOOP_CONF_DIR}/shellprofile.d/*.sh)
  fi

  # enable bundled shellprofiles that come
  # from hadoop-tools. This converts the user-facing HADOOP_OPTIONAL_TOOLS
  # to the HADOOP_TOOLS_OPTIONS that the shell profiles expect.
  # See dist-tools-hooks-maker for how the example HADOOP_OPTIONAL_TOOLS
  # gets populated into hadoop-env.sh
  for i in ${HADOOP_OPTIONAL_TOOLS//,/ }; do
    hadoop_add_entry HADOOP_TOOLS_OPTIONS "${i}"
  done

  for i in "${files1[@]}" "${files2[@]}"
  do
    if [[ -n "${i}"
      && -f "${i}" ]]; then
      hadoop_debug "Profiles: importing ${i}"
      # shellcheck disable=SC1090
      . "${i}"
    fi
  done
}

## @description Initialize the registered shell profiles
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_shellprofiles_init
{
  local i

  for i in ${HADOOP_SHELL_PROFILES}
  do
    if declare -F _${i}_hadoop_init >/dev/null ; then
      hadoop_debug "Profiles: ${i} init"
      # shellcheck disable=SC2086
      _${i}_hadoop_init
    fi
  done
}

## @description Apply the shell profile classpath additions
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_shellprofiles_classpath
{
  local i

  for i in ${HADOOP_SHELL_PROFILES}
  do
    if declare -F _${i}_hadoop_classpath >/dev/null ; then
      hadoop_debug "Profiles: ${i} classpath"
      # shellcheck disable=SC2086
      _${i}_hadoop_classpath
    fi
  done
}

## @description Apply the shell profile native library additions
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_shellprofiles_nativelib
{
  local i

  for i in ${HADOOP_SHELL_PROFILES}
  do
    if declare -F _${i}_hadoop_nativelib >/dev/null ; then
      hadoop_debug "Profiles: ${i} nativelib"
      # shellcheck disable=SC2086
      _${i}_hadoop_nativelib
    fi
  done
}

## @description Apply the shell profile final configuration
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_shellprofiles_finalize
{
  local i

  for i in ${HADOOP_SHELL_PROFILES}
  do
    if declare -F _${i}_hadoop_finalize >/dev/null ; then
      hadoop_debug "Profiles: ${i} finalize"
      # shellcheck disable=SC2086
      _${i}_hadoop_finalize
    fi
  done
}
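
## Illustrative sketch (not part of upstream): a shellprofile named
## "example" registers itself with hadoop_add_profile (defined below) and
## may supply any of the _example_hadoop_{init,classpath,nativelib,finalize}
## hooks that the loops above invoke.
#
#   hadoop_add_profile example
#
#   function _example_hadoop_classpath
#   {
#     hadoop_add_classpath "/opt/example/lib"'/*'
#   }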

## @description Initialize the Hadoop shell environment, now that
## @description user settings have been imported
## @audience private
## @stability evolving
## @replaceable no
function hadoop_basic_init
{
  # Some of these are also set in hadoop-env.sh.
  # we still set them here just in case hadoop-env.sh is
  # broken in some way, set up defaults, etc.
  #
  # but it is important to note that if you update these
  # you also need to update hadoop-env.sh as well!!!

  CLASSPATH=""
  hadoop_debug "Initialize CLASSPATH"

  if [[ -z "${HADOOP_COMMON_HOME}" ]] &&
     [[ -d "${HADOOP_HOME}/${HADOOP_COMMON_DIR}" ]]; then
    export HADOOP_COMMON_HOME="${HADOOP_HOME}"
  fi

  # default policy file for service-level authorization
  HADOOP_POLICYFILE=${HADOOP_POLICYFILE:-"hadoop-policy.xml"}

  # define HADOOP_HDFS_HOME
  if [[ -z "${HADOOP_HDFS_HOME}" ]] &&
     [[ -d "${HADOOP_HOME}/${HDFS_DIR}" ]]; then
    export HADOOP_HDFS_HOME="${HADOOP_HOME}"
  fi

  # define HADOOP_YARN_HOME
  if [[ -z "${HADOOP_YARN_HOME}" ]] &&
     [[ -d "${HADOOP_HOME}/${YARN_DIR}" ]]; then
    export HADOOP_YARN_HOME="${HADOOP_HOME}"
  fi

  # define HADOOP_MAPRED_HOME
  if [[ -z "${HADOOP_MAPRED_HOME}" ]] &&
     [[ -d "${HADOOP_HOME}/${MAPRED_DIR}" ]]; then
    export HADOOP_MAPRED_HOME="${HADOOP_HOME}"
  fi

  if [[ ! -d "${HADOOP_COMMON_HOME}" ]]; then
    hadoop_error "ERROR: Invalid HADOOP_COMMON_HOME"
    exit 1
  fi

  if [[ ! -d "${HADOOP_HDFS_HOME}" ]]; then
    hadoop_error "ERROR: Invalid HADOOP_HDFS_HOME"
    exit 1
  fi

  if [[ ! -d "${HADOOP_YARN_HOME}" ]]; then
    hadoop_error "ERROR: Invalid HADOOP_YARN_HOME"
    exit 1
  fi

  if [[ ! -d "${HADOOP_MAPRED_HOME}" ]]; then
    hadoop_error "ERROR: Invalid HADOOP_MAPRED_HOME"
    exit 1
  fi

  # if for some reason the shell doesn't have $USER defined
  # let's define it as 'hadoop'
  HADOOP_IDENT_STRING=${HADOOP_IDENT_STRING:-$USER}
  HADOOP_IDENT_STRING=${HADOOP_IDENT_STRING:-hadoop}
  HADOOP_LOG_DIR=${HADOOP_LOG_DIR:-"${HADOOP_HOME}/logs"}
  HADOOP_LOGFILE=${HADOOP_LOGFILE:-hadoop.log}
  HADOOP_LOGLEVEL=${HADOOP_LOGLEVEL:-INFO}
  HADOOP_NICENESS=${HADOOP_NICENESS:-0}
  HADOOP_STOP_TIMEOUT=${HADOOP_STOP_TIMEOUT:-5}
  HADOOP_PID_DIR=${HADOOP_PID_DIR:-/tmp}
  HADOOP_ROOT_LOGGER=${HADOOP_ROOT_LOGGER:-${HADOOP_LOGLEVEL},console}
  HADOOP_DAEMON_ROOT_LOGGER=${HADOOP_DAEMON_ROOT_LOGGER:-${HADOOP_LOGLEVEL},RFA}
  HADOOP_SECURITY_LOGGER=${HADOOP_SECURITY_LOGGER:-INFO,NullAppender}
  HADOOP_SSH_OPTS=${HADOOP_SSH_OPTS:-"-o BatchMode=yes -o StrictHostKeyChecking=no -o ConnectTimeout=10s"}
  HADOOP_SECURE_LOG_DIR=${HADOOP_SECURE_LOG_DIR:-${HADOOP_LOG_DIR}}
  HADOOP_SECURE_PID_DIR=${HADOOP_SECURE_PID_DIR:-${HADOOP_PID_DIR}}
  HADOOP_SSH_PARALLEL=${HADOOP_SSH_PARALLEL:-10}
}

## @description Set the worker support information to the contents
## @description of `filename`
## @audience public
## @stability stable
## @replaceable no
## @param filename
## @return will exit if file does not exist
function hadoop_populate_workers_file
{
  local workersfile=$1
  shift
  if [[ -f "${workersfile}" ]]; then
    # shellcheck disable=2034
    HADOOP_WORKERS="${workersfile}"
  elif [[ -f "${HADOOP_CONF_DIR}/${workersfile}" ]]; then
    # shellcheck disable=2034
    HADOOP_WORKERS="${HADOOP_CONF_DIR}/${workersfile}"
  else
    hadoop_error "ERROR: Cannot find hosts file \"${workersfile}\""
    hadoop_exit_with_usage 1
  fi
}

## @description Rotates the given `file` until `number` of
## @description files exist.
## @audience public
## @stability stable
## @replaceable no
## @param filename
## @param [number]
## @return $? will contain last mv's return value
function hadoop_rotate_log
{
  #
  # Users are likely to replace this one for something
  # that gzips or uses dates or who knows what.
  #
  # be aware that &1 and &2 might go through here
  # so don't do anything too crazy...
  #
  local log=$1;
  local num=${2:-5};

  if [[ -f "${log}" ]]; then # rotate logs
    while [[ ${num} -gt 1 ]]; do
      #shellcheck disable=SC2086
      let prev=${num}-1
      if [[ -f "${log}.${prev}" ]]; then
        mv "${log}.${prev}" "${log}.${num}"
      fi
      num=${prev}
    done
    mv "${log}" "${log}.${num}"
  fi
}
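
## Illustrative sketch (not part of upstream): with the default depth of 5,
## one call shifts every existing generation up a slot before renaming the
## live file.
#
#   hadoop_rotate_log "${HADOOP_LOG_DIR}/hadoop-namenode.out"
#   # .4 -> .5, .3 -> .4, .2 -> .3, .1 -> .2, then the live file -> .1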

## @description Via ssh, log into `hostname` and run `command`
## @audience private
## @stability evolving
## @replaceable yes
## @param hostname
## @param command
## @param [...]
function hadoop_actual_ssh
{
  # we are passing this function to xargs
  # should get hostname followed by rest of command line
  local worker=$1
  shift

  # shellcheck disable=SC2086
  ssh ${HADOOP_SSH_OPTS} ${worker} $"${@// /\\ }" 2>&1 | sed "s/^/$worker: /"
}

## @description Connect to ${HADOOP_WORKERS} or ${HADOOP_WORKER_NAMES}
## @description and execute command.
## @audience private
## @stability evolving
## @replaceable yes
## @param command
## @param [...]
function hadoop_connect_to_hosts
{
  # shellcheck disable=SC2124
  local params="$@"
  local worker_file
  local tmpslvnames

  #
  # ssh (or whatever) to a host
  #
  # User can specify hostnames or a file where the hostnames are (not both)
  if [[ -n "${HADOOP_WORKERS}" && -n "${HADOOP_WORKER_NAMES}" ]] ; then
    hadoop_error "ERROR: Both HADOOP_WORKERS and HADOOP_WORKER_NAMES were defined. Aborting."
    exit 1
  elif [[ -z "${HADOOP_WORKER_NAMES}" ]]; then
    if [[ -n "${HADOOP_WORKERS}" ]]; then
      worker_file=${HADOOP_WORKERS}
    elif [[ -f "${HADOOP_CONF_DIR}/workers" ]]; then
      worker_file=${HADOOP_CONF_DIR}/workers
    elif [[ -f "${HADOOP_CONF_DIR}/slaves" ]]; then
      hadoop_error "WARNING: 'slaves' file has been deprecated. Please use 'workers' file instead."
      worker_file=${HADOOP_CONF_DIR}/slaves
    fi
  fi

  # if pdsh is available, let's use it. otherwise default
  # to a loop around ssh. (ugh)
  if [[ -e '/usr/bin/pdsh' ]]; then
    if [[ -z "${HADOOP_WORKER_NAMES}" ]] ; then
      # if we were given a file, just let pdsh deal with it.
      # shellcheck disable=SC2086
      PDSH_SSH_ARGS_APPEND="${HADOOP_SSH_OPTS}" pdsh \
        -f "${HADOOP_SSH_PARALLEL}" -w ^"${worker_file}" $"${@// /\\ }" 2>&1
    else
      # no spaces allowed in the pdsh arg host list
      # shellcheck disable=SC2086
      tmpslvnames=$(echo ${HADOOP_WORKER_NAMES} | tr -s ' ' ,)
      PDSH_SSH_ARGS_APPEND="${HADOOP_SSH_OPTS}" pdsh \
        -f "${HADOOP_SSH_PARALLEL}" \
        -w "${tmpslvnames}" $"${@// /\\ }" 2>&1
    fi
  else
    if [[ -z "${HADOOP_WORKER_NAMES}" ]]; then
      HADOOP_WORKER_NAMES=$(sed 's/#.*$//;/^$/d' "${worker_file}")
    fi
    hadoop_connect_to_hosts_without_pdsh "${params}"
  fi
}

## @description Connect to ${HADOOP_WORKER_NAMES} and execute command
## @description under the environment which does not support pdsh.
## @audience private
## @stability evolving
## @replaceable yes
## @param command
## @param [...]
function hadoop_connect_to_hosts_without_pdsh
{
  # shellcheck disable=SC2124
  local params="$@"
  local workers=(${HADOOP_WORKER_NAMES})
  for (( i = 0; i < ${#workers[@]}; i++ ))
  do
    if (( i != 0 && i % HADOOP_SSH_PARALLEL == 0 )); then
      wait
    fi
    # shellcheck disable=SC2086
    hadoop_actual_ssh "${workers[$i]}" ${params} &
  done
  wait
}

## @description Utility routine to handle --workers mode
## @audience private
## @stability evolving
## @replaceable yes
## @param commandarray
function hadoop_common_worker_mode_execute
{
  #
  # input should be the command line as given by the user
  # in the form of an array
  #
  local argv=("$@")

  # if --workers is still on the command line, remove it
  # to prevent loops
  # Also remove --hostnames and --hosts along with arg values
  local argsSize=${#argv[@]};
  for (( i = 0; i < argsSize; i++ ))
  do
    if [[ "${argv[$i]}" =~ ^--workers$ ]]; then
      unset argv[$i]
    elif [[ "${argv[$i]}" =~ ^--hostnames$ ]] ||
      [[ "${argv[$i]}" =~ ^--hosts$ ]]; then
      unset argv[$i];
      let i++;
      unset argv[$i];
    fi
  done
  if [[ ${QATESTMODE} = true ]]; then
    echo "${argv[@]}"
    return
  fi
  hadoop_connect_to_hosts -- "${argv[@]}"
}

## @description Verify that a shell command was passed a valid
## @description class name
## @audience public
## @stability stable
## @replaceable yes
## @param classname
## @return 0 = success
## @return 1 = failure w/user message
function hadoop_validate_classname
{
  local class=$1
  shift 1

  if [[ ! ${class} =~ \. ]]; then
    # assuming the arg is a typo of a command if it does not contain ".".
    # a class belonging to no package is not allowed as a result.
    hadoop_error "ERROR: ${class} is not COMMAND nor fully qualified CLASSNAME."
    return 1
  fi
  return 0
}
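
## Illustrative sketch (not part of upstream):
#
#   hadoop_validate_classname org.apache.hadoop.util.VersionInfo  # returns 0
#   hadoop_validate_classname versionn                            # prints an error, returns 1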

## @description Append the `appendstring` if `checkstring` is not
## @description present in the given `envvar`
## @audience public
## @stability stable
## @replaceable yes
## @param envvar
## @param checkstring
## @param appendstring
function hadoop_add_param
{
  #
  # general param dedupe..
  # $1 is what we are adding to
  # $2 is the name of what we want to add (key)
  # $3 is the key+value of what we're adding
  #
  # doing it this way allows us to support all sorts of
  # different syntaxes, just so long as they are space
  # delimited
  #
  if [[ ! ${!1} =~ $2 ]] ; then
    #shellcheck disable=SC2140
    eval "$1"="'${!1} $3'"
    if [[ ${!1:0:1} = ' ' ]]; then
      #shellcheck disable=SC2140
      eval "$1"="'${!1# }'"
    fi
    hadoop_debug "$1 accepted $3"
  else
    hadoop_debug "$1 declined $3"
  fi
}
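
## Illustrative sketch (not part of upstream): the key ($2) drives the
## dedupe, so a second value carrying the same key is declined. Assuming
## HADOOP_OPTS starts empty:
#
#   hadoop_add_param HADOOP_OPTS Xmx "-Xmx2g"   # accepted
#   hadoop_add_param HADOOP_OPTS Xmx "-Xmx4g"   # declined: Xmx already present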

## @description Register the given `shellprofile` to the Hadoop
## @description shell subsystem
## @audience public
## @stability stable
## @replaceable yes
## @param shellprofile
function hadoop_add_profile
{
  # shellcheck disable=SC2086
  hadoop_add_param HADOOP_SHELL_PROFILES $1 $1
}

## @description Add a file system object (directory, file,
## @description wildcard, ...) to the classpath. Optionally provide
## @description a hint as to where in the classpath it should go.
## @audience public
## @stability stable
## @replaceable yes
## @param object
## @param [before|after]
## @return 0 = success (added or duplicate)
## @return 1 = failure (doesn't exist or some other reason)
function hadoop_add_classpath
{
  # However, with classpath (& JLP), we can do dedupe
  # along with some sanity checking (e.g., missing directories)
  # since we have a better idea of what is legal
  #
  # for wildcard at end, we can
  # at least check the dir exists
  if [[ $1 =~ ^.*\*$ ]]; then
    local mp
    mp=$(dirname "$1")
    if [[ ! -d "${mp}" ]]; then
      hadoop_debug "Rejected CLASSPATH: $1 (not a dir)"
      return 1
    fi

  # no wildcard in the middle, so check existence
  # (doesn't matter *what* it is)
  elif [[ ! $1 =~ ^.*\*.*$ ]] && [[ ! -e "$1" ]]; then
    hadoop_debug "Rejected CLASSPATH: $1 (does not exist)"
    return 1
  fi
  if [[ -z "${CLASSPATH}" ]]; then
    CLASSPATH=$1
    hadoop_debug "Initial CLASSPATH=$1"
  elif [[ ":${CLASSPATH}:" != *":$1:"* ]]; then
    if [[ "$2" = "before" ]]; then
      CLASSPATH="$1:${CLASSPATH}"
      hadoop_debug "Prepend CLASSPATH: $1"
    else
      CLASSPATH+=:$1
      hadoop_debug "Append CLASSPATH: $1"
    fi
  else
    hadoop_debug "Dupe CLASSPATH: $1"
  fi
  return 0
}
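
## Illustrative sketch (not part of upstream): directories, files, and
## trailing wildcards can all be added; "before" prepends instead of the
## default append, and duplicates are silently skipped.
#
#   hadoop_add_classpath "${HADOOP_CONF_DIR}" before
#   hadoop_add_classpath "${HADOOP_HOME}/share/hadoop/common"'/*'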

## @description Add a file system object (directory, file,
## @description wildcard, ...) to the colonpath. Optionally provide
## @description a hint as to where in the colonpath it should go.
## @description Prior to adding, objects are checked for duplication
## @description and existence. Many other functions use this
## @description function as their base implementation,
## @description including `hadoop_add_javalibpath` and `hadoop_add_ldlibpath`.
## @audience public
## @stability stable
## @replaceable yes
## @param envvar
## @param object
## @param [before|after]
## @return 0 = success (added or duplicate)
## @return 1 = failure (doesn't exist or some other reason)
function hadoop_add_colonpath
{
  # this is CLASSPATH, JLP, etc but with dedupe but no
  # other checking
  if [[ -d "${2}" ]] && [[ ":${!1}:" != *":$2:"* ]]; then
    if [[ -z "${!1}" ]]; then
      # shellcheck disable=SC2086
      eval $1="'$2'"
      hadoop_debug "Initial colonpath($1): $2"
    elif [[ "$3" = "before" ]]; then
      # shellcheck disable=SC2086
      eval $1="'$2:${!1}'"
      hadoop_debug "Prepend colonpath($1): $2"
    else
      # shellcheck disable=SC2086
      eval $1+=":'$2'"
      hadoop_debug "Append colonpath($1): $2"
    fi
    return 0
  fi
  hadoop_debug "Rejected colonpath($1): $2"
  return 1
}

## @description Add a file system object (directory, file,
## @description wildcard, ...) to the Java JNI path. Optionally
## @description provide a hint as to where in the Java JNI path
## @description it should go.
## @audience public
## @stability stable
## @replaceable yes
## @param object
## @param [before|after]
## @return 0 = success (added or duplicate)
## @return 1 = failure (doesn't exist or some other reason)
function hadoop_add_javalibpath
{
  # specialized function for a common use case
  hadoop_add_colonpath JAVA_LIBRARY_PATH "$1" "$2"
}

## @description Add a file system object (directory, file,
## @description wildcard, ...) to the LD_LIBRARY_PATH. Optionally
## @description provide a hint as to where in the LD_LIBRARY_PATH
## @description it should go.
## @audience public
## @stability stable
## @replaceable yes
## @param object
## @param [before|after]
## @return 0 = success (added or duplicate)
## @return 1 = failure (doesn't exist or some other reason)
function hadoop_add_ldlibpath
{
  local status
  # specialized function for a common use case
  hadoop_add_colonpath LD_LIBRARY_PATH "$1" "$2"
  status=$?

  # note that we export this
  export LD_LIBRARY_PATH
  return ${status}
}
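
## Illustrative sketch (not part of upstream): both helpers delegate to
## hadoop_add_colonpath; the LD_LIBRARY_PATH variant also exports the result.
#
#   hadoop_add_ldlibpath "${HADOOP_HOME}/lib/native" after
#   # LD_LIBRARY_PATH now ends with .../lib/native (deduped) and is exported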

## @description Add the common/core Hadoop components to the
## @description environment
## @audience private
## @stability evolving
## @replaceable yes
## @returns 1 on failure, may exit
## @returns 0 on success
function hadoop_add_common_to_classpath
{
  #
  # get all of the common jars+config in the path
  #
  if [[ -z "${HADOOP_COMMON_HOME}"
    || -z "${HADOOP_COMMON_DIR}"
    || -z "${HADOOP_COMMON_LIB_JARS_DIR}" ]]; then
    hadoop_debug "COMMON_HOME=${HADOOP_COMMON_HOME}"
    hadoop_debug "COMMON_DIR=${HADOOP_COMMON_DIR}"
    hadoop_debug "COMMON_LIB_JARS_DIR=${HADOOP_COMMON_LIB_JARS_DIR}"
    hadoop_error "ERROR: HADOOP_COMMON_HOME or related vars are not configured."
    exit 1
  fi

  # developers
  if [[ -n "${HADOOP_ENABLE_BUILD_PATHS}" ]]; then
    hadoop_add_classpath "${HADOOP_COMMON_HOME}/hadoop-common/target/classes"
  fi

  hadoop_add_classpath "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_LIB_JARS_DIR}"'/*'
  hadoop_add_classpath "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_DIR}"'/*'
}

## @description Run libexec/tools/module.sh to add to the classpath
## @description environment
## @audience private
## @stability evolving
## @replaceable yes
## @param module
function hadoop_add_to_classpath_tools
{
  declare module=$1

  if [[ -f "${HADOOP_LIBEXEC_DIR}/tools/${module}.sh" ]]; then
    # shellcheck disable=SC1090
    . "${HADOOP_LIBEXEC_DIR}/tools/${module}.sh"
  else
    hadoop_error "ERROR: Tools helper ${HADOOP_LIBEXEC_DIR}/tools/${module}.sh was not found."
  fi

  if declare -f hadoop_classpath_tools_${module} >/dev/null 2>&1; then
    "hadoop_classpath_tools_${module}"
  fi
}

## @description Add the user's custom classpath settings to the
## @description environment
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_add_to_classpath_userpath
{
  # Add the user-specified HADOOP_CLASSPATH to the
  # official CLASSPATH env var if HADOOP_USE_CLIENT_CLASSLOADER
  # is not set.
  # Add it first or last depending on if user has
  # set env-var HADOOP_USER_CLASSPATH_FIRST
  # we'll also dedupe it, because we're cool like that.
  #
  declare -a array
  declare -i c=0
  declare -i j
  declare -i i
  declare idx

  if [[ -n "${HADOOP_CLASSPATH}" ]]; then
    # I wonder if Java runs on VMS.
    for idx in $(echo "${HADOOP_CLASSPATH}" | tr : '\n'); do
      array[${c}]=${idx}
      ((c=c+1))
    done

    # bats gets confused by j getting set to 0
    ((j=c-1)) || ${QATESTMODE}

    if [[ -z "${HADOOP_USE_CLIENT_CLASSLOADER}" ]]; then
      if [[ -z "${HADOOP_USER_CLASSPATH_FIRST}" ]]; then
        for ((i=0; i<=j; i++)); do
          hadoop_add_classpath "${array[$i]}" after
        done
      else
        for ((i=j; i>=0; i--)); do
          hadoop_add_classpath "${array[$i]}" before
        done
      fi
    fi
  fi
}

## @description Routine to configure any OS-specific settings.
## @audience public
## @stability stable
## @replaceable yes
## @return may exit on failure conditions
function hadoop_os_tricks
{
  local bindv6only

  HADOOP_IS_CYGWIN=false
  case ${HADOOP_OS_TYPE} in
    Darwin)
      if [[ -z "${JAVA_HOME}" ]]; then
        if [[ -x /usr/libexec/java_home ]]; then
          JAVA_HOME="$(/usr/libexec/java_home)"
          export JAVA_HOME
        else
          JAVA_HOME=/Library/Java/Home
          export JAVA_HOME
        fi
      fi
    ;;
    Linux)
      # Newer versions of glibc use an arena memory allocator that
      # causes virtual memory usage to explode. This interacts badly
      # with the many threads that we use in Hadoop. Tune the variable
      # down to prevent vmem explosion.
      export MALLOC_ARENA_MAX=${MALLOC_ARENA_MAX:-4}

      # skip the sysctl check in QA test mode so that non-Linux can test
      if [[ "${QATESTMODE}" = true ]]; then
        return
      fi

      # NOTE! HADOOP_ALLOW_IPV6 is a developer hook. We leave it
      # undocumented in hadoop-env.sh because we don't want users to
      # shoot themselves in the foot while devs make IPv6 work.
      bindv6only=$(/sbin/sysctl -n net.ipv6.bindv6only 2> /dev/null)
      if [[ -n "${bindv6only}" ]] &&
         [[ "${bindv6only}" -eq "1" ]] &&
         [[ "${HADOOP_ALLOW_IPV6}" != "yes" ]]; then
        hadoop_error "ERROR: \"net.ipv6.bindv6only\" is set to 1"
        hadoop_error "ERROR: Hadoop networking could be broken. Aborting."
        hadoop_error "ERROR: For more info: http://wiki.apache.org/hadoop/HadoopIPv6"
        exit 1
      fi
    ;;
    CYGWIN*)
      # Flag that we're running on Cygwin to trigger path translation later.
      HADOOP_IS_CYGWIN=true
    ;;
  esac
}

## @description Configure/verify ${JAVA_HOME}
## @audience public
## @stability stable
## @replaceable yes
## @return may exit on failure conditions
function hadoop_java_setup
{
  # Bail if we did not detect it
  if [[ -z "${JAVA_HOME}" ]]; then
    hadoop_error "ERROR: JAVA_HOME is not set and could not be found."
    exit 1
  fi

  if [[ ! -d "${JAVA_HOME}" ]]; then
    hadoop_error "ERROR: JAVA_HOME ${JAVA_HOME} does not exist."
    exit 1
  fi

  JAVA="${JAVA_HOME}/bin/java"

  if [[ ! -x "$JAVA" ]]; then
    hadoop_error "ERROR: $JAVA is not executable."
    exit 1
  fi
}

## @description Finish Java JNI paths prior to execution
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_finalize_libpaths
{
  if [[ -n "${JAVA_LIBRARY_PATH}" ]]; then
    hadoop_translate_cygwin_path JAVA_LIBRARY_PATH
    hadoop_add_param HADOOP_OPTS java.library.path \
      "-Djava.library.path=${JAVA_LIBRARY_PATH}"
    export LD_LIBRARY_PATH
  fi
}

## @description Finish Java heap parameters prior to execution
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_finalize_hadoop_heap
{
  if [[ -n "${HADOOP_HEAPSIZE_MAX}" ]]; then
    if [[ "${HADOOP_HEAPSIZE_MAX}" =~ ^[0-9]+$ ]]; then
      HADOOP_HEAPSIZE_MAX="${HADOOP_HEAPSIZE_MAX}m"
    fi
    hadoop_add_param HADOOP_OPTS Xmx "-Xmx${HADOOP_HEAPSIZE_MAX}"
  fi

  # backwards compatibility
  if [[ -n "${HADOOP_HEAPSIZE}" ]]; then
    if [[ "${HADOOP_HEAPSIZE}" =~ ^[0-9]+$ ]]; then
      HADOOP_HEAPSIZE="${HADOOP_HEAPSIZE}m"
    fi
    hadoop_add_param HADOOP_OPTS Xmx "-Xmx${HADOOP_HEAPSIZE}"
  fi

  if [[ -n "${HADOOP_HEAPSIZE_MIN}" ]]; then
    if [[ "${HADOOP_HEAPSIZE_MIN}" =~ ^[0-9]+$ ]]; then
      HADOOP_HEAPSIZE_MIN="${HADOOP_HEAPSIZE_MIN}m"
    fi
    hadoop_add_param HADOOP_OPTS Xms "-Xms${HADOOP_HEAPSIZE_MIN}"
  fi
}
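
## Illustrative sketch (not part of upstream): bare numbers are treated as
## megabytes; values with an explicit unit pass through untouched.
#
#   HADOOP_HEAPSIZE_MAX=4096  ->  -Xmx4096m is added to HADOOP_OPTS
#   HADOOP_HEAPSIZE_MAX=4g    ->  -Xmx4g is added to HADOOP_OPTS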

## @description Converts the contents of the variable name
## @description `varnameref` into the equivalent Windows path.
## @description If the second parameter is true, then `varnameref`
## @description is treated as though it was a path list.
## @audience public
## @stability stable
## @replaceable yes
## @param varnameref
## @param [true]
function hadoop_translate_cygwin_path
{
  if [[ "${HADOOP_IS_CYGWIN}" = "true" ]]; then
    if [[ "$2" = "true" ]]; then
      #shellcheck disable=SC2016
      eval "$1"='$(cygpath -p -w "${!1}" 2>/dev/null)'
    else
      #shellcheck disable=SC2016
      eval "$1"='$(cygpath -w "${!1}" 2>/dev/null)'
    fi
  fi
}
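
## Illustrative sketch (not part of upstream), effective on Cygwin only:
#
#   HADOOP_LOG_DIR=/var/log/hadoop
#   hadoop_translate_cygwin_path HADOOP_LOG_DIR
#   # HADOOP_LOG_DIR now holds the cygpath -w form,
#   # e.g. something like C:\cygwin64\var\log\hadoop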

## @description Finish configuring Hadoop specific system properties
## @description prior to executing Java
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_finalize_hadoop_opts
{
  hadoop_translate_cygwin_path HADOOP_LOG_DIR
  hadoop_add_param HADOOP_OPTS hadoop.log.dir "-Dhadoop.log.dir=${HADOOP_LOG_DIR}"
  hadoop_add_param HADOOP_OPTS hadoop.log.file "-Dhadoop.log.file=${HADOOP_LOGFILE}"
  hadoop_translate_cygwin_path HADOOP_HOME
  export HADOOP_HOME
  hadoop_add_param HADOOP_OPTS hadoop.home.dir "-Dhadoop.home.dir=${HADOOP_HOME}"
  hadoop_add_param HADOOP_OPTS hadoop.id.str "-Dhadoop.id.str=${HADOOP_IDENT_STRING}"
  hadoop_add_param HADOOP_OPTS hadoop.root.logger "-Dhadoop.root.logger=${HADOOP_ROOT_LOGGER}"
  hadoop_add_param HADOOP_OPTS hadoop.policy.file "-Dhadoop.policy.file=${HADOOP_POLICYFILE}"
  hadoop_add_param HADOOP_OPTS hadoop.security.logger "-Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER}"
}

## @description Finish Java classpath prior to execution
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_finalize_classpath
{
  hadoop_add_classpath "${HADOOP_CONF_DIR}" before

  # user classpath gets added at the last minute. this allows
  # override of CONF dirs and more
  hadoop_add_to_classpath_userpath
  hadoop_translate_cygwin_path CLASSPATH true
}

## @description Finish Catalina configuration prior to execution
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_finalize_catalina_opts
{
  local prefix=${HADOOP_CATALINA_PREFIX}

  hadoop_add_param CATALINA_OPTS hadoop.home.dir "-Dhadoop.home.dir=${HADOOP_HOME}"
  if [[ -n "${JAVA_LIBRARY_PATH}" ]]; then
    hadoop_add_param CATALINA_OPTS java.library.path "-Djava.library.path=${JAVA_LIBRARY_PATH}"
  fi
  hadoop_add_param CATALINA_OPTS "${prefix}.home.dir" "-D${prefix}.home.dir=${HADOOP_HOME}"
  hadoop_add_param CATALINA_OPTS "${prefix}.config.dir" "-D${prefix}.config.dir=${HADOOP_CATALINA_CONFIG}"
  hadoop_add_param CATALINA_OPTS "${prefix}.log.dir" "-D${prefix}.log.dir=${HADOOP_CATALINA_LOG}"
  hadoop_add_param CATALINA_OPTS "${prefix}.temp.dir" "-D${prefix}.temp.dir=${HADOOP_CATALINA_TEMP}"
  hadoop_add_param CATALINA_OPTS "${prefix}.admin.port" "-D${prefix}.admin.port=${HADOOP_CATALINA_ADMIN_PORT}"
  hadoop_add_param CATALINA_OPTS "${prefix}.http.port" "-D${prefix}.http.port=${HADOOP_CATALINA_HTTP_PORT}"
  hadoop_add_param CATALINA_OPTS "${prefix}.max.threads" "-D${prefix}.max.threads=${HADOOP_CATALINA_MAX_THREADS}"
  hadoop_add_param CATALINA_OPTS "${prefix}.max.http.header.size" "-D${prefix}.max.http.header.size=${HADOOP_CATALINA_MAX_HTTP_HEADER_SIZE}"
  hadoop_add_param CATALINA_OPTS "${prefix}.ssl.keystore.file" "-D${prefix}.ssl.keystore.file=${HADOOP_CATALINA_SSL_KEYSTORE_FILE}"
}

## @description Finish all the remaining environment settings prior
## @description to executing Java. This is a wrapper that calls
## @description the other `finalize` routines.
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_finalize
{
  hadoop_shellprofiles_finalize

  hadoop_finalize_classpath
  hadoop_finalize_libpaths
  hadoop_finalize_hadoop_heap
  hadoop_finalize_hadoop_opts

  hadoop_translate_cygwin_path HADOOP_HOME
  hadoop_translate_cygwin_path HADOOP_CONF_DIR
  hadoop_translate_cygwin_path HADOOP_COMMON_HOME
  hadoop_translate_cygwin_path HADOOP_HDFS_HOME
  hadoop_translate_cygwin_path HADOOP_YARN_HOME
  hadoop_translate_cygwin_path HADOOP_MAPRED_HOME
}

## @description Print usage information and exit with the passed
## @description `exitcode`
## @audience public
## @stability stable
## @replaceable no
## @param exitcode
## @return This function will always exit.
function hadoop_exit_with_usage
{
  local exitcode=$1

  if [[ -z $exitcode ]]; then
    exitcode=1
  fi
  # shellcheck disable=SC2034
  if declare -F hadoop_usage >/dev/null ; then
    hadoop_usage
  elif [[ -x /usr/bin/cowsay ]]; then
    /usr/bin/cowsay -f elephant "Sorry, no help available."
  else
    hadoop_error "Sorry, no help available."
  fi
  exit $exitcode
}

## @description Verify that prerequisites have been met prior to
## @description executing a privileged program.
## @audience private
## @stability evolving
## @replaceable yes
## @return This routine may exit.
function hadoop_verify_secure_prereq
{
  # if you are on an OS like Illumos that has functional roles
  # and you are using pfexec, you'll probably want to change
  # this.

  # ${EUID} comes from the shell itself!
  if [[ "${EUID}" -ne 0 ]] && [[ -z "${HADOOP_SECURE_COMMAND}" ]]; then
    hadoop_error "ERROR: You must be a privileged user in order to run a secure service."
    exit 1
  else
    return 0
  fi
}

## @audience private
## @stability evolving
## @replaceable yes
function hadoop_setup_secure_service
{
  # need a more complicated setup? replace me!
  HADOOP_PID_DIR=${HADOOP_SECURE_PID_DIR}
  HADOOP_LOG_DIR=${HADOOP_SECURE_LOG_DIR}
}

## @audience private
## @stability evolving
## @replaceable yes
function hadoop_verify_piddir
{
  if [[ -z "${HADOOP_PID_DIR}" ]]; then
    hadoop_error "No pid directory defined."
    exit 1
  fi
  if [[ ! -w "${HADOOP_PID_DIR}" ]] && [[ ! -d "${HADOOP_PID_DIR}" ]]; then
    hadoop_error "WARNING: ${HADOOP_PID_DIR} does not exist. Creating."
    mkdir -p "${HADOOP_PID_DIR}" > /dev/null 2>&1
    if [[ $? -gt 0 ]]; then
      hadoop_error "ERROR: Unable to create ${HADOOP_PID_DIR}. Aborting."
      exit 1
    fi
  fi
  touch "${HADOOP_PID_DIR}/$$" >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Unable to write in ${HADOOP_PID_DIR}. Aborting."
    exit 1
  fi
  rm "${HADOOP_PID_DIR}/$$" >/dev/null 2>&1
}

## @audience private
## @stability evolving
## @replaceable yes
function hadoop_verify_logdir
{
  if [[ -z "${HADOOP_LOG_DIR}" ]]; then
    hadoop_error "No log directory defined."
    exit 1
  fi
  if [[ ! -w "${HADOOP_LOG_DIR}" ]] && [[ ! -d "${HADOOP_LOG_DIR}" ]]; then
    hadoop_error "WARNING: ${HADOOP_LOG_DIR} does not exist. Creating."
    mkdir -p "${HADOOP_LOG_DIR}" > /dev/null 2>&1
    if [[ $? -gt 0 ]]; then
      hadoop_error "ERROR: Unable to create ${HADOOP_LOG_DIR}. Aborting."
      exit 1
    fi
  fi
  touch "${HADOOP_LOG_DIR}/$$" >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Unable to write in ${HADOOP_LOG_DIR}. Aborting."
    exit 1
  fi
  rm "${HADOOP_LOG_DIR}/$$" >/dev/null 2>&1
}

## @description Determine the status of the daemon referenced
## @description by `pidfile`
## @audience public
## @stability stable
## @replaceable yes
## @param pidfile
## @return (mostly) LSB 4.1.0 compatible status
function hadoop_status_daemon
{
  #
  # LSB 4.1.0 compatible status command (1)
  #
  # 0 = program is running
  # 1 = dead, but still a pid (2)
  # 2 = (not used by us)
  # 3 = not running
  #
  # 1 - this is not an endorsement of the LSB
  #
  # 2 - technically, the specification says /var/run/pid, so
  #     we should never return this value, but we're giving
  #     them the benefit of a doubt and returning 1 even if
  #     our pid is not in /var/run .
  #

  local pidfile=$1
  shift

  local pid

  if [[ -f "${pidfile}" ]]; then
    pid=$(cat "${pidfile}")
    if ps -p "${pid}" > /dev/null 2>&1; then
      return 0
    fi
    return 1
  fi
  return 3
}
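
## Illustrative sketch (not part of upstream): mapping the return code to
## a human-readable status.
#
#   hadoop_status_daemon "${HADOOP_PID_DIR}/hadoop-${HADOOP_IDENT_STRING}-namenode.pid"
#   case $? in
#     0) echo "running" ;;
#     1) echo "dead, but pid file exists" ;;
#     3) echo "not running" ;;
#   esac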
## @description Execute the Java `class`, passing along any `options`.
## @description Additionally, set the Java property -Dproc_`command`.
## @audience public
## @stability stable
## @replaceable yes
## @param command
## @param class
## @param [options]
function hadoop_java_exec
{
  # run a java command. this is used for
  # non-daemons

  local command=$1
  local class=$2
  shift 2

  hadoop_debug "Final CLASSPATH: ${CLASSPATH}"
  hadoop_debug "Final HADOOP_OPTS: ${HADOOP_OPTS}"
  hadoop_debug "Final JAVA_HOME: ${JAVA_HOME}"
  hadoop_debug "java: ${JAVA}"
  hadoop_debug "Class name: ${class}"
  hadoop_debug "Command line options: $*"

  export CLASSPATH
  #shellcheck disable=SC2086
  exec "${JAVA}" "-Dproc_${command}" ${HADOOP_OPTS} "${class}" "$@"
}
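
# Illustrative usage sketch (comment only, not part of the library):
# a client-side subcommand might end with a call such as the one
# below, once CLASSPATH, JAVA, and HADOOP_OPTS have been finalized.
# Because the function exec's the JVM, it never returns.
#
#   hadoop_java_exec fs org.apache.hadoop.fs.FsShell "$@"
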
## @description Start a non-privileged daemon in the foreground.
## @audience private
## @stability evolving
## @replaceable yes
## @param command
## @param class
## @param pidfile
## @param [options]
function hadoop_start_daemon
{
  # this is our non-privileged daemon starter
  # that fires up a daemon in the *foreground*
  # so complex! so wow! much java!

  local command=$1
  local class=$2
  local pidfile=$3
  shift 3

  hadoop_debug "Final CLASSPATH: ${CLASSPATH}"
  hadoop_debug "Final HADOOP_OPTS: ${HADOOP_OPTS}"
  hadoop_debug "Final JAVA_HOME: ${JAVA_HOME}"
  hadoop_debug "java: ${JAVA}"
  hadoop_debug "Class name: ${class}"
  hadoop_debug "Command line options: $*"

  # this is for the non-daemon pid creation
  #shellcheck disable=SC2086
  echo $$ > "${pidfile}" 2>/dev/null
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot write ${command} pid ${pidfile}."
  fi

  export CLASSPATH
  #shellcheck disable=SC2086
  exec "${JAVA}" "-Dproc_${command}" ${HADOOP_OPTS} "${class}" "$@"
}

## @description Start a non-privileged daemon in the background.
## @audience private
## @stability evolving
## @replaceable yes
## @param command
## @param class
## @param pidfile
## @param outfile
## @param [options]
function hadoop_start_daemon_wrapper
{
  local daemonname=$1
  local class=$2
  local pidfile=$3
  local outfile=$4
  shift 4

  local counter

  hadoop_rotate_log "${outfile}"

  hadoop_start_daemon "${daemonname}" \
    "$class" \
    "${pidfile}" \
    "$@" >> "${outfile}" 2>&1 < /dev/null &

  # we need to avoid a race condition here
  # so let's wait for the fork to finish
  # before overriding with the daemonized pid
  (( counter=0 ))
  while [[ ! -f ${pidfile} && ${counter} -le 5 ]]; do
    sleep 1
    (( counter++ ))
  done

  # this is for daemon pid creation
  #shellcheck disable=SC2086
  echo $! > "${pidfile}" 2>/dev/null
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot write ${daemonname} pid ${pidfile}."
  fi

  # shellcheck disable=SC2086
  renice "${HADOOP_NICENESS}" $! >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot set priority of ${daemonname} process $!"
  fi

  # shellcheck disable=SC2086
  disown %+ >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot disconnect ${daemonname} process $!"
  fi
  sleep 1

  # capture the ulimit output
  ulimit -a >> "${outfile}" 2>&1

  # shellcheck disable=SC2086
  if ! ps -p $! >/dev/null 2>&1; then
    return 1
  fi
  return 0
}
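
# Illustrative usage sketch (comment only, not part of the library):
# backgrounding a daemon; the file locations below are hypothetical.
#
#   hadoop_start_daemon_wrapper datanode \
#     org.apache.hadoop.hdfs.server.datanode.DataNode \
#     "${HADOOP_PID_DIR}/hadoop-${USER}-datanode.pid" \
#     "${HADOOP_LOG_DIR}/hadoop-${USER}-datanode.out" "$@" \
#     || hadoop_error "ERROR: datanode did not appear to start"
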
## @description Start a privileged daemon in the foreground.
## @audience private
## @stability evolving
## @replaceable yes
## @param command
## @param class
## @param daemonpidfile
## @param daemonoutfile
## @param daemonerrfile
## @param wrapperpidfile
## @param [options]
function hadoop_start_secure_daemon
{
  # this is used to launch a secure daemon in the *foreground*
  #
  local daemonname=$1
  local class=$2

  # pid file to create for our daemon
  local daemonpidfile=$3

  # where to send stdout. jsvc has bad habits so this *may* be &1
  # which means you send it to stdout!
  local daemonoutfile=$4

  # where to send stderr. same thing, except &2 = stderr
  local daemonerrfile=$5
  local privpidfile=$6
  shift 6

  hadoop_rotate_log "${daemonoutfile}"
  hadoop_rotate_log "${daemonerrfile}"

  # shellcheck disable=SC2153
  jsvc="${JSVC_HOME}/jsvc"
  if [[ ! -f "${jsvc}" ]]; then
    hadoop_error "JSVC_HOME is not set or set incorrectly. jsvc is required to run secure"
    hadoop_error "or privileged daemons. Please download and install jsvc from"
    hadoop_error "http://archive.apache.org/dist/commons/daemon/binaries/"
    hadoop_error "and set JSVC_HOME to the directory containing the jsvc binary."
    exit 1
  fi

  # note that shellcheck will throw a
  # bogus for-our-use-case 2086 here.
  # it doesn't properly support multi-line situations

  hadoop_debug "Final CLASSPATH: ${CLASSPATH}"
  hadoop_debug "Final HADOOP_OPTS: ${HADOOP_OPTS}"
  hadoop_debug "Final JSVC_HOME: ${JSVC_HOME}"
  hadoop_debug "jsvc: ${jsvc}"
  hadoop_debug "Class name: ${class}"
  hadoop_debug "Command line options: $*"

  #shellcheck disable=SC2086
  echo $$ > "${privpidfile}" 2>/dev/null
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot write ${daemonname} pid ${privpidfile}."
  fi

  # shellcheck disable=SC2086
  exec "${jsvc}" \
    "-Dproc_${daemonname}" \
    -outfile "${daemonoutfile}" \
    -errfile "${daemonerrfile}" \
    -pidfile "${daemonpidfile}" \
    -nodetach \
    -user "${HADOOP_SECURE_USER}" \
    -cp "${CLASSPATH}" \
    ${HADOOP_OPTS} \
    "${class}" "$@"
}

## @description Start a privileged daemon in the background.
## @audience private
## @stability evolving
## @replaceable yes
## @param command
## @param class
## @param daemonpidfile
## @param daemonoutfile
## @param wrapperpidfile
## @param wrapperoutfile
## @param daemonerrfile
## @param [options]
function hadoop_start_secure_daemon_wrapper
{
  # this wraps hadoop_start_secure_daemon to take care
  # of the dirty work to launch a daemon in the background!
  local daemonname=$1
  local class=$2

  # same rules as hadoop_start_secure_daemon except we
  # have some additional parameters

  local daemonpidfile=$3
  local daemonoutfile=$4

  # the pid file of the subprocess that spawned our
  # secure launcher
  local jsvcpidfile=$5

  # the output of the subprocess that spawned our secure
  # launcher
  local jsvcoutfile=$6

  local daemonerrfile=$7
  shift 7

  local counter

  hadoop_rotate_log "${jsvcoutfile}"

  hadoop_start_secure_daemon \
    "${daemonname}" \
    "${class}" \
    "${daemonpidfile}" \
    "${daemonoutfile}" \
    "${daemonerrfile}" \
    "${jsvcpidfile}" "$@" >> "${jsvcoutfile}" 2>&1 < /dev/null &

  # we need to avoid a race condition here
  # so let's wait for the fork to finish
  # before overriding with the daemonized pid
  (( counter=0 ))
  while [[ ! -f ${daemonpidfile} && ${counter} -le 5 ]]; do
    sleep 1
    (( counter++ ))
  done

  # this is for the daemon pid creation
  #shellcheck disable=SC2086
  echo $! > "${jsvcpidfile}" 2>/dev/null
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot write ${daemonname} pid ${jsvcpidfile}."
  fi

  sleep 1
  #shellcheck disable=SC2086
  renice "${HADOOP_NICENESS}" $! >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot set priority of ${daemonname} process $!"
  fi
  if [[ -f "${daemonpidfile}" ]]; then
    #shellcheck disable=SC2046
    renice "${HADOOP_NICENESS}" $(cat "${daemonpidfile}" 2>/dev/null) >/dev/null 2>&1
    if [[ $? -gt 0 ]]; then
      hadoop_error "ERROR: Cannot set priority of ${daemonname} process $(cat "${daemonpidfile}" 2>/dev/null)"
    fi
  fi
  #shellcheck disable=SC2046
  disown %+ >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot disconnect ${daemonname} process $!"
  fi

  # capture the ulimit output
  su "${HADOOP_SECURE_USER}" -c 'bash -c "ulimit -a"' >> "${jsvcoutfile}" 2>&1
  #shellcheck disable=SC2086
  if ! ps -p $! >/dev/null 2>&1; then
    return 1
  fi
  return 0
}

## @description Stop the non-privileged `command` daemon that is
## @description running at `pidfile`.
## @audience public
## @stability stable
## @replaceable yes
## @param command
## @param pidfile
function hadoop_stop_daemon
{
  local cmd=$1
  local pidfile=$2
  shift 2

  local pid
  local cur_pid

  if [[ -f "${pidfile}" ]]; then
    pid=$(cat "$pidfile")

    kill "${pid}" >/dev/null 2>&1
    sleep "${HADOOP_STOP_TIMEOUT}"
    if kill -0 "${pid}" > /dev/null 2>&1; then
      hadoop_error "WARNING: ${cmd} did not stop gracefully after ${HADOOP_STOP_TIMEOUT} seconds: Trying to kill with kill -9"
      kill -9 "${pid}" >/dev/null 2>&1
    fi
    if ps -p "${pid}" > /dev/null 2>&1; then
      hadoop_error "ERROR: Unable to kill ${pid}"
    else
      cur_pid=$(cat "$pidfile")
      if [[ "${pid}" = "${cur_pid}" ]]; then
        rm -f "${pidfile}" >/dev/null 2>&1
      else
        hadoop_error "WARNING: pid has changed for ${cmd}, skip deleting pid file"
      fi
    fi
  fi
}
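
# Illustrative usage sketch (comment only, not part of the library):
# stopping a daemon via its (hypothetical) pid file; escalation to
# kill -9 and pid file cleanup are handled internally.
#
#   HADOOP_STOP_TIMEOUT=5
#   hadoop_stop_daemon datanode \
#     "${HADOOP_PID_DIR}/hadoop-${USER}-datanode.pid"
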
## @description Stop the privileged `command` daemon that is running
## @description at `daemonpidfile` and was launched with the wrapper
## @description at `wrapperpidfile`.
## @audience public
## @stability stable
## @replaceable yes
## @param command
## @param daemonpidfile
## @param wrapperpidfile
function hadoop_stop_secure_daemon
{
  local command=$1
  local daemonpidfile=$2
  local privpidfile=$3
  shift 3

  local ret
  local daemon_pid
  local priv_pid
  local cur_daemon_pid
  local cur_priv_pid

  daemon_pid=$(cat "$daemonpidfile")
  priv_pid=$(cat "$privpidfile")

  hadoop_stop_daemon "${command}" "${daemonpidfile}"
  ret=$?

  cur_daemon_pid=$(cat "$daemonpidfile")
  cur_priv_pid=$(cat "$privpidfile")

  if [[ "${daemon_pid}" = "${cur_daemon_pid}" ]]; then
    rm -f "${daemonpidfile}" >/dev/null 2>&1
  else
    hadoop_error "WARNING: daemon pid has changed for ${command}, skip deleting daemon pid file"
  fi

  if [[ "${priv_pid}" = "${cur_priv_pid}" ]]; then
    rm -f "${privpidfile}" >/dev/null 2>&1
  else
    hadoop_error "WARNING: priv pid has changed for ${command}, skip deleting priv pid file"
  fi
  return ${ret}
}

## @description Manage a non-privileged daemon.
## @audience private
## @stability evolving
## @replaceable yes
## @param [start|stop|status|default]
## @param command
## @param class
## @param daemonpidfile
## @param daemonoutfile
## @param [options]
function hadoop_daemon_handler
{
  local daemonmode=$1
  local daemonname=$2
  local class=$3
  local daemon_pidfile=$4
  local daemon_outfile=$5
  shift 5

  case ${daemonmode} in
    status)
      hadoop_status_daemon "${daemon_pidfile}"
      exit $?
    ;;

    stop)
      hadoop_stop_daemon "${daemonname}" "${daemon_pidfile}"
      exit $?
    ;;

    ##COMPAT -- older hadoops would also start daemons by default
    start|default)
      hadoop_verify_piddir
      hadoop_verify_logdir
      hadoop_status_daemon "${daemon_pidfile}"
      if [[ $? == 0 ]]; then
        hadoop_error "${daemonname} is running as process $(cat "${daemon_pidfile}"). Stop it first."
        exit 1
      else
        # stale pid file, so just remove it and continue on
        rm -f "${daemon_pidfile}" >/dev/null 2>&1
      fi

      ##COMPAT - differentiate between --daemon start and nothing
      # "nothing" shouldn't detach
      if [[ "$daemonmode" = "default" ]]; then
        hadoop_start_daemon "${daemonname}" "${class}" "${daemon_pidfile}" "$@"
      else
        hadoop_start_daemon_wrapper "${daemonname}" \
          "${class}" "${daemon_pidfile}" "${daemon_outfile}" "$@"
      fi
    ;;
  esac
}
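
# Illustrative usage sketch (comment only, not part of the library):
# a daemon entry point would normally dispatch through the handler
# and exit with its status; the file names here are hypothetical.
#
#   hadoop_daemon_handler "${HADOOP_DAEMON_MODE}" datanode \
#     org.apache.hadoop.hdfs.server.datanode.DataNode \
#     "${HADOOP_PID_DIR}/hadoop-${USER}-datanode.pid" \
#     "${HADOOP_LOG_DIR}/hadoop-${USER}-datanode.out" "$@"
#   exit $?
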
## @description Manage a privileged daemon.
## @audience private
## @stability evolving
## @replaceable yes
## @param [start|stop|status|default]
## @param command
## @param class
## @param daemonpidfile
## @param daemonoutfile
## @param wrapperpidfile
## @param wrapperoutfile
## @param wrappererrfile
## @param [options]
function hadoop_secure_daemon_handler
{
  local daemonmode=$1
  local daemonname=$2
  local classname=$3
  local daemon_pidfile=$4
  local daemon_outfile=$5
  local priv_pidfile=$6
  local priv_outfile=$7
  local priv_errfile=$8
  shift 8

  case ${daemonmode} in
    status)
      hadoop_status_daemon "${daemon_pidfile}"
      exit $?
    ;;

    stop)
      hadoop_stop_secure_daemon "${daemonname}" \
        "${daemon_pidfile}" "${priv_pidfile}"
      exit $?
    ;;

    ##COMPAT -- older hadoops would also start daemons by default
    start|default)
      hadoop_verify_piddir
      hadoop_verify_logdir
      hadoop_status_daemon "${daemon_pidfile}"
      if [[ $? == 0 ]]; then
        hadoop_error "${daemonname} is running as process $(cat "${daemon_pidfile}"). Stop it first."
        exit 1
      else
        # stale pid file, so just remove it and continue on
        rm -f "${daemon_pidfile}" >/dev/null 2>&1
      fi

      ##COMPAT - differentiate between --daemon start and nothing
      # "nothing" shouldn't detach
      if [[ "${daemonmode}" = "default" ]]; then
        hadoop_start_secure_daemon "${daemonname}" "${classname}" \
          "${daemon_pidfile}" "${daemon_outfile}" \
          "${priv_errfile}" "${priv_pidfile}" "$@"
      else
        hadoop_start_secure_daemon_wrapper "${daemonname}" "${classname}" \
          "${daemon_pidfile}" "${daemon_outfile}" \
          "${priv_pidfile}" "${priv_outfile}" "${priv_errfile}" "$@"
      fi
    ;;
  esac
}
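
# Illustrative usage sketch (comment only, not part of the library):
# the secure variant takes the extra jsvc-related files; the class
# and all file names below are hypothetical placeholders.
#
#   hadoop_secure_daemon_handler "${HADOOP_DAEMON_MODE}" datanode \
#     "${secure_class}" \
#     "${daemon_pidfile}" "${daemon_outfile}" \
#     "${priv_pidfile}" "${priv_outfile}" "${priv_errfile}" "$@"
#   exit $?
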
## @description Verify that ${USER} is allowed to execute the
## @description given subcommand.
## @audience public
## @stability stable
## @replaceable yes
## @param subcommand
## @return will exit on failure conditions
function hadoop_verify_user
{
  local command=$1
  local uservar="HADOOP_${command}_USER"

  if [[ -n ${!uservar} ]]; then
    if [[ ${!uservar} != "${USER}" ]]; then
      hadoop_error "ERROR: ${command} can only be executed by ${!uservar}."
      exit 1
    fi
  fi
}
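
# Illustrative usage sketch (comment only, not part of the library):
# given the (hypothetical) setting below, only the hdfs user may run
# the datanode subcommand; anyone else exits with an error.
#
#   HADOOP_datanode_USER=hdfs
#   hadoop_verify_user datanode
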
## @description Perform the 'hadoop classpath', etc. subcommand with the given
## @description parameters
## @audience private
## @stability evolving
## @replaceable yes
## @param [parameters]
## @return will print & exit with no params
function hadoop_do_classpath_subcommand
{
  if [[ "$#" -gt 1 ]]; then
    eval "$1"=org.apache.hadoop.util.Classpath
  else
    hadoop_finalize
    echo "${CLASSPATH}"
    exit 0
  fi
}
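
# Illustrative usage sketch (comment only, not part of the library):
# called with just a variable name, the function prints the final
# CLASSPATH and exits; with trailing parameters, it instead points
# the named variable at the Classpath tool for further processing.
# HADOOP_CLASSNAME below is an assumed caller-side variable name.
#
#   hadoop_do_classpath_subcommand HADOOP_CLASSNAME "$@"
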
## @description Generic shell script option parser. Sets
## @description HADOOP_PARSE_COUNTER to the number of arguments
## @description the caller should shift.
## @audience private
## @stability evolving
## @replaceable yes
## @param [parameters, typically "$@"]
function hadoop_parse_args
{
  HADOOP_DAEMON_MODE="default"
  HADOOP_PARSE_COUNTER=0

  # not all of the options supported here are supported by all commands
  # however these are:
  hadoop_add_option "--config dir" "Hadoop config directory"
  hadoop_add_option "--debug" "turn on shell script debug mode"
  hadoop_add_option "--help" "usage information"

  while true; do
    hadoop_debug "hadoop_parse_args: processing $1"
    case $1 in
      --buildpaths)
        # shellcheck disable=SC2034
        HADOOP_ENABLE_BUILD_PATHS=true
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+1))
      ;;
      --config)
        shift
        confdir=$1
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
        if [[ -d "${confdir}" ]]; then
          # shellcheck disable=SC2034
          HADOOP_CONF_DIR="${confdir}"
        elif [[ -z "${confdir}" ]]; then
          hadoop_error "ERROR: No parameter provided for --config"
          hadoop_exit_with_usage 1
        else
          hadoop_error "ERROR: Cannot find configuration directory \"${confdir}\""
          hadoop_exit_with_usage 1
        fi
      ;;
      --daemon)
        shift
        HADOOP_DAEMON_MODE=$1
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
        if [[ -z "${HADOOP_DAEMON_MODE}" || \
          ! "${HADOOP_DAEMON_MODE}" =~ ^st(art|op|atus)$ ]]; then
          hadoop_error "ERROR: --daemon must be followed by either \"start\", \"stop\", or \"status\"."
          hadoop_exit_with_usage 1
        fi
      ;;
      --debug)
        shift
        # shellcheck disable=SC2034
        HADOOP_SHELL_SCRIPT_DEBUG=true
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+1))
      ;;
      --help|-help|-h|help|--h|--\?|-\?|\?)
        hadoop_exit_with_usage 0
      ;;
      --hostnames)
        shift
        # shellcheck disable=SC2034
        HADOOP_WORKER_NAMES="$1"
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
      ;;
      --hosts)
        shift
        hadoop_populate_workers_file "$1"
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
      ;;
      --loglevel)
        shift
        # shellcheck disable=SC2034
        HADOOP_LOGLEVEL="$1"
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
      ;;
      --workers)
        shift
        # shellcheck disable=SC2034
        HADOOP_WORKER_MODE=true
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+1))
      ;;
      *)
        break
      ;;
    esac
  done

  hadoop_debug "hadoop_parse: asking caller to skip ${HADOOP_PARSE_COUNTER}"
}
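
# Illustrative usage sketch (comment only, not part of the library):
# callers consume the generic options and then shift them away before
# handling the subcommand.
#
#   hadoop_parse_args "$@"
#   shift "${HADOOP_PARSE_COUNTER}"
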
## @description XML-escapes the characters (&'"<>) in the given parameter.
## @audience private
## @stability evolving
## @replaceable yes
## @param string
## @return XML-escaped string
function hadoop_xml_escape
{
  sed -e 's/&/\&amp;/g' -e 's/"/\\\&quot;/g' \
    -e "s/'/\\\\\&apos;/g" -e 's/</\\\&lt;/g' -e 's/>/\\\&gt;/g' <<< "$1"
}
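
# Illustrative usage sketch (comment only, not part of the library);
# note that the quote and angle-bracket replacements also prepend a
# backslash to each escaped entity:
#
#   hadoop_xml_escape 'a "b" & <c>'
#   # prints: a \&quot;b\&quot; &amp; \&lt;c\&gt;
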
## @description sed-escapes the characters (\/&) in the given parameter.
## @audience private
## @stability evolving
## @replaceable yes
## @param string
## @return sed-escaped string
function hadoop_sed_escape
{
  sed -e 's/[\/&]/\\&/g' <<< "$1"
}
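
# Illustrative usage sketch (comment only, not part of the library):
# escaping a value so it can be embedded safely in a sed replacement.
#
#   hadoop_sed_escape 'dfs/data & more'
#   # prints: dfs\/data \& more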