#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# we need to declare this globally as an array, which can only
# be done outside of a function
declare -a HADOOP_SUBCMD_USAGE
declare -a HADOOP_OPTION_USAGE

## @description Print a message to stderr
## @audience public
## @stability stable
## @replaceable no
## @param string
function hadoop_error
{
  echo "$*" 1>&2
}

## @description Print a message to stderr if --debug is turned on
## @audience public
## @stability stable
## @replaceable no
## @param string
function hadoop_debug
{
  if [[ -n "${HADOOP_SHELL_SCRIPT_DEBUG}" ]]; then
    echo "DEBUG: $*" 1>&2
  fi
}
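
# Example (illustrative, not executed): hadoop_debug only prints when shell
# script debugging has been requested (e.g. via the --debug option):
#
#   HADOOP_SHELL_SCRIPT_DEBUG=true
#   hadoop_debug "rebuilding CLASSPATH"  # -> "DEBUG: rebuilding CLASSPATH" on stderr
#   hadoop_error "WARNING: disk is full" # always printed to stderr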

## @description Given variable $1 delete $2 from it
## @audience public
## @stability stable
## @replaceable no
function hadoop_delete_entry
{
  if [[ ${!1} =~ \ ${2}\ ]] ; then
    hadoop_debug "Removing ${2} from ${1}"
    eval "${1}"=\""${!1// ${2} }"\"
  fi
}

## @description Given variable $1 add $2 to it
## @audience public
## @stability stable
## @replaceable no
function hadoop_add_entry
{
  if [[ ! ${!1} =~ \ ${2}\ ]] ; then
    hadoop_debug "Adding ${2} to ${1}"
    #shellcheck disable=SC2140
    eval "${1}"=\""${!1} ${2} "\"
  fi
}

## @description Given variable $1 determine if $2 is in it
## @audience public
## @stability stable
## @replaceable no
## @return 0 = yes, 1 = no
function hadoop_verify_entry
{
  # this unfortunately can't really be tested by bats. :(
  # so if this changes, be aware that unit tests effectively
  # do this function in them
  [[ ${!1} =~ \ ${2}\ ]]
}
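
# Example (illustrative, not executed): these helpers treat the named
# variable as a space-delimited set:
#
#   hadoop_add_entry HADOOP_TOOLS_OPTIONS "hadoop-aws"
#   hadoop_verify_entry HADOOP_TOOLS_OPTIONS "hadoop-aws" && echo "present"
#   hadoop_delete_entry HADOOP_TOOLS_OPTIONS "hadoop-aws"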

## @description Add a subcommand to the usage output
## @audience private
## @stability evolving
## @replaceable no
## @param subcommand
## @param subcommanddesc
function hadoop_add_subcommand
{
  local subcmd=$1
  local text=$2

  HADOOP_SUBCMD_USAGE[${HADOOP_SUBCMD_USAGE_COUNTER}]="${subcmd}@${text}"
  ((HADOOP_SUBCMD_USAGE_COUNTER=HADOOP_SUBCMD_USAGE_COUNTER+1))
}

## @description Add an option to the usage output
## @audience private
## @stability evolving
## @replaceable no
## @param option
## @param optiondesc
function hadoop_add_option
{
  local option=$1
  local text=$2

  HADOOP_OPTION_USAGE[${HADOOP_OPTION_USAGE_COUNTER}]="${option}@${text}"
  ((HADOOP_OPTION_USAGE_COUNTER=HADOOP_OPTION_USAGE_COUNTER+1))
}
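
# Example (illustrative, not executed): command scripts register their help
# text before any call to hadoop_generate_usage:
#
#   hadoop_add_option "--debug" "turn on shell script debug mode"
#   hadoop_add_subcommand "fs" "run a generic filesystem user client"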

## @description Reset the usage information to blank
## @audience private
## @stability evolving
## @replaceable no
function hadoop_reset_usage
{
  HADOOP_SUBCMD_USAGE=()
  HADOOP_OPTION_USAGE=()
  HADOOP_SUBCMD_USAGE_COUNTER=0
  HADOOP_OPTION_USAGE_COUNTER=0
}

## @description Print a screen-size aware two-column output
## @audience private
## @stability evolving
## @replaceable no
## @param array
function hadoop_generic_columnprinter
{
  declare -a input=("$@")
  declare -i i=0
  declare -i counter=0
  declare line
  declare text
  declare option
  declare giventext
  declare -i maxoptsize
  declare -i foldsize
  declare -a tmpa
  declare numcols

  if [[ -n "${COLUMNS}" ]]; then
    numcols=${COLUMNS}
  else
    # the redirection must be inside the command substitution;
    # otherwise tput's error output leaks to the terminal
    numcols=$(tput cols 2>/dev/null)
  fi

  if [[ -z "${numcols}"
     || ! "${numcols}" =~ ^[0-9]+$ ]]; then
    numcols=75
  else
    ((numcols=numcols-5))
  fi

  while read -r line; do
    tmpa[${counter}]=${line}
    ((counter=counter+1))
    option=$(echo "${line}" | cut -f1 -d'@')
    if [[ ${#option} -gt ${maxoptsize} ]]; then
      maxoptsize=${#option}
    fi
  done < <(for text in "${input[@]}"; do
    echo "${text}"
  done | sort)

  i=0
  ((foldsize=numcols-maxoptsize))

  until [[ $i -eq ${#tmpa[@]} ]]; do
    option=$(echo "${tmpa[$i]}" | cut -f1 -d'@')
    giventext=$(echo "${tmpa[$i]}" | cut -f2 -d'@')

    while read -r line; do
      printf "%-${maxoptsize}s %-s\n" "${option}" "${line}"
      option=" "
    done < <(echo "${giventext}" | fold -s -w ${foldsize})
    ((i=i+1))
  done
}
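
# Example (illustrative, not executed): each entry is a "left@right" pair;
# the right column is word-wrapped to the detected terminal width:
#
#   hadoop_generic_columnprinter "fs@run a generic filesystem user client" \
#                                "version@print the version"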

## @description generate standard usage output
## @description and optionally takes a class
## @audience private
## @stability evolving
## @replaceable no
## @param execname
## @param true|false
## @param [text to use in place of SUBCOMMAND]
function hadoop_generate_usage
{
  local cmd=$1
  local takesclass=$2
  local subcmdtext=${3:-"SUBCOMMAND"}
  local haveoptions
  local optstring
  local havesubs
  local subcmdstring

  cmd=${cmd##*/}

  if [[ -n "${HADOOP_OPTION_USAGE_COUNTER}"
     && "${HADOOP_OPTION_USAGE_COUNTER}" -gt 0 ]]; then
    haveoptions=true
    optstring=" [OPTIONS]"
  fi

  if [[ -n "${HADOOP_SUBCMD_USAGE_COUNTER}"
     && "${HADOOP_SUBCMD_USAGE_COUNTER}" -gt 0 ]]; then
    havesubs=true
    subcmdstring=" ${subcmdtext} [${subcmdtext} OPTIONS]"
  fi

  echo "Usage: ${cmd}${optstring}${subcmdstring}"
  if [[ ${takesclass} = true ]]; then
    echo " or ${cmd}${optstring} CLASSNAME [CLASSNAME OPTIONS]"
    echo "  where CLASSNAME is a user-provided Java class"
  fi

  if [[ "${haveoptions}" = true ]]; then
    echo ""
    echo "  OPTIONS is none or any of:"
    echo ""
    hadoop_generic_columnprinter "${HADOOP_OPTION_USAGE[@]}"
  fi

  if [[ "${havesubs}" = true ]]; then
    echo ""
    echo "  ${subcmdtext} is one of:"
    echo ""
    hadoop_generic_columnprinter "${HADOOP_SUBCMD_USAGE[@]}"
    echo ""
    echo "${subcmdtext} may print help when invoked w/o parameters or with -h."
  fi
}
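
# Example (illustrative, not executed): after registering options and
# subcommands, a wrapper script prints its help like so:
#
#   hadoop_generate_usage "${MYNAME}" true
#   # -> "Usage: hadoop [OPTIONS] SUBCOMMAND [SUBCOMMAND OPTIONS]" plus
#   #    the formatted OPTIONS and SUBCOMMAND tables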

## @description Replace `oldvar` with `newvar` if `oldvar` exists.
## @audience public
## @stability stable
## @replaceable yes
## @param oldvar
## @param newvar
function hadoop_deprecate_envvar
{
  local oldvar=$1
  local newvar=$2
  local oldval=${!oldvar}

  if [[ -n "${oldval}" ]]; then
    hadoop_error "WARNING: ${oldvar} has been replaced by ${newvar}. Using value of ${oldvar}."
    # shellcheck disable=SC2086
    eval ${newvar}=\"${oldval}\"
  fi
}
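
# Example (illustrative, not executed): callers map legacy names onto their
# replacements early during startup:
#
#   hadoop_deprecate_envvar HADOOP_PREFIX HADOOP_HOME
#   # -> warns and sets HADOOP_HOME from HADOOP_PREFIX when the latter is set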

## @description Bootstraps the Hadoop shell environment
## @audience private
## @stability evolving
## @replaceable no
function hadoop_bootstrap
{
  # the root of the Hadoop installation
  # See HADOOP-6255 for the expected directory structure layout

  if [[ -n "${DEFAULT_LIBEXEC_DIR}" ]]; then
    hadoop_error "WARNING: DEFAULT_LIBEXEC_DIR ignored. It has been replaced by HADOOP_DEFAULT_LIBEXEC_DIR."
  fi

  # By now, HADOOP_LIBEXEC_DIR should have been defined upstream
  # We can piggyback off of that to figure out where the default
  # HADOOP_HOME should be. This allows us to run without
  # HADOOP_HOME ever being defined by a human! As a consequence
  # HADOOP_LIBEXEC_DIR now becomes perhaps the single most powerful
  # env var within Hadoop.
  if [[ -z "${HADOOP_LIBEXEC_DIR}" ]]; then
    hadoop_error "HADOOP_LIBEXEC_DIR is not defined. Exiting."
    exit 1
  fi
  HADOOP_DEFAULT_PREFIX=$(cd -P -- "${HADOOP_LIBEXEC_DIR}/.." >/dev/null && pwd -P)
  HADOOP_HOME=${HADOOP_HOME:-$HADOOP_DEFAULT_PREFIX}
  export HADOOP_HOME

  #
  # short-cuts. vendors may redefine these as well, preferably
  # in hadoop-layouts.sh
  #
  HADOOP_COMMON_DIR=${HADOOP_COMMON_DIR:-"share/hadoop/common"}
  HADOOP_COMMON_LIB_JARS_DIR=${HADOOP_COMMON_LIB_JARS_DIR:-"share/hadoop/common/lib"}
  HADOOP_COMMON_LIB_NATIVE_DIR=${HADOOP_COMMON_LIB_NATIVE_DIR:-"lib/native"}
  HDFS_DIR=${HDFS_DIR:-"share/hadoop/hdfs"}
  HDFS_LIB_JARS_DIR=${HDFS_LIB_JARS_DIR:-"share/hadoop/hdfs/lib"}
  YARN_DIR=${YARN_DIR:-"share/hadoop/yarn"}
  YARN_LIB_JARS_DIR=${YARN_LIB_JARS_DIR:-"share/hadoop/yarn/lib"}
  MAPRED_DIR=${MAPRED_DIR:-"share/hadoop/mapreduce"}
  MAPRED_LIB_JARS_DIR=${MAPRED_LIB_JARS_DIR:-"share/hadoop/mapreduce/lib"}
  HADOOP_TOOLS_HOME=${HADOOP_TOOLS_HOME:-${HADOOP_HOME}}
  HADOOP_TOOLS_DIR=${HADOOP_TOOLS_DIR:-"share/hadoop/tools"}
  HADOOP_TOOLS_LIB_JARS_DIR=${HADOOP_TOOLS_LIB_JARS_DIR:-"${HADOOP_TOOLS_DIR}/lib"}

  # usage output set to zero
  hadoop_reset_usage

  export HADOOP_OS_TYPE=${HADOOP_OS_TYPE:-$(uname -s)}

  # defaults
  export HADOOP_OPTS=${HADOOP_OPTS:-"-Djava.net.preferIPv4Stack=true"}
  hadoop_debug "Initial HADOOP_OPTS=${HADOOP_OPTS}"
}

## @description Locate Hadoop's configuration directory
## @audience private
## @stability evolving
## @replaceable no
function hadoop_find_confdir
{
  local conf_dir

  # An attempt at compatibility with some Hadoop 1.x
  # installs.
  if [[ -e "${HADOOP_HOME}/conf/hadoop-env.sh" ]]; then
    conf_dir="conf"
  else
    conf_dir="etc/hadoop"
  fi
  export HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-${HADOOP_HOME}/${conf_dir}}"

  hadoop_debug "HADOOP_CONF_DIR=${HADOOP_CONF_DIR}"
}

## @description Validate ${HADOOP_CONF_DIR}
## @audience public
## @stability stable
## @replaceable yes
## @return will exit on failure conditions
function hadoop_verify_confdir
{
  # Check only log4j.properties by default.
  # --loglevel does not work without logger settings in log4j.properties.
  if [[ ! -f "${HADOOP_CONF_DIR}/log4j.properties" ]]; then
    hadoop_error "WARNING: log4j.properties is not found. HADOOP_CONF_DIR may be incomplete."
  fi
}

## @description Import the hadoop-env.sh settings
## @audience private
## @stability evolving
## @replaceable no
function hadoop_exec_hadoopenv
{
  if [[ -z "${HADOOP_ENV_PROCESSED}" ]]; then
    if [[ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]]; then
      export HADOOP_ENV_PROCESSED=true
      # shellcheck disable=SC1090
      . "${HADOOP_CONF_DIR}/hadoop-env.sh"
    fi
  fi
}

## @description Import the replaced functions
## @audience private
## @stability evolving
## @replaceable no
function hadoop_exec_userfuncs
{
  if [[ -e "${HADOOP_CONF_DIR}/hadoop-user-functions.sh" ]]; then
    # shellcheck disable=SC1090
    . "${HADOOP_CONF_DIR}/hadoop-user-functions.sh"
  fi
}

## @description Read the user's settings. This provides for users to
## @description override and/or append hadoop-env.sh. It is not meant
## @description as a complete system override.
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_exec_hadooprc
{
  if [[ -f "${HOME}/.hadooprc" ]]; then
    hadoop_debug "Applying the user's .hadooprc"
    # shellcheck disable=SC1090
    . "${HOME}/.hadooprc"
  fi
}

## @description Import shellprofile.d content
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_import_shellprofiles
{
  local i
  local files1
  local files2

  if [[ -d "${HADOOP_LIBEXEC_DIR}/shellprofile.d" ]]; then
    files1=(${HADOOP_LIBEXEC_DIR}/shellprofile.d/*.sh)
    hadoop_debug "shellprofiles: ${files1[*]}"
  else
    hadoop_error "WARNING: ${HADOOP_LIBEXEC_DIR}/shellprofile.d doesn't exist. Functionality may not work."
  fi

  if [[ -d "${HADOOP_CONF_DIR}/shellprofile.d" ]]; then
    files2=(${HADOOP_CONF_DIR}/shellprofile.d/*.sh)
  fi

  # enable bundled shellprofiles that come
  # from hadoop-tools. This converts the user-facing HADOOP_OPTIONAL_TOOLS
  # to the HADOOP_TOOLS_OPTIONS that the shell profiles expect.
  # See dist-tools-hooks-maker for how the example HADOOP_OPTIONAL_TOOLS
  # gets populated into hadoop-env.sh
  for i in ${HADOOP_OPTIONAL_TOOLS//,/ }; do
    hadoop_add_entry HADOOP_TOOLS_OPTIONS "${i}"
  done

  for i in "${files1[@]}" "${files2[@]}"
  do
    if [[ -n "${i}"
       && -f "${i}" ]]; then
      hadoop_debug "Profiles: importing ${i}"
      # shellcheck disable=SC1090
      . "${i}"
    fi
  done
}

## @description Initialize the registered shell profiles
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_shellprofiles_init
{
  local i

  for i in ${HADOOP_SHELL_PROFILES}
  do
    if declare -F _${i}_hadoop_init >/dev/null ; then
      hadoop_debug "Profiles: ${i} init"
      # shellcheck disable=SC2086
      _${i}_hadoop_init
    fi
  done
}

## @description Apply the shell profile classpath additions
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_shellprofiles_classpath
{
  local i

  for i in ${HADOOP_SHELL_PROFILES}
  do
    if declare -F _${i}_hadoop_classpath >/dev/null ; then
      hadoop_debug "Profiles: ${i} classpath"
      # shellcheck disable=SC2086
      _${i}_hadoop_classpath
    fi
  done
}

## @description Apply the shell profile native library additions
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_shellprofiles_nativelib
{
  local i

  for i in ${HADOOP_SHELL_PROFILES}
  do
    if declare -F _${i}_hadoop_nativelib >/dev/null ; then
      hadoop_debug "Profiles: ${i} nativelib"
      # shellcheck disable=SC2086
      _${i}_hadoop_nativelib
    fi
  done
}

## @description Apply the shell profile final configuration
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_shellprofiles_finalize
{
  local i

  for i in ${HADOOP_SHELL_PROFILES}
  do
    if declare -F _${i}_hadoop_finalize >/dev/null ; then
      hadoop_debug "Profiles: ${i} finalize"
      # shellcheck disable=SC2086
      _${i}_hadoop_finalize
    fi
  done
}

## @description Initialize the Hadoop shell environment, now that
## @description user settings have been imported
## @audience private
## @stability evolving
## @replaceable no
function hadoop_basic_init
{
  # Some of these are also set in hadoop-env.sh.
  # we still set them here just in case hadoop-env.sh is
  # broken in some way, set up defaults, etc.
  #
  # but it is important to note that if you update these
  # you also need to update hadoop-env.sh as well!!!

  CLASSPATH=""
  hadoop_debug "Initialize CLASSPATH"

  if [[ -z "${HADOOP_COMMON_HOME}" ]] &&
     [[ -d "${HADOOP_HOME}/${HADOOP_COMMON_DIR}" ]]; then
    export HADOOP_COMMON_HOME="${HADOOP_HOME}"
  fi

  # default policy file for service-level authorization
  HADOOP_POLICYFILE=${HADOOP_POLICYFILE:-"hadoop-policy.xml"}

  # define HADOOP_HDFS_HOME
  if [[ -z "${HADOOP_HDFS_HOME}" ]] &&
     [[ -d "${HADOOP_HOME}/${HDFS_DIR}" ]]; then
    export HADOOP_HDFS_HOME="${HADOOP_HOME}"
  fi

  # define HADOOP_YARN_HOME
  if [[ -z "${HADOOP_YARN_HOME}" ]] &&
     [[ -d "${HADOOP_HOME}/${YARN_DIR}" ]]; then
    export HADOOP_YARN_HOME="${HADOOP_HOME}"
  fi

  # define HADOOP_MAPRED_HOME
  if [[ -z "${HADOOP_MAPRED_HOME}" ]] &&
     [[ -d "${HADOOP_HOME}/${MAPRED_DIR}" ]]; then
    export HADOOP_MAPRED_HOME="${HADOOP_HOME}"
  fi

  if [[ ! -d "${HADOOP_COMMON_HOME}" ]]; then
    hadoop_error "ERROR: Invalid HADOOP_COMMON_HOME"
    exit 1
  fi

  if [[ ! -d "${HADOOP_HDFS_HOME}" ]]; then
    hadoop_error "ERROR: Invalid HADOOP_HDFS_HOME"
    exit 1
  fi

  if [[ ! -d "${HADOOP_YARN_HOME}" ]]; then
    hadoop_error "ERROR: Invalid HADOOP_YARN_HOME"
    exit 1
  fi

  if [[ ! -d "${HADOOP_MAPRED_HOME}" ]]; then
    hadoop_error "ERROR: Invalid HADOOP_MAPRED_HOME"
    exit 1
  fi

  # if for some reason the shell doesn't have $USER defined
  # let's define it as 'hadoop'
  HADOOP_IDENT_STRING=${HADOOP_IDENT_STRING:-$USER}
  HADOOP_IDENT_STRING=${HADOOP_IDENT_STRING:-hadoop}
  HADOOP_LOG_DIR=${HADOOP_LOG_DIR:-"${HADOOP_HOME}/logs"}
  HADOOP_LOGFILE=${HADOOP_LOGFILE:-hadoop.log}
  HADOOP_LOGLEVEL=${HADOOP_LOGLEVEL:-INFO}
  HADOOP_NICENESS=${HADOOP_NICENESS:-0}
  HADOOP_STOP_TIMEOUT=${HADOOP_STOP_TIMEOUT:-5}
  HADOOP_PID_DIR=${HADOOP_PID_DIR:-/tmp}
  HADOOP_ROOT_LOGGER=${HADOOP_ROOT_LOGGER:-${HADOOP_LOGLEVEL},console}
  HADOOP_DAEMON_ROOT_LOGGER=${HADOOP_DAEMON_ROOT_LOGGER:-${HADOOP_LOGLEVEL},RFA}
  HADOOP_SECURITY_LOGGER=${HADOOP_SECURITY_LOGGER:-INFO,NullAppender}
  HADOOP_SSH_OPTS=${HADOOP_SSH_OPTS:-"-o BatchMode=yes -o StrictHostKeyChecking=no -o ConnectTimeout=10s"}
  HADOOP_SECURE_LOG_DIR=${HADOOP_SECURE_LOG_DIR:-${HADOOP_LOG_DIR}}
  HADOOP_SECURE_PID_DIR=${HADOOP_SECURE_PID_DIR:-${HADOOP_PID_DIR}}
  HADOOP_SSH_PARALLEL=${HADOOP_SSH_PARALLEL:-10}
}

## @description Set the slave support information to the contents
## @description of `filename`
## @audience public
## @stability stable
## @replaceable no
## @param filename
## @return will exit if file does not exist
function hadoop_populate_slaves_file
{
  local slavesfile=$1
  shift
  if [[ -f "${slavesfile}" ]]; then
    # shellcheck disable=2034
    HADOOP_SLAVES="${slavesfile}"
  elif [[ -f "${HADOOP_CONF_DIR}/${slavesfile}" ]]; then
    # shellcheck disable=2034
    HADOOP_SLAVES="${HADOOP_CONF_DIR}/${slavesfile}"
  else
    hadoop_error "ERROR: Cannot find hosts file \"${slavesfile}\""
    hadoop_exit_with_usage 1
  fi
}

## @description Rotates the given `file` until `number` of
## @description files exist.
## @audience public
## @stability stable
## @replaceable no
## @param filename
## @param [number]
## @return $? will contain last mv's return value
function hadoop_rotate_log
{
  #
  # Users are likely to replace this one for something
  # that gzips or uses dates or who knows what.
  #
  # be aware that &1 and &2 might go through here
  # so don't do anything too crazy...
  #
  local log=$1
  local num=${2:-5}

  if [[ -f "${log}" ]]; then # rotate logs
    while [[ ${num} -gt 1 ]]; do
      #shellcheck disable=SC2086
      let prev=${num}-1
      if [[ -f "${log}.${prev}" ]]; then
        mv "${log}.${prev}" "${log}.${num}"
      fi
      num=${prev}
    done
    mv "${log}" "${log}.${num}"
  fi
}
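
# Example (illustrative, not executed): with the default depth of 5,
#
#   hadoop_rotate_log "${HADOOP_LOG_DIR}/hadoop.out"
#   # hadoop.out.4 -> hadoop.out.5, ..., hadoop.out -> hadoop.out.1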

## @description Via ssh, log into `hostname` and run `command`
## @audience private
## @stability evolving
## @replaceable yes
## @param hostname
## @param command
## @param [...]
function hadoop_actual_ssh
{
  # we are passing this function to xargs
  # should get hostname followed by rest of command line
  local slave=$1
  shift

  # shellcheck disable=SC2086
  ssh ${HADOOP_SSH_OPTS} ${slave} $"${@// /\\ }" 2>&1 | sed "s/^/$slave: /"
}

## @description Connect to ${HADOOP_SLAVES} or ${HADOOP_SLAVE_NAMES}
## @description and execute command.
## @audience private
## @stability evolving
## @replaceable yes
## @param command
## @param [...]
function hadoop_connect_to_hosts
{
  # shellcheck disable=SC2124
  local params="$@"
  local slave_file
  local tmpslvnames

  #
  # ssh (or whatever) to a host
  #
  # User can specify hostnames or a file where the hostnames are (not both)
  if [[ -n "${HADOOP_SLAVES}" && -n "${HADOOP_SLAVE_NAMES}" ]] ; then
    hadoop_error "ERROR: Both HADOOP_SLAVES and HADOOP_SLAVE_NAMES were defined. Aborting."
    exit 1
  elif [[ -z "${HADOOP_SLAVE_NAMES}" ]]; then
    slave_file=${HADOOP_SLAVES:-${HADOOP_CONF_DIR}/slaves}
  fi

  # if pdsh is available, let's use it. otherwise default
  # to a loop around ssh. (ugh)
  if [[ -e '/usr/bin/pdsh' ]]; then
    if [[ -z "${HADOOP_SLAVE_NAMES}" ]] ; then
      # if we were given a file, just let pdsh deal with it.
      # shellcheck disable=SC2086
      PDSH_SSH_ARGS_APPEND="${HADOOP_SSH_OPTS}" pdsh \
        -f "${HADOOP_SSH_PARALLEL}" -w ^"${slave_file}" $"${@// /\\ }" 2>&1
    else
      # no spaces allowed in the pdsh arg host list
      # shellcheck disable=SC2086
      tmpslvnames=$(echo ${HADOOP_SLAVE_NAMES} | tr -s ' ' ,)
      PDSH_SSH_ARGS_APPEND="${HADOOP_SSH_OPTS}" pdsh \
        -f "${HADOOP_SSH_PARALLEL}" \
        -w "${tmpslvnames}" $"${@// /\\ }" 2>&1
    fi
  else
    if [[ -z "${HADOOP_SLAVE_NAMES}" ]]; then
      HADOOP_SLAVE_NAMES=$(sed 's/#.*$//;/^$/d' "${slave_file}")
    fi
    hadoop_connect_to_hosts_without_pdsh "${params}"
  fi
}

## @description Connect to ${HADOOP_SLAVE_NAMES} and execute command
## @description under the environment which does not support pdsh.
## @audience private
## @stability evolving
## @replaceable yes
## @param command
## @param [...]
function hadoop_connect_to_hosts_without_pdsh
{
  # shellcheck disable=SC2124
  local params="$@"
  local slaves=(${HADOOP_SLAVE_NAMES})
  for (( i = 0; i < ${#slaves[@]}; i++ ))
  do
    if (( i != 0 && i % HADOOP_SSH_PARALLEL == 0 )); then
      wait
    fi
    # shellcheck disable=SC2086
    hadoop_actual_ssh "${slaves[$i]}" ${params} &
  done
  wait
}

## @description Utility routine to handle --slaves mode
## @audience private
## @stability evolving
## @replaceable yes
## @param commandarray
function hadoop_common_slave_mode_execute
{
  #
  # input should be the command line as given by the user
  # in the form of an array
  #
  local argv=("$@")

  # if --slaves is still on the command line, remove it
  # to prevent loops
  # Also remove --hostnames and --hosts along with arg values
  local argsSize=${#argv[@]};
  for (( i = 0; i < argsSize; i++ ))
  do
    if [[ "${argv[$i]}" =~ ^--slaves$ ]]; then
      unset argv[$i]
    elif [[ "${argv[$i]}" =~ ^--hostnames$ ]] ||
      [[ "${argv[$i]}" =~ ^--hosts$ ]]; then
      unset argv[$i];
      let i++;
      unset argv[$i];
    fi
  done
  if [[ ${QATESTMODE} = true ]]; then
    echo "${argv[@]}"
    return
  fi
  hadoop_connect_to_hosts -- "${argv[@]}"
}

## @description Verify that a shell command was passed a valid
## @description class name
## @audience public
## @stability stable
## @replaceable yes
## @param classname
## @return 0 = success
## @return 1 = failure w/user message
function hadoop_validate_classname
{
  local class=$1
  shift 1

  if [[ ! ${class} =~ \. ]]; then
    # assume the arg is a typo of a command if it does not contain ".".
    # a class belonging to no package is not allowed as a result.
    hadoop_error "ERROR: ${class} is not COMMAND nor fully qualified CLASSNAME."
    return 1
  fi
  return 0
}

## @description Append the `appendstring` if `checkstring` is not
## @description present in the given `envvar`
## @audience public
## @stability stable
## @replaceable yes
## @param envvar
## @param checkstring
## @param appendstring
function hadoop_add_param
{
  #
  # general param dedupe..
  # $1 is what we are adding to
  # $2 is the name of what we want to add (key)
  # $3 is the key+value of what we're adding
  #
  # doing it this way allows us to support all sorts of
  # different syntaxes, just so long as they are space
  # delimited
  #
  if [[ ! ${!1} =~ $2 ]] ; then
    #shellcheck disable=SC2140
    eval "$1"="'${!1} $3'"
    if [[ ${!1:0:1} = ' ' ]]; then
      #shellcheck disable=SC2140
      eval "$1"="'${!1# }'"
    fi
    hadoop_debug "$1 accepted $3"
  else
    hadoop_debug "$1 declined $3"
  fi
}
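
# Example (illustrative, not executed): the key ($2) is matched against the
# current value, so repeated additions of the same key are deduped:
#
#   hadoop_add_param HADOOP_OPTS Xmx "-Xmx1g"
#   hadoop_add_param HADOOP_OPTS Xmx "-Xmx2g"   # declined; Xmx already present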

## @description Register the given `shellprofile` to the Hadoop
## @description shell subsystem
## @audience public
## @stability stable
## @replaceable yes
## @param shellprofile
function hadoop_add_profile
{
  # shellcheck disable=SC2086
  hadoop_add_param HADOOP_SHELL_PROFILES $1 $1
}

## @description Add a file system object (directory, file,
## @description wildcard, ...) to the classpath. Optionally provide
## @description a hint as to where in the classpath it should go.
## @audience public
## @stability stable
## @replaceable yes
## @param object
## @param [before|after]
## @return 0 = success (added or duplicate)
## @return 1 = failure (doesn't exist or some other reason)
function hadoop_add_classpath
{
  # However, with classpath (& JLP), we can do dedupe
  # along with some sanity checking (e.g., missing directories)
  # since we have a better idea of what is legal
  #
  # for wildcard at end, we can
  # at least check the dir exists
  if [[ $1 =~ ^.*\*$ ]]; then
    local mp
    mp=$(dirname "$1")
    if [[ ! -d "${mp}" ]]; then
      hadoop_debug "Rejected CLASSPATH: $1 (not a dir)"
      return 1
    fi

  # no wildcard in the middle, so check existence
  # (doesn't matter *what* it is)
  elif [[ ! $1 =~ ^.*\*.*$ ]] && [[ ! -e "$1" ]]; then
    hadoop_debug "Rejected CLASSPATH: $1 (does not exist)"
    return 1
  fi
  if [[ -z "${CLASSPATH}" ]]; then
    CLASSPATH=$1
    hadoop_debug "Initial CLASSPATH=$1"
  elif [[ ":${CLASSPATH}:" != *":$1:"* ]]; then
    if [[ "$2" = "before" ]]; then
      CLASSPATH="$1:${CLASSPATH}"
      hadoop_debug "Prepend CLASSPATH: $1"
    else
      CLASSPATH+=:$1
      hadoop_debug "Append CLASSPATH: $1"
    fi
  else
    hadoop_debug "Dupe CLASSPATH: $1"
  fi
  return 0
}
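
# Example (illustrative, not executed):
#
#   hadoop_add_classpath "${HADOOP_CONF_DIR}" before      # prepend if it exists
#   hadoop_add_classpath "${HADOOP_COMMON_HOME}/lib"'/*'  # wildcard: dir must exist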

## @description Add a file system object (directory, file,
## @description wildcard, ...) to the colonpath. Optionally provide
## @description a hint as to where in the colonpath it should go.
## @description Prior to adding, objects are checked for duplication
## @description and existence. Many other functions use this function
## @description as their base implementation, including
## @description `hadoop_add_javalibpath` and `hadoop_add_ldlibpath`.
## @audience public
## @stability stable
## @replaceable yes
## @param envvar
## @param object
## @param [before|after]
## @return 0 = success (added or duplicate)
## @return 1 = failure (doesn't exist or some other reason)
function hadoop_add_colonpath
{
  # this is CLASSPATH, JLP, etc but with dedupe but no
  # other checking
  if [[ -d "${2}" ]] && [[ ":${!1}:" != *":$2:"* ]]; then
    if [[ -z "${!1}" ]]; then
      # shellcheck disable=SC2086
      eval $1="'$2'"
      hadoop_debug "Initial colonpath($1): $2"
    elif [[ "$3" = "before" ]]; then
      # shellcheck disable=SC2086
      eval $1="'$2:${!1}'"
      hadoop_debug "Prepend colonpath($1): $2"
    else
      # shellcheck disable=SC2086
      eval $1+=":'$2'"
      hadoop_debug "Append colonpath($1): $2"
    fi
    return 0
  fi
  hadoop_debug "Rejected colonpath($1): $2"
  return 1
}

## @description Add a file system object (directory, file,
## @description wildcard, ...) to the Java JNI path. Optionally
## @description provide a hint as to where in the Java JNI path
## @description it should go.
## @audience public
## @stability stable
## @replaceable yes
## @param object
## @param [before|after]
## @return 0 = success (added or duplicate)
## @return 1 = failure (doesn't exist or some other reason)
function hadoop_add_javalibpath
{
  # specialized function for a common use case
  hadoop_add_colonpath JAVA_LIBRARY_PATH "$1" "$2"
}

## @description Add a file system object (directory, file,
## @description wildcard, ...) to the LD_LIBRARY_PATH. Optionally
## @description provide a hint as to where in the LD_LIBRARY_PATH
## @description it should go.
## @audience public
## @stability stable
## @replaceable yes
## @param object
## @param [before|after]
## @return 0 = success (added or duplicate)
## @return 1 = failure (doesn't exist or some other reason)
function hadoop_add_ldlibpath
{
  local status
  # specialized function for a common use case
  hadoop_add_colonpath LD_LIBRARY_PATH "$1" "$2"
  status=$?

  # note that we export this
  export LD_LIBRARY_PATH
  return ${status}
}
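
# Example (illustrative, not executed): both helpers delegate to
# hadoop_add_colonpath, so the same dedupe and existence checks apply:
#
#   hadoop_add_ldlibpath "${HADOOP_HOME}/lib/native" after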

## @description Add the common/core Hadoop components to the
## @description environment
## @audience private
## @stability evolving
## @replaceable yes
## @returns 1 on failure, may exit
## @returns 0 on success
function hadoop_add_common_to_classpath
{
  #
  # get all of the common jars+config in the path
  #
  if [[ -z "${HADOOP_COMMON_HOME}"
     || -z "${HADOOP_COMMON_DIR}"
     || -z "${HADOOP_COMMON_LIB_JARS_DIR}" ]]; then
    hadoop_debug "COMMON_HOME=${HADOOP_COMMON_HOME}"
    hadoop_debug "COMMON_DIR=${HADOOP_COMMON_DIR}"
    hadoop_debug "COMMON_LIB_JARS_DIR=${HADOOP_COMMON_LIB_JARS_DIR}"
    hadoop_error "ERROR: HADOOP_COMMON_HOME or related vars are not configured."
    exit 1
  fi

  # developers
  if [[ -n "${HADOOP_ENABLE_BUILD_PATHS}" ]]; then
    hadoop_add_classpath "${HADOOP_COMMON_HOME}/hadoop-common/target/classes"
  fi

  hadoop_add_classpath "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_LIB_JARS_DIR}"'/*'
  hadoop_add_classpath "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_DIR}"'/*'
}

## @description Run libexec/tools/module.sh to add to the classpath
## @description environment
## @audience private
## @stability evolving
## @replaceable yes
## @param module
function hadoop_add_to_classpath_tools
{
  declare module=$1

  if [[ -f "${HADOOP_LIBEXEC_DIR}/tools/${module}.sh" ]]; then
    # shellcheck disable=SC1090
    . "${HADOOP_LIBEXEC_DIR}/tools/${module}.sh"
  else
    hadoop_error "ERROR: Tools helper ${HADOOP_LIBEXEC_DIR}/tools/${module}.sh was not found."
  fi

  if declare -f hadoop_classpath_tools_${module} >/dev/null 2>&1; then
    "hadoop_classpath_tools_${module}"
  fi
}

## @description Add the user's custom classpath settings to the
## @description environment
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_add_to_classpath_userpath
{
  # Add the user-specified HADOOP_CLASSPATH to the
  # official CLASSPATH env var if HADOOP_USE_CLIENT_CLASSLOADER
  # is not set.
  # Add it first or last depending on if user has
  # set env-var HADOOP_USER_CLASSPATH_FIRST
  # we'll also dedupe it, because we're cool like that.
  #
  declare -a array
  declare -i c=0
  declare -i j
  declare -i i
  declare idx

  if [[ -n "${HADOOP_CLASSPATH}" ]]; then
    # I wonder if Java runs on VMS.
    for idx in $(echo "${HADOOP_CLASSPATH}" | tr : '\n'); do
      array[${c}]=${idx}
      ((c=c+1))
    done

    # bats gets confused by j getting set to 0
    ((j=c-1)) || ${QATESTMODE}

    if [[ -z "${HADOOP_USE_CLIENT_CLASSLOADER}" ]]; then
      if [[ -z "${HADOOP_USER_CLASSPATH_FIRST}" ]]; then
        for ((i=0; i<=j; i++)); do
          hadoop_add_classpath "${array[$i]}" after
        done
      else
        for ((i=j; i>=0; i--)); do
          hadoop_add_classpath "${array[$i]}" before
        done
      fi
    fi
  fi
}

## @description Routine to configure any OS-specific settings.
## @audience public
## @stability stable
## @replaceable yes
## @return may exit on failure conditions
function hadoop_os_tricks
{
  local bindv6only

  HADOOP_IS_CYGWIN=false
  case ${HADOOP_OS_TYPE} in
    Darwin)
      if [[ -z "${JAVA_HOME}" ]]; then
        if [[ -x /usr/libexec/java_home ]]; then
          JAVA_HOME="$(/usr/libexec/java_home)"
          export JAVA_HOME
        else
          JAVA_HOME=/Library/Java/Home
          export JAVA_HOME
        fi
      fi
    ;;
    Linux)
      # Newer versions of glibc use an arena memory allocator that
      # causes virtual memory usage to explode. This interacts badly
      # with the many threads that we use in Hadoop. Tune the variable
      # down to prevent vmem explosion.
      export MALLOC_ARENA_MAX=${MALLOC_ARENA_MAX:-4}

      # we skip the rest in QA test mode so that non-Linux
      # platforms can run the tests
      if [[ "${QATESTMODE}" = true ]]; then
        return
      fi

      # NOTE! HADOOP_ALLOW_IPV6 is a developer hook. We leave it
      # undocumented in hadoop-env.sh because we don't want users to
      # shoot themselves in the foot while devs make IPv6 work.
      bindv6only=$(/sbin/sysctl -n net.ipv6.bindv6only 2> /dev/null)
      if [[ -n "${bindv6only}" ]] &&
         [[ "${bindv6only}" -eq "1" ]] &&
         [[ "${HADOOP_ALLOW_IPV6}" != "yes" ]]; then
        hadoop_error "ERROR: \"net.ipv6.bindv6only\" is set to 1 "
        hadoop_error "ERROR: Hadoop networking could be broken. Aborting."
        hadoop_error "ERROR: For more info: http://wiki.apache.org/hadoop/HadoopIPv6"
        exit 1
      fi
    ;;
    CYGWIN*)
      # Flag that we're running on Cygwin to trigger path translation later.
      HADOOP_IS_CYGWIN=true
    ;;
  esac
}

## @description Configure/verify ${JAVA_HOME}
## @audience public
## @stability stable
## @replaceable yes
## @return may exit on failure conditions
function hadoop_java_setup
{
  # Bail if we did not detect it
  if [[ -z "${JAVA_HOME}" ]]; then
    hadoop_error "ERROR: JAVA_HOME is not set and could not be found."
    exit 1
  fi

  if [[ ! -d "${JAVA_HOME}" ]]; then
    hadoop_error "ERROR: JAVA_HOME ${JAVA_HOME} does not exist."
    exit 1
  fi

  JAVA="${JAVA_HOME}/bin/java"

  if [[ ! -x "$JAVA" ]]; then
    hadoop_error "ERROR: $JAVA is not executable."
    exit 1
  fi
}

## @description Finish Java JNI paths prior to execution
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_finalize_libpaths
{
  if [[ -n "${JAVA_LIBRARY_PATH}" ]]; then
    hadoop_translate_cygwin_path JAVA_LIBRARY_PATH
    hadoop_add_param HADOOP_OPTS java.library.path \
      "-Djava.library.path=${JAVA_LIBRARY_PATH}"
    export LD_LIBRARY_PATH
  fi
}

## @description Finish Java heap parameters prior to execution
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_finalize_hadoop_heap
{
  if [[ -n "${HADOOP_HEAPSIZE_MAX}" ]]; then
    if [[ "${HADOOP_HEAPSIZE_MAX}" =~ ^[0-9]+$ ]]; then
      HADOOP_HEAPSIZE_MAX="${HADOOP_HEAPSIZE_MAX}m"
    fi
    hadoop_add_param HADOOP_OPTS Xmx "-Xmx${HADOOP_HEAPSIZE_MAX}"
  fi

  # backwards compatibility
  if [[ -n "${HADOOP_HEAPSIZE}" ]]; then
    if [[ "${HADOOP_HEAPSIZE}" =~ ^[0-9]+$ ]]; then
      HADOOP_HEAPSIZE="${HADOOP_HEAPSIZE}m"
    fi
    hadoop_add_param HADOOP_OPTS Xmx "-Xmx${HADOOP_HEAPSIZE}"
  fi

  if [[ -n "${HADOOP_HEAPSIZE_MIN}" ]]; then
    if [[ "${HADOOP_HEAPSIZE_MIN}" =~ ^[0-9]+$ ]]; then
      HADOOP_HEAPSIZE_MIN="${HADOOP_HEAPSIZE_MIN}m"
    fi
    hadoop_add_param HADOOP_OPTS Xms "-Xms${HADOOP_HEAPSIZE_MIN}"
  fi
}

## @description Converts the contents of the variable name
## @description `varnameref` into the equivalent Windows path.
## @description If the second parameter is true, then `varnameref`
## @description is treated as though it was a path list.
## @audience public
## @stability stable
## @replaceable yes
## @param varnameref
## @param [true]
function hadoop_translate_cygwin_path
{
  if [[ "${HADOOP_IS_CYGWIN}" = "true" ]]; then
    if [[ "$2" = "true" ]]; then
      #shellcheck disable=SC2016
      eval "$1"='$(cygpath -p -w "${!1}" 2>/dev/null)'
    else
      #shellcheck disable=SC2016
      eval "$1"='$(cygpath -w "${!1}" 2>/dev/null)'
    fi
  fi
}
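
# Example (illustrative, not executed): on Cygwin,
#
#   HADOOP_LOG_DIR=/cygdrive/c/hadoop/logs
#   hadoop_translate_cygwin_path HADOOP_LOG_DIR
#   # -> HADOOP_LOG_DIR becomes C:\hadoop\logs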

## @description Finish configuring Hadoop specific system properties
## @description prior to executing Java
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_finalize_hadoop_opts
{
  hadoop_translate_cygwin_path HADOOP_LOG_DIR
  hadoop_add_param HADOOP_OPTS hadoop.log.dir "-Dhadoop.log.dir=${HADOOP_LOG_DIR}"
  hadoop_add_param HADOOP_OPTS hadoop.log.file "-Dhadoop.log.file=${HADOOP_LOGFILE}"
  hadoop_translate_cygwin_path HADOOP_HOME
  export HADOOP_HOME
  hadoop_add_param HADOOP_OPTS hadoop.home.dir "-Dhadoop.home.dir=${HADOOP_HOME}"
  hadoop_add_param HADOOP_OPTS hadoop.id.str "-Dhadoop.id.str=${HADOOP_IDENT_STRING}"
  hadoop_add_param HADOOP_OPTS hadoop.root.logger "-Dhadoop.root.logger=${HADOOP_ROOT_LOGGER}"
  hadoop_add_param HADOOP_OPTS hadoop.policy.file "-Dhadoop.policy.file=${HADOOP_POLICYFILE}"
  hadoop_add_param HADOOP_OPTS hadoop.security.logger "-Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER}"
}

## @description Finish Java classpath prior to execution
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_finalize_classpath
{
  hadoop_add_classpath "${HADOOP_CONF_DIR}" before

  # user classpath gets added at the last minute. this allows
  # override of CONF dirs and more
  hadoop_add_to_classpath_userpath
  hadoop_translate_cygwin_path CLASSPATH true
}

## @description Finish Catalina configuration prior to execution
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_finalize_catalina_opts
{
  local prefix=${HADOOP_CATALINA_PREFIX}

  hadoop_add_param CATALINA_OPTS hadoop.home.dir "-Dhadoop.home.dir=${HADOOP_HOME}"
  if [[ -n "${JAVA_LIBRARY_PATH}" ]]; then
    hadoop_add_param CATALINA_OPTS java.library.path "-Djava.library.path=${JAVA_LIBRARY_PATH}"
  fi
  hadoop_add_param CATALINA_OPTS "${prefix}.home.dir" "-D${prefix}.home.dir=${HADOOP_HOME}"
  hadoop_add_param CATALINA_OPTS "${prefix}.config.dir" "-D${prefix}.config.dir=${HADOOP_CATALINA_CONFIG}"
  hadoop_add_param CATALINA_OPTS "${prefix}.log.dir" "-D${prefix}.log.dir=${HADOOP_CATALINA_LOG}"
  hadoop_add_param CATALINA_OPTS "${prefix}.temp.dir" "-D${prefix}.temp.dir=${HADOOP_CATALINA_TEMP}"
  hadoop_add_param CATALINA_OPTS "${prefix}.admin.port" "-D${prefix}.admin.port=${HADOOP_CATALINA_ADMIN_PORT}"
  hadoop_add_param CATALINA_OPTS "${prefix}.http.port" "-D${prefix}.http.port=${HADOOP_CATALINA_HTTP_PORT}"
  hadoop_add_param CATALINA_OPTS "${prefix}.max.threads" "-D${prefix}.max.threads=${HADOOP_CATALINA_MAX_THREADS}"
  hadoop_add_param CATALINA_OPTS "${prefix}.max.http.header.size" "-D${prefix}.max.http.header.size=${HADOOP_CATALINA_MAX_HTTP_HEADER_SIZE}"
  hadoop_add_param CATALINA_OPTS "${prefix}.ssl.keystore.file" "-D${prefix}.ssl.keystore.file=${HADOOP_CATALINA_SSL_KEYSTORE_FILE}"
}

## @description Finish all the remaining environment settings prior
## @description to executing Java. This is a wrapper that calls
## @description the other `finalize` routines.
## @audience private
## @stability evolving
## @replaceable yes
function hadoop_finalize
{
  hadoop_shellprofiles_finalize

  hadoop_finalize_classpath
  hadoop_finalize_libpaths
  hadoop_finalize_hadoop_heap
  hadoop_finalize_hadoop_opts

  hadoop_translate_cygwin_path HADOOP_HOME
  hadoop_translate_cygwin_path HADOOP_CONF_DIR
  hadoop_translate_cygwin_path HADOOP_COMMON_HOME
  hadoop_translate_cygwin_path HADOOP_HDFS_HOME
  hadoop_translate_cygwin_path HADOOP_YARN_HOME
  hadoop_translate_cygwin_path HADOOP_MAPRED_HOME
}

## @description Print usage information and exit with the passed
## @description `exitcode`
## @audience public
## @stability stable
## @replaceable no
## @param exitcode
## @return This function will always exit.
function hadoop_exit_with_usage
{
  local exitcode=$1

  if [[ -z $exitcode ]]; then
    exitcode=1
  fi
  # shellcheck disable=SC2034
  if declare -F hadoop_usage >/dev/null ; then
    hadoop_usage
  elif [[ -x /usr/bin/cowsay ]]; then
    /usr/bin/cowsay -f elephant "Sorry, no help available."
  else
    hadoop_error "Sorry, no help available."
  fi
  exit $exitcode
}

## @description Verify that prerequisites have been met prior to
## @description executing a privileged program.
## @audience private
## @stability evolving
## @replaceable yes
## @return This routine may exit.
function hadoop_verify_secure_prereq
{
  # if you are on an OS like Illumos that has functional roles
  # and you are using pfexec, you'll probably want to change
  # this.

  # ${EUID} comes from the shell itself!
  if [[ "${EUID}" -ne 0 ]] && [[ -z "${HADOOP_SECURE_COMMAND}" ]]; then
    hadoop_error "ERROR: You must be a privileged user in order to run a secure service."
    exit 1
  else
    return 0
  fi
}

## @audience private
## @stability evolving
## @replaceable yes
function hadoop_setup_secure_service
{
  # need a more complicated setup? replace me!
  HADOOP_PID_DIR=${HADOOP_SECURE_PID_DIR}
  HADOOP_LOG_DIR=${HADOOP_SECURE_LOG_DIR}
}

## @audience private
## @stability evolving
## @replaceable yes
function hadoop_verify_piddir
{
  if [[ -z "${HADOOP_PID_DIR}" ]]; then
    hadoop_error "No pid directory defined."
    exit 1
  fi
  if [[ ! -w "${HADOOP_PID_DIR}" ]] && [[ ! -d "${HADOOP_PID_DIR}" ]]; then
    hadoop_error "WARNING: ${HADOOP_PID_DIR} does not exist. Creating."
    mkdir -p "${HADOOP_PID_DIR}" > /dev/null 2>&1
    if [[ $? -gt 0 ]]; then
      hadoop_error "ERROR: Unable to create ${HADOOP_PID_DIR}. Aborting."
      exit 1
    fi
  fi
  touch "${HADOOP_PID_DIR}/$$" >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Unable to write in ${HADOOP_PID_DIR}. Aborting."
    exit 1
  fi
  rm "${HADOOP_PID_DIR}/$$" >/dev/null 2>&1
}

## @audience private
## @stability evolving
## @replaceable yes
function hadoop_verify_logdir
{
  if [[ -z "${HADOOP_LOG_DIR}" ]]; then
    hadoop_error "No log directory defined."
    exit 1
  fi
  if [[ ! -w "${HADOOP_LOG_DIR}" ]] && [[ ! -d "${HADOOP_LOG_DIR}" ]]; then
    hadoop_error "WARNING: ${HADOOP_LOG_DIR} does not exist. Creating."
    mkdir -p "${HADOOP_LOG_DIR}" > /dev/null 2>&1
    if [[ $? -gt 0 ]]; then
      hadoop_error "ERROR: Unable to create ${HADOOP_LOG_DIR}. Aborting."
      exit 1
    fi
  fi
  touch "${HADOOP_LOG_DIR}/$$" >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Unable to write in ${HADOOP_LOG_DIR}. Aborting."
    exit 1
  fi
  rm "${HADOOP_LOG_DIR}/$$" >/dev/null 2>&1
}

## @description Determine the status of the daemon referenced
## @description by `pidfile`
## @audience public
## @stability stable
## @replaceable yes
## @param pidfile
## @return (mostly) LSB 4.1.0 compatible status
function hadoop_status_daemon
{
  #
  # LSB 4.1.0 compatible status command (1)
  #
  # 0 = program is running
  # 1 = dead, but still a pid (2)
  # 2 = (not used by us)
  # 3 = not running
  #
  # 1 - this is not an endorsement of the LSB
  #
  # 2 - technically, the specification says /var/run/pid, so
  #     we should never return this value, but we're giving
  #     them the benefit of a doubt and returning 1 even if
  #     our pid is not in /var/run .
  #
  local pidfile=$1
  shift

  local pid

  if [[ -f "${pidfile}" ]]; then
    pid=$(cat "${pidfile}")
    if ps -p "${pid}" > /dev/null 2>&1; then
      return 0
    fi
    return 1
  fi
  return 3
}
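
# Example (illustrative, not executed):
#
#   hadoop_status_daemon "${HADOOP_PID_DIR}/hadoop-namenode.pid"
#   case $? in
#     0) echo "running" ;;
#     1) echo "dead, but pidfile exists" ;;
#     3) echo "not running" ;;
#   esac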

## @description Execute the Java `class`, passing along any `options`.
## @description Additionally, set the Java property -Dproc_`command`.
## @audience public
## @stability stable
## @replaceable yes
## @param command
## @param class
## @param [options]
function hadoop_java_exec
{
  # run a java command. this is used for
  # non-daemons
  local command=$1
  local class=$2
  shift 2

  hadoop_debug "Final CLASSPATH: ${CLASSPATH}"
  hadoop_debug "Final HADOOP_OPTS: ${HADOOP_OPTS}"
  hadoop_debug "Final JAVA_HOME: ${JAVA_HOME}"
  hadoop_debug "java: ${JAVA}"
  hadoop_debug "Class name: ${class}"
  hadoop_debug "Command line options: $*"

  export CLASSPATH
  #shellcheck disable=SC2086
  exec "${JAVA}" "-Dproc_${command}" ${HADOOP_OPTS} "${class}" "$@"
}
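
# Example (illustrative, not executed): the `hadoop fs` subcommand boils
# down to something like:
#
#   hadoop_java_exec fs org.apache.hadoop.fs.FsShell -ls /
#   # -> exec "${JAVA}" -Dproc_fs ${HADOOP_OPTS} org.apache.hadoop.fs.FsShell -ls /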

## @description Start a non-privileged daemon in the foreground.
## @audience private
## @stability evolving
## @replaceable yes
## @param command
## @param class
## @param pidfile
## @param [options]
function hadoop_start_daemon
{
  # this is our non-privileged daemon starter
  # that fires up a daemon in the *foreground*
  # so complex! so wow! much java!
  local command=$1
  local class=$2
  local pidfile=$3
  shift 3

  hadoop_debug "Final CLASSPATH: ${CLASSPATH}"
  hadoop_debug "Final HADOOP_OPTS: ${HADOOP_OPTS}"
  hadoop_debug "Final JAVA_HOME: ${JAVA_HOME}"
  hadoop_debug "java: ${JAVA}"
  hadoop_debug "Class name: ${class}"
  hadoop_debug "Command line options: $*"

  # this is for the non-daemon pid creation
  #shellcheck disable=SC2086
  echo $$ > "${pidfile}" 2>/dev/null
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot write ${command} pid ${pidfile}."
  fi

  export CLASSPATH
  #shellcheck disable=SC2086
  exec "${JAVA}" "-Dproc_${command}" ${HADOOP_OPTS} "${class}" "$@"
}

## @description Start a non-privileged daemon in the background.
## @audience private
## @stability evolving
## @replaceable yes
## @param command
## @param class
## @param pidfile
## @param outfile
## @param [options]
function hadoop_start_daemon_wrapper
{
  local daemonname=$1
  local class=$2
  local pidfile=$3
  local outfile=$4
  shift 4

  local counter

  hadoop_rotate_log "${outfile}"

  hadoop_start_daemon "${daemonname}" \
    "$class" \
    "${pidfile}" \
    "$@" >> "${outfile}" 2>&1 < /dev/null &

  # we need to avoid a race condition here
  # so let's wait for the fork to finish
  # before overriding with the daemonized pid
  (( counter=0 ))
  while [[ ! -f ${pidfile} && ${counter} -le 5 ]]; do
    sleep 1
    (( counter++ ))
  done

  # this is for daemon pid creation
  #shellcheck disable=SC2086
  echo $! > "${pidfile}" 2>/dev/null
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot write ${daemonname} pid ${pidfile}."
  fi

  # shellcheck disable=SC2086
  renice "${HADOOP_NICENESS}" $! >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot set priority of ${daemonname} process $!"
  fi

  # shellcheck disable=SC2086
  disown %+ >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot disconnect ${daemonname} process $!"
  fi
  sleep 1

  # capture the ulimit output
  ulimit -a >> "${outfile}" 2>&1

  # shellcheck disable=SC2086
  if ! ps -p $! >/dev/null 2>&1; then
    return 1
  fi
  return 0
}
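
# A sketch of the background path (class and file names illustrative).
# The wrapper takes the same first three arguments as
# hadoop_start_daemon, plus an out file:
#
#   hadoop_start_daemon_wrapper datanode \
#     org.apache.hadoop.hdfs.server.datanode.DataNode \
#     "${HADOOP_PID_DIR}/example.pid" "${HADOOP_LOG_DIR}/example.out"
#
# It returns 0 if the detached child is still alive after roughly a
# second, and 1 otherwise.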

## @description Start a privileged daemon in the foreground.
## @audience private
## @stability evolving
## @replaceable yes
## @param command
## @param class
## @param daemonpidfile
## @param daemonoutfile
## @param daemonerrfile
## @param wrapperpidfile
## @param [options]
function hadoop_start_secure_daemon
{
  # this is used to launch a secure daemon in the *foreground*
  #
  local daemonname=$1
  local class=$2

  # pid file to create for our daemon
  local daemonpidfile=$3

  # where to send stdout. jsvc has bad habits so this *may* be &1
  # which means you send it to stdout!
  local daemonoutfile=$4

  # where to send stderr. same thing, except &2 = stderr
  local daemonerrfile=$5
  local privpidfile=$6
  shift 6

  hadoop_rotate_log "${daemonoutfile}"
  hadoop_rotate_log "${daemonerrfile}"

  # shellcheck disable=SC2153
  jsvc="${JSVC_HOME}/jsvc"
  if [[ ! -f "${jsvc}" ]]; then
    hadoop_error "JSVC_HOME is not set or set incorrectly. jsvc is required to run secure"
    hadoop_error "or privileged daemons. Please download and install jsvc from "
    hadoop_error "http://archive.apache.org/dist/commons/daemon/binaries/ "
    hadoop_error "and set JSVC_HOME to the directory containing the jsvc binary."
    exit 1
  fi

  # note that shellcheck will throw a
  # bogus for-our-use-case 2086 here.
  # it doesn't properly support multi-line situations

  hadoop_debug "Final CLASSPATH: ${CLASSPATH}"
  hadoop_debug "Final HADOOP_OPTS: ${HADOOP_OPTS}"
  hadoop_debug "Final JSVC_HOME: ${JSVC_HOME}"
  hadoop_debug "jsvc: ${jsvc}"
  hadoop_debug "Class name: ${class}"
  hadoop_debug "Command line options: $*"

  #shellcheck disable=SC2086
  echo $$ > "${privpidfile}" 2>/dev/null
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot write ${daemonname} pid ${privpidfile}."
  fi

  # shellcheck disable=SC2086
  exec "${jsvc}" \
    "-Dproc_${daemonname}" \
    -outfile "${daemonoutfile}" \
    -errfile "${daemonerrfile}" \
    -pidfile "${daemonpidfile}" \
    -nodetach \
    -user "${HADOOP_SECURE_USER}" \
    -cp "${CLASSPATH}" \
    ${HADOOP_OPTS} \
    "${class}" "$@"
}
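
# For context (a non-authoritative note): jsvc starts as root so the
# daemon can bind privileged ports, then drops to the -user account.
# -nodetach keeps jsvc in the foreground, which is what lets the
# wrapper below babysit the process and manage the pid files itself.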

## @description Start a privileged daemon in the background.
## @audience private
## @stability evolving
## @replaceable yes
## @param command
## @param class
## @param daemonpidfile
## @param daemonoutfile
## @param wrapperpidfile
## @param wrapperoutfile
## @param daemonerrfile
## @param [options]
function hadoop_start_secure_daemon_wrapper
{
  # this wraps hadoop_start_secure_daemon to take care
  # of the dirty work to launch a daemon in the background!
  local daemonname=$1
  local class=$2

  # same rules as hadoop_start_secure_daemon except we
  # have some additional parameters

  local daemonpidfile=$3
  local daemonoutfile=$4

  # the pid file of the subprocess that spawned our
  # secure launcher
  local jsvcpidfile=$5

  # the output of the subprocess that spawned our secure
  # launcher
  local jsvcoutfile=$6

  local daemonerrfile=$7
  shift 7

  local counter

  hadoop_rotate_log "${jsvcoutfile}"

  hadoop_start_secure_daemon \
    "${daemonname}" \
    "${class}" \
    "${daemonpidfile}" \
    "${daemonoutfile}" \
    "${daemonerrfile}" \
    "${jsvcpidfile}" "$@" >> "${jsvcoutfile}" 2>&1 < /dev/null &

  # we need to avoid a race condition here
  # so let's wait for the fork to finish
  # before overriding with the daemonized pid
  (( counter=0 ))
  while [[ ! -f ${daemonpidfile} && ${counter} -le 5 ]]; do
    sleep 1
    (( counter++ ))
  done

  # this is for the daemon pid creation
  #shellcheck disable=SC2086
  echo $! > "${jsvcpidfile}" 2>/dev/null
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot write ${daemonname} pid ${jsvcpidfile}."
  fi

  sleep 1
  #shellcheck disable=SC2086
  renice "${HADOOP_NICENESS}" $! >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot set priority of ${daemonname} process $!"
  fi
  if [[ -f "${daemonpidfile}" ]]; then
    #shellcheck disable=SC2046
    renice "${HADOOP_NICENESS}" $(cat "${daemonpidfile}" 2>/dev/null) >/dev/null 2>&1
    if [[ $? -gt 0 ]]; then
      hadoop_error "ERROR: Cannot set priority of ${daemonname} process $(cat "${daemonpidfile}" 2>/dev/null)"
    fi
  fi

  #shellcheck disable=SC2046
  disown %+ >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot disconnect ${daemonname} process $!"
  fi

  # capture the ulimit output
  su "${HADOOP_SECURE_USER}" -c 'bash -c "ulimit -a"' >> "${jsvcoutfile}" 2>&1
  #shellcheck disable=SC2086
  if ! ps -p $! >/dev/null 2>&1; then
    return 1
  fi
  return 0
}
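
# A non-authoritative note on the double renice above: $! is the jsvc
# launcher itself, while the pid recorded in daemonpidfile is the
# de-privileged child that jsvc re-spawns, so both get reniced to keep
# their priorities in sync. The ulimit capture runs under su so it
# reports the limits of HADOOP_SECURE_USER rather than those of root.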

## @description Stop the non-privileged `command` daemon
## @description that is running at `pidfile`.
## @audience public
## @stability stable
## @replaceable yes
## @param command
## @param pidfile
function hadoop_stop_daemon
{
  local cmd=$1
  local pidfile=$2
  shift 2

  local pid
  local cur_pid

  if [[ -f "${pidfile}" ]]; then
    pid=$(cat "$pidfile")

    kill "${pid}" >/dev/null 2>&1
    sleep "${HADOOP_STOP_TIMEOUT}"
    if kill -0 "${pid}" > /dev/null 2>&1; then
      hadoop_error "WARNING: ${cmd} did not stop gracefully after ${HADOOP_STOP_TIMEOUT} seconds: Trying to kill with kill -9"
      kill -9 "${pid}" >/dev/null 2>&1
    fi
    if ps -p "${pid}" > /dev/null 2>&1; then
      hadoop_error "ERROR: Unable to kill ${pid}"
    else
      cur_pid=$(cat "$pidfile")
      if [[ "${pid}" = "${cur_pid}" ]]; then
        rm -f "${pidfile}" >/dev/null 2>&1
      else
        hadoop_error "WARNING: pid has changed for ${cmd}, skip deleting pid file"
      fi
    fi
  fi
}
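
# The shutdown sequence this implements is SIGTERM, wait
# HADOOP_STOP_TIMEOUT seconds, then SIGKILL as a last resort. A sketch
# of stopping a daemon by hand (the path is illustrative):
#
#   HADOOP_STOP_TIMEOUT=5
#   hadoop_stop_daemon datanode "${HADOOP_PID_DIR}/example-datanode.pid"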

## @description Stop the privileged `command` daemon
## @description that is running at `daemonpidfile` and launched with
## @description the wrapper at `wrapperpidfile`.
## @audience public
## @stability stable
## @replaceable yes
## @param command
## @param daemonpidfile
## @param wrapperpidfile
function hadoop_stop_secure_daemon
{
  local command=$1
  local daemonpidfile=$2
  local privpidfile=$3
  shift 3

  local ret
  local daemon_pid
  local priv_pid
  local cur_daemon_pid
  local cur_priv_pid

  daemon_pid=$(cat "$daemonpidfile")
  priv_pid=$(cat "$privpidfile")

  hadoop_stop_daemon "${command}" "${daemonpidfile}"
  ret=$?

  cur_daemon_pid=$(cat "$daemonpidfile")
  cur_priv_pid=$(cat "$privpidfile")

  if [[ "${daemon_pid}" = "${cur_daemon_pid}" ]]; then
    rm -f "${daemonpidfile}" >/dev/null 2>&1
  else
    hadoop_error "WARNING: daemon pid has changed for ${command}, skip deleting daemon pid file"
  fi

  if [[ "${priv_pid}" = "${cur_priv_pid}" ]]; then
    rm -f "${privpidfile}" >/dev/null 2>&1
  else
    hadoop_error "WARNING: priv pid has changed for ${command}, skip deleting priv pid file"
  fi
  return ${ret}
}

## @description Manage a non-privileged daemon.
## @audience private
## @stability evolving
## @replaceable yes
## @param [start|stop|status|default]
## @param command
## @param class
## @param daemonpidfile
## @param daemonoutfile
## @param [options]
function hadoop_daemon_handler
{
  local daemonmode=$1
  local daemonname=$2
  local class=$3
  local daemon_pidfile=$4
  local daemon_outfile=$5
  shift 5

  case ${daemonmode} in
    status)
      hadoop_status_daemon "${daemon_pidfile}"
      exit $?
    ;;

    stop)
      hadoop_stop_daemon "${daemonname}" "${daemon_pidfile}"
      exit $?
    ;;

    ##COMPAT -- older hadoops would also start daemons by default
    start|default)
      hadoop_verify_piddir
      hadoop_verify_logdir
      hadoop_status_daemon "${daemon_pidfile}"
      if [[ $? == 0 ]]; then
        hadoop_error "${daemonname} is running as process $(cat "${daemon_pidfile}"). Stop it first."
        exit 1
      else
        # stale pid file, so just remove it and continue on
        rm -f "${daemon_pidfile}" >/dev/null 2>&1
      fi

      ##COMPAT - differentiate between --daemon start and nothing
      # "nothing" shouldn't detach
      if [[ "$daemonmode" = "default" ]]; then
        hadoop_start_daemon "${daemonname}" "${class}" "${daemon_pidfile}" "$@"
      else
        hadoop_start_daemon_wrapper "${daemonname}" \
          "${class}" "${daemon_pidfile}" "${daemon_outfile}" "$@"
      fi
    ;;
  esac
}
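
# A dispatch sketch (the class and file names are illustrative):
#
#   hadoop_daemon_handler "${HADOOP_DAEMON_MODE}" balancer \
#     org.apache.hadoop.hdfs.server.balancer.Balancer \
#     "${HADOOP_PID_DIR}/example.pid" "${HADOOP_LOG_DIR}/example.out" "$@"
#
# status and stop exit immediately with the underlying return code;
# "default" starts in the foreground, while "start" detaches via the
# wrapper.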

## @description Manage a privileged daemon.
## @audience private
## @stability evolving
## @replaceable yes
## @param [start|stop|status|default]
## @param command
## @param class
## @param daemonpidfile
## @param daemonoutfile
## @param wrapperpidfile
## @param wrapperoutfile
## @param wrappererrfile
## @param [options]
function hadoop_secure_daemon_handler
{
  local daemonmode=$1
  local daemonname=$2
  local classname=$3
  local daemon_pidfile=$4
  local daemon_outfile=$5
  local priv_pidfile=$6
  local priv_outfile=$7
  local priv_errfile=$8
  shift 8

  case ${daemonmode} in
    status)
      hadoop_status_daemon "${daemon_pidfile}"
      exit $?
    ;;

    stop)
      hadoop_stop_secure_daemon "${daemonname}" \
        "${daemon_pidfile}" "${priv_pidfile}"
      exit $?
    ;;

    ##COMPAT -- older hadoops would also start daemons by default
    start|default)
      hadoop_verify_piddir
      hadoop_verify_logdir
      hadoop_status_daemon "${daemon_pidfile}"
      if [[ $? == 0 ]]; then
        hadoop_error "${daemonname} is running as process $(cat "${daemon_pidfile}"). Stop it first."
        exit 1
      else
        # stale pid file, so just remove it and continue on
        rm -f "${daemon_pidfile}" >/dev/null 2>&1
      fi

      ##COMPAT - differentiate between --daemon start and nothing
      # "nothing" shouldn't detach
      if [[ "${daemonmode}" = "default" ]]; then
        hadoop_start_secure_daemon "${daemonname}" "${classname}" \
          "${daemon_pidfile}" "${daemon_outfile}" \
          "${priv_errfile}" "${priv_pidfile}" "$@"
      else
        hadoop_start_secure_daemon_wrapper "${daemonname}" "${classname}" \
          "${daemon_pidfile}" "${daemon_outfile}" \
          "${priv_pidfile}" "${priv_outfile}" "${priv_errfile}" "$@"
      fi
    ;;
  esac
}

## @description Verify that ${USER} is allowed to execute the
## @description given subcommand.
## @audience public
## @stability stable
## @replaceable yes
## @param subcommand
## @return will exit on failure conditions
function hadoop_verify_user
{
  local command=$1
  local uservar="HADOOP_${command}_USER"

  # ${!uservar} is bash indirect expansion: it reads the variable
  # whose name is stored in uservar
  if [[ -n ${!uservar} ]]; then
    if [[ ${!uservar} != "${USER}" ]]; then
      hadoop_error "ERROR: ${command} can only be executed by ${!uservar}."
      exit 1
    fi
  fi
}
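
# A usage sketch: with HADOOP_namenode_USER=hdfs exported (a
# hypothetical policy setting), the following aborts unless USER
# is hdfs:
#
#   hadoop_verify_user namenode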

## @description Perform the 'hadoop classpath', etc subcommand with the given
## @description parameters
## @audience private
## @stability evolving
## @replaceable yes
## @param [parameters]
## @return will print & exit with no params
function hadoop_do_classpath_subcommand
{
  if [[ "$#" -gt 1 ]]; then
    # with extra arguments, hand back the Classpath utility class in
    # the variable whose name was passed in $1 for the caller to launch
    eval "$1"=org.apache.hadoop.util.Classpath
  else
    hadoop_finalize
    echo "${CLASSPATH}"
    exit 0
  fi
}
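
# A sketch of the calling convention (the variable name CLASS is
# illustrative):
#
#   hadoop_do_classpath_subcommand CLASS "$@"
#
# If "$@" carried extra arguments, CLASS now holds
# org.apache.hadoop.util.Classpath for the caller to run; otherwise
# the expanded CLASSPATH was printed and the shell exited.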

## @description generic shell script option parser. sets
## @description HADOOP_PARSE_COUNTER to the number of arguments
## @description the caller should shift
## @audience private
## @stability evolving
## @replaceable yes
## @param [parameters, typically "$@"]
function hadoop_parse_args
{
  HADOOP_DAEMON_MODE="default"
  HADOOP_PARSE_COUNTER=0

  # not all of the options supported here are supported by all commands
  # however these are:
  hadoop_add_option "--config dir" "Hadoop config directory"
  hadoop_add_option "--debug" "turn on shell script debug mode"
  hadoop_add_option "--help" "usage information"

  while true; do
    hadoop_debug "hadoop_parse_args: processing $1"
    case $1 in
      --buildpaths)
        # shellcheck disable=SC2034
        HADOOP_ENABLE_BUILD_PATHS=true
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+1))
      ;;
      --config)
        shift
        confdir=$1
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
        if [[ -d "${confdir}" ]]; then
          # shellcheck disable=SC2034
          HADOOP_CONF_DIR="${confdir}"
        elif [[ -z "${confdir}" ]]; then
          hadoop_error "ERROR: No parameter provided for --config "
          hadoop_exit_with_usage 1
        else
          hadoop_error "ERROR: Cannot find configuration directory \"${confdir}\""
          hadoop_exit_with_usage 1
        fi
      ;;
      --daemon)
        shift
        HADOOP_DAEMON_MODE=$1
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
        if [[ -z "${HADOOP_DAEMON_MODE}" || \
          ! "${HADOOP_DAEMON_MODE}" =~ ^st(art|op|atus)$ ]]; then
          hadoop_error "ERROR: --daemon must be followed by either \"start\", \"stop\", or \"status\"."
          hadoop_exit_with_usage 1
        fi
      ;;
      --debug)
        shift
        # shellcheck disable=SC2034
        HADOOP_SHELL_SCRIPT_DEBUG=true
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+1))
      ;;
      --help|-help|-h|help|--h|--\?|-\?|\?)
        hadoop_exit_with_usage 0
      ;;
      --hostnames)
        shift
        # shellcheck disable=SC2034
        HADOOP_SLAVE_NAMES="$1"
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
      ;;
      --hosts)
        shift
        hadoop_populate_slaves_file "$1"
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
      ;;
      --loglevel)
        shift
        # shellcheck disable=SC2034
        HADOOP_LOGLEVEL="$1"
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
      ;;
      --slaves)
        shift
        # shellcheck disable=SC2034
        HADOOP_SLAVE_MODE=true
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+1))
      ;;
      *)
        break
      ;;
    esac
  done

  hadoop_debug "hadoop_parse: asking caller to skip ${HADOOP_PARSE_COUNTER}"
}
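
# A sketch of the intended calling pattern: parse, then shift away
# what was consumed so that "$@" starts at the subcommand:
#
#   hadoop_parse_args "$@"
#   shift "${HADOOP_PARSE_COUNTER}"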