
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# we need to declare this globally as an array, which can only
# be done outside of a function
declare -a HADOOP_SUBCMD_USAGE
declare -a HADOOP_OPTION_USAGE

## @description  Print a message to stderr
## @audience     public
## @stability    stable
## @replaceable  no
## @param        string
function hadoop_error
{
  echo "$*" 1>&2
}

## @description  Print a message to stderr if --debug is turned on
## @audience     public
## @stability    stable
## @replaceable  no
## @param        string
function hadoop_debug
{
  if [[ -n "${HADOOP_SHELL_SCRIPT_DEBUG}" ]]; then
    echo "DEBUG: $*" 1>&2
  fi
}
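
# Example (illustrative, not part of upstream): hadoop_debug only
# writes when HADOOP_SHELL_SCRIPT_DEBUG is set, e.g.:
#
#   HADOOP_SHELL_SCRIPT_DEBUG=true
#   hadoop_debug "checking JAVA_HOME"   # => DEBUG: checking JAVA_HOME
#   hadoop_error "always printed"       # always goes to stderr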

## @description  Given variable $1 delete $2 from it
## @audience     public
## @stability    stable
## @replaceable  no
function hadoop_delete_entry
{
  if [[ ${!1} =~ \ ${2}\  ]] ; then
    hadoop_debug "Removing ${2} from ${1}"
    eval "${1}"=\""${!1// ${2} }"\"
  fi
}

## @description  Given variable $1 add $2 to it
## @audience     public
## @stability    stable
## @replaceable  no
function hadoop_add_entry
{
  if [[ ! ${!1} =~ \ ${2}\  ]] ; then
    hadoop_debug "Adding ${2} to ${1}"
    #shellcheck disable=SC2140
    eval "${1}"=\""${!1} ${2} "\"
  fi
}
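
# Example (illustrative): these helpers manage space-delimited lists
# held in a named variable; entries are padded with spaces so substring
# matches cannot collide:
#
#   MYLIST=""
#   hadoop_add_entry MYLIST alpha      # MYLIST=" alpha "
#   hadoop_add_entry MYLIST alpha      # no-op, already present
#   hadoop_delete_entry MYLIST alpha   # alpha removed again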

## @description  Given variable $1 determine if $2 is in it
## @audience     public
## @stability    stable
## @replaceable  no
## @return       0 = yes, 1 = no
function hadoop_verify_entry
{
  # this unfortunately can't really be tested by bats. :(
  # so if this changes, be aware that unit tests effectively
  # do this function in them
  [[ ${!1} =~ \ ${2}\  ]]
}

## @description  Add a subcommand to the usage output
## @audience     private
## @stability    evolving
## @replaceable  no
## @param        subcommand
## @param        subcommanddesc
function hadoop_add_subcommand
{
  local subcmd=$1
  local text=$2

  HADOOP_SUBCMD_USAGE[${HADOOP_SUBCMD_USAGE_COUNTER}]="${subcmd}@${text}"
  ((HADOOP_SUBCMD_USAGE_COUNTER=HADOOP_SUBCMD_USAGE_COUNTER+1))
}

## @description  Add an option to the usage output
## @audience     private
## @stability    evolving
## @replaceable  no
## @param        option
## @param        optiondesc
function hadoop_add_option
{
  local option=$1
  local text=$2

  HADOOP_OPTION_USAGE[${HADOOP_OPTION_USAGE_COUNTER}]="${option}@${text}"
  ((HADOOP_OPTION_USAGE_COUNTER=HADOOP_OPTION_USAGE_COUNTER+1))
}

## @description  Reset the usage information to blank
## @audience     private
## @stability    evolving
## @replaceable  no
function hadoop_reset_usage
{
  HADOOP_SUBCMD_USAGE=()
  HADOOP_OPTION_USAGE=()
  HADOOP_SUBCMD_USAGE_COUNTER=0
  HADOOP_OPTION_USAGE_COUNTER=0
}

## @description  Print a screen-size aware two-column output
## @audience     private
## @stability    evolving
## @replaceable  no
## @param        array
function hadoop_generic_columnprinter
{
  declare -a input=("$@")
  declare -i i=0
  declare -i counter=0
  declare line
  declare text
  declare option
  declare giventext
  declare -i maxoptsize
  declare -i foldsize
  declare -a tmpa
  declare numcols

  if [[ -n "${COLUMNS}" ]]; then
    numcols=${COLUMNS}
  else
    numcols=$(tput cols 2>/dev/null)
  fi

  if [[ -z "${numcols}"
     || ! "${numcols}" =~ ^[0-9]+$ ]]; then
    numcols=75
  else
    ((numcols=numcols-5))
  fi

  while read -r line; do
    tmpa[${counter}]=${line}
    ((counter=counter+1))
    option=$(echo "${line}" | cut -f1 -d'@')
    if [[ ${#option} -gt ${maxoptsize} ]]; then
      maxoptsize=${#option}
    fi
  done < <(for text in "${input[@]}"; do
    echo "${text}"
  done | sort)

  i=0
  ((foldsize=numcols-maxoptsize))

  until [[ $i -eq ${#tmpa[@]} ]]; do
    option=$(echo "${tmpa[$i]}" | cut -f1 -d'@')
    giventext=$(echo "${tmpa[$i]}" | cut -f2 -d'@')

    while read -r line; do
      printf "%-${maxoptsize}s %-s\n" "${option}" "${line}"
      option=" "
    done < <(echo "${giventext}"| fold -s -w ${foldsize})
    ((i=i+1))
  done
}
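
# Example (illustrative): entries are "option@description" pairs; the
# description column is folded to the detected terminal width:
#
#   declare -a demo=("--debug@turn on shell script debug mode"
#                    "--help@this help message")
#   hadoop_generic_columnprinter "${demo[@]}"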

## @description  generate standard usage output
## @description  and optionally takes a class
## @audience     private
## @stability    evolving
## @replaceable  no
## @param        execname
## @param        true|false
## @param        [text to use in place of SUBCOMMAND]
function hadoop_generate_usage
{
  local cmd=$1
  local takesclass=$2
  local subcmdtext=${3:-"SUBCOMMAND"}
  local haveoptions
  local optstring
  local havesubs
  local subcmdstring

  cmd=${cmd##*/}

  if [[ -n "${HADOOP_OPTION_USAGE_COUNTER}"
     && "${HADOOP_OPTION_USAGE_COUNTER}" -gt 0 ]]; then
    haveoptions=true
    optstring=" [OPTIONS]"
  fi

  if [[ -n "${HADOOP_SUBCMD_USAGE_COUNTER}"
     && "${HADOOP_SUBCMD_USAGE_COUNTER}" -gt 0 ]]; then
    havesubs=true
    subcmdstring=" ${subcmdtext} [${subcmdtext} OPTIONS]"
  fi

  echo "Usage: ${cmd}${optstring}${subcmdstring}"
  if [[ ${takesclass} = true ]]; then
    echo " or ${cmd}${optstring} CLASSNAME [CLASSNAME OPTIONS]"
    echo " where CLASSNAME is a user-provided Java class"
  fi

  if [[ "${haveoptions}" = true ]]; then
    echo ""
    echo " OPTIONS is none or any of:"
    echo ""
    hadoop_generic_columnprinter "${HADOOP_OPTION_USAGE[@]}"
  fi

  if [[ "${havesubs}" = true ]]; then
    echo ""
    echo " ${subcmdtext} is one of:"
    echo ""
    hadoop_generic_columnprinter "${HADOOP_SUBCMD_USAGE[@]}"
    echo ""
    echo "${subcmdtext} may print help when invoked w/o parameters or with -h."
  fi
}

## @description  Replace `oldvar` with `newvar` if `oldvar` exists.
## @audience     public
## @stability    stable
## @replaceable  yes
## @param        oldvar
## @param        newvar
function hadoop_deprecate_envvar
{
  local oldvar=$1
  local newvar=$2
  local oldval=${!oldvar}
  local newval=${!newvar}

  if [[ -n "${oldval}" ]]; then
    hadoop_error "WARNING: ${oldvar} has been replaced by ${newvar}. Using value of ${oldvar}."
    # shellcheck disable=SC2086
    eval ${newvar}=\"${oldval}\"

    # shellcheck disable=SC2086
    newval=${oldval}

    # shellcheck disable=SC2086
    eval ${newvar}=\"${newval}\"
  fi
}
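
# Example (illustrative; OLD_VAR and NEW_VAR are hypothetical names):
#
#   OLD_VAR=somevalue
#   hadoop_deprecate_envvar OLD_VAR NEW_VAR
#   # => WARNING: OLD_VAR has been replaced by NEW_VAR. Using value of OLD_VAR.
#   # NEW_VAR is now "somevalue"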

## @description  Bootstraps the Hadoop shell environment
## @audience     private
## @stability    evolving
## @replaceable  no
function hadoop_bootstrap
{
  # the root of the Hadoop installation
  # See HADOOP-6255 for the expected directory structure layout

  if [[ -n "${DEFAULT_LIBEXEC_DIR}" ]]; then
    hadoop_error "WARNING: DEFAULT_LIBEXEC_DIR ignored. It has been replaced by HADOOP_DEFAULT_LIBEXEC_DIR."
  fi

  # By now, HADOOP_LIBEXEC_DIR should have been defined upstream
  # We can piggyback off of that to figure out where the default
  # HADOOP_PREFIX should be. This allows us to run without
  # HADOOP_PREFIX ever being defined by a human! As a consequence
  # HADOOP_LIBEXEC_DIR now becomes perhaps the single most powerful
  # env var within Hadoop.
  if [[ -z "${HADOOP_LIBEXEC_DIR}" ]]; then
    hadoop_error "HADOOP_LIBEXEC_DIR is not defined. Exiting."
    exit 1
  fi
  HADOOP_DEFAULT_PREFIX=$(cd -P -- "${HADOOP_LIBEXEC_DIR}/.." >/dev/null && pwd -P)
  HADOOP_PREFIX=${HADOOP_PREFIX:-$HADOOP_DEFAULT_PREFIX}
  export HADOOP_PREFIX

  #
  # short-cuts. vendors may redefine these as well, preferably
  # in hadoop-layouts.sh
  #
  HADOOP_COMMON_DIR=${HADOOP_COMMON_DIR:-"share/hadoop/common"}
  HADOOP_COMMON_LIB_JARS_DIR=${HADOOP_COMMON_LIB_JARS_DIR:-"share/hadoop/common/lib"}
  HADOOP_COMMON_LIB_NATIVE_DIR=${HADOOP_COMMON_LIB_NATIVE_DIR:-"lib/native"}
  HDFS_DIR=${HDFS_DIR:-"share/hadoop/hdfs"}
  HDFS_LIB_JARS_DIR=${HDFS_LIB_JARS_DIR:-"share/hadoop/hdfs/lib"}
  YARN_DIR=${YARN_DIR:-"share/hadoop/yarn"}
  YARN_LIB_JARS_DIR=${YARN_LIB_JARS_DIR:-"share/hadoop/yarn/lib"}
  MAPRED_DIR=${MAPRED_DIR:-"share/hadoop/mapreduce"}
  MAPRED_LIB_JARS_DIR=${MAPRED_LIB_JARS_DIR:-"share/hadoop/mapreduce/lib"}
  HADOOP_TOOLS_HOME=${HADOOP_TOOLS_HOME:-${HADOOP_PREFIX}}
  HADOOP_TOOLS_DIR=${HADOOP_TOOLS_DIR:-"share/hadoop/tools"}
  HADOOP_TOOLS_LIB_JARS_DIR=${HADOOP_TOOLS_LIB_JARS_DIR:-"${HADOOP_TOOLS_DIR}/lib"}

  # usage output set to zero
  hadoop_reset_usage

  export HADOOP_OS_TYPE=${HADOOP_OS_TYPE:-$(uname -s)}

  # defaults
  export HADOOP_OPTS=${HADOOP_OPTS:-"-Djava.net.preferIPv4Stack=true"}
  hadoop_debug "Initial HADOOP_OPTS=${HADOOP_OPTS}"
}

## @description  Locate Hadoop's configuration directory
## @audience     private
## @stability    evolving
## @replaceable  no
function hadoop_find_confdir
{
  local conf_dir

  # An attempt at compatibility with some Hadoop 1.x
  # installs.
  if [[ -e "${HADOOP_PREFIX}/conf/hadoop-env.sh" ]]; then
    conf_dir="conf"
  else
    conf_dir="etc/hadoop"
  fi
  export HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-${HADOOP_PREFIX}/${conf_dir}}"

  hadoop_debug "HADOOP_CONF_DIR=${HADOOP_CONF_DIR}"
}

## @description  Validate ${HADOOP_CONF_DIR}
## @audience     public
## @stability    stable
## @replaceable  yes
## @return       will exit on failure conditions
function hadoop_verify_confdir
{
  # Check only log4j.properties by default.
  # --loglevel does not work without logger settings in log4j.properties.
  if [[ ! -f "${HADOOP_CONF_DIR}/log4j.properties" ]]; then
    hadoop_error "WARNING: log4j.properties is not found. HADOOP_CONF_DIR may be incomplete."
  fi
}

## @description  Import the hadoop-env.sh settings
## @audience     private
## @stability    evolving
## @replaceable  no
function hadoop_exec_hadoopenv
{
  if [[ -z "${HADOOP_ENV_PROCESSED}" ]]; then
    if [[ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]]; then
      export HADOOP_ENV_PROCESSED=true
      # shellcheck disable=SC1090
      . "${HADOOP_CONF_DIR}/hadoop-env.sh"
    fi
  fi
}

## @description  Import the replaced functions
## @audience     private
## @stability    evolving
## @replaceable  no
function hadoop_exec_userfuncs
{
  if [[ -e "${HADOOP_CONF_DIR}/hadoop-user-functions.sh" ]]; then
    # shellcheck disable=SC1090
    . "${HADOOP_CONF_DIR}/hadoop-user-functions.sh"
  fi
}

## @description  Read the user's settings. This provides for users to
## @description  override and/or append hadoop-env.sh. It is not meant
## @description  as a complete system override.
## @audience     private
## @stability    evolving
## @replaceable  yes
function hadoop_exec_hadooprc
{
  if [[ -f "${HOME}/.hadooprc" ]]; then
    hadoop_debug "Applying the user's .hadooprc"
    # shellcheck disable=SC1090
    . "${HOME}/.hadooprc"
  fi
}

## @description  Import shellprofile.d content
## @audience     private
## @stability    evolving
## @replaceable  yes
function hadoop_import_shellprofiles
{
  local i
  local files1
  local files2

  if [[ -d "${HADOOP_LIBEXEC_DIR}/shellprofile.d" ]]; then
    files1=(${HADOOP_LIBEXEC_DIR}/shellprofile.d/*.sh)
    hadoop_debug "shellprofiles: ${files1[*]}"
  else
    hadoop_error "WARNING: ${HADOOP_LIBEXEC_DIR}/shellprofile.d doesn't exist. Functionality may not work."
  fi

  if [[ -d "${HADOOP_CONF_DIR}/shellprofile.d" ]]; then
    files2=(${HADOOP_CONF_DIR}/shellprofile.d/*.sh)
  fi

  # enable bundled shellprofiles that come
  # from hadoop-tools. This converts the user-facing HADOOP_OPTIONAL_TOOLS
  # to the HADOOP_TOOLS_OPTIONS that the shell profiles expect.
  # See dist-tools-hooks-maker for how the example HADOOP_OPTIONAL_TOOLS
  # gets populated into hadoop-env.sh

  for i in ${HADOOP_OPTIONAL_TOOLS//,/ }; do
    hadoop_add_entry HADOOP_TOOLS_OPTIONS "${i}"
  done

  for i in "${files1[@]}" "${files2[@]}"
  do
    if [[ -n "${i}"
       && -f "${i}" ]]; then
      hadoop_debug "Profiles: importing ${i}"
      # shellcheck disable=SC1090
      . "${i}"
    fi
  done
}

## @description  Initialize the registered shell profiles
## @audience     private
## @stability    evolving
## @replaceable  yes
function hadoop_shellprofiles_init
{
  local i

  for i in ${HADOOP_SHELL_PROFILES}
  do
    if declare -F _${i}_hadoop_init >/dev/null ; then
      hadoop_debug "Profiles: ${i} init"
      # shellcheck disable=SC2086
      _${i}_hadoop_init
    fi
  done
}

## @description  Apply the shell profile classpath additions
## @audience     private
## @stability    evolving
## @replaceable  yes
function hadoop_shellprofiles_classpath
{
  local i

  for i in ${HADOOP_SHELL_PROFILES}
  do
    if declare -F _${i}_hadoop_classpath >/dev/null ; then
      hadoop_debug "Profiles: ${i} classpath"
      # shellcheck disable=SC2086
      _${i}_hadoop_classpath
    fi
  done
}

## @description  Apply the shell profile native library additions
## @audience     private
## @stability    evolving
## @replaceable  yes
function hadoop_shellprofiles_nativelib
{
  local i

  for i in ${HADOOP_SHELL_PROFILES}
  do
    if declare -F _${i}_hadoop_nativelib >/dev/null ; then
      hadoop_debug "Profiles: ${i} nativelib"
      # shellcheck disable=SC2086
      _${i}_hadoop_nativelib
    fi
  done
}

## @description  Apply the shell profile final configuration
## @audience     private
## @stability    evolving
## @replaceable  yes
function hadoop_shellprofiles_finalize
{
  local i

  for i in ${HADOOP_SHELL_PROFILES}
  do
    if declare -F _${i}_hadoop_finalize >/dev/null ; then
      hadoop_debug "Profiles: ${i} finalize"
      # shellcheck disable=SC2086
      _${i}_hadoop_finalize
    fi
  done
}
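
# Example (illustrative): a shell profile registers itself with
# hadoop_add_profile and may define any of the
# _<name>_hadoop_{init,classpath,nativelib,finalize} hooks, which the
# routines above invoke in turn; "example" and its path are hypothetical:
#
#   hadoop_add_profile example
#
#   function _example_hadoop_classpath
#   {
#     hadoop_add_classpath "${HADOOP_TOOLS_HOME}/example/lib"'/*'
#   }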

## @description  Initialize the Hadoop shell environment, now that
## @description  user settings have been imported
## @audience     private
## @stability    evolving
## @replaceable  no
function hadoop_basic_init
{
  # Some of these are also set in hadoop-env.sh.
  # we still set them here just in case hadoop-env.sh is
  # broken in some way, set up defaults, etc.
  #
  # but it is important to note that if you update these
  # you also need to update hadoop-env.sh as well!!!

  CLASSPATH=""
  hadoop_debug "Initialize CLASSPATH"

  if [[ -z "${HADOOP_COMMON_HOME}" ]] &&
     [[ -d "${HADOOP_PREFIX}/${HADOOP_COMMON_DIR}" ]]; then
    export HADOOP_COMMON_HOME="${HADOOP_PREFIX}"
  fi

  # default policy file for service-level authorization
  HADOOP_POLICYFILE=${HADOOP_POLICYFILE:-"hadoop-policy.xml"}

  # define HADOOP_HDFS_HOME
  if [[ -z "${HADOOP_HDFS_HOME}" ]] &&
     [[ -d "${HADOOP_PREFIX}/${HDFS_DIR}" ]]; then
    export HADOOP_HDFS_HOME="${HADOOP_PREFIX}"
  fi

  # define HADOOP_YARN_HOME
  if [[ -z "${HADOOP_YARN_HOME}" ]] &&
     [[ -d "${HADOOP_PREFIX}/${YARN_DIR}" ]]; then
    export HADOOP_YARN_HOME="${HADOOP_PREFIX}"
  fi

  # define HADOOP_MAPRED_HOME
  if [[ -z "${HADOOP_MAPRED_HOME}" ]] &&
     [[ -d "${HADOOP_PREFIX}/${MAPRED_DIR}" ]]; then
    export HADOOP_MAPRED_HOME="${HADOOP_PREFIX}"
  fi

  if [[ ! -d "${HADOOP_COMMON_HOME}" ]]; then
    hadoop_error "ERROR: Invalid HADOOP_COMMON_HOME"
    exit 1
  fi

  if [[ ! -d "${HADOOP_HDFS_HOME}" ]]; then
    hadoop_error "ERROR: Invalid HADOOP_HDFS_HOME"
    exit 1
  fi

  if [[ ! -d "${HADOOP_YARN_HOME}" ]]; then
    hadoop_error "ERROR: Invalid HADOOP_YARN_HOME"
    exit 1
  fi

  if [[ ! -d "${HADOOP_MAPRED_HOME}" ]]; then
    hadoop_error "ERROR: Invalid HADOOP_MAPRED_HOME"
    exit 1
  fi

  # if for some reason the shell doesn't have $USER defined
  # let's define it as 'hadoop'
  HADOOP_IDENT_STRING=${HADOOP_IDENT_STRING:-$USER}
  HADOOP_IDENT_STRING=${HADOOP_IDENT_STRING:-hadoop}
  HADOOP_LOG_DIR=${HADOOP_LOG_DIR:-"${HADOOP_PREFIX}/logs"}
  HADOOP_LOGFILE=${HADOOP_LOGFILE:-hadoop.log}
  HADOOP_LOGLEVEL=${HADOOP_LOGLEVEL:-INFO}
  HADOOP_NICENESS=${HADOOP_NICENESS:-0}
  HADOOP_STOP_TIMEOUT=${HADOOP_STOP_TIMEOUT:-5}
  HADOOP_PID_DIR=${HADOOP_PID_DIR:-/tmp}
  HADOOP_ROOT_LOGGER=${HADOOP_ROOT_LOGGER:-${HADOOP_LOGLEVEL},console}
  HADOOP_DAEMON_ROOT_LOGGER=${HADOOP_DAEMON_ROOT_LOGGER:-${HADOOP_LOGLEVEL},RFA}
  HADOOP_SECURITY_LOGGER=${HADOOP_SECURITY_LOGGER:-INFO,NullAppender}
  HADOOP_SSH_OPTS=${HADOOP_SSH_OPTS:-"-o BatchMode=yes -o StrictHostKeyChecking=no -o ConnectTimeout=10s"}
  HADOOP_SECURE_LOG_DIR=${HADOOP_SECURE_LOG_DIR:-${HADOOP_LOG_DIR}}
  HADOOP_SECURE_PID_DIR=${HADOOP_SECURE_PID_DIR:-${HADOOP_PID_DIR}}
  HADOOP_SSH_PARALLEL=${HADOOP_SSH_PARALLEL:-10}
}

## @description  Set the slave support information to the contents
## @description  of `filename`
## @audience     public
## @stability    stable
## @replaceable  no
## @param        filename
## @return       will exit if file does not exist
function hadoop_populate_slaves_file
{
  local slavesfile=$1
  shift
  if [[ -f "${slavesfile}" ]]; then
    # shellcheck disable=2034
    HADOOP_SLAVES="${slavesfile}"
  elif [[ -f "${HADOOP_CONF_DIR}/${slavesfile}" ]]; then
    # shellcheck disable=2034
    HADOOP_SLAVES="${HADOOP_CONF_DIR}/${slavesfile}"
  else
    hadoop_error "ERROR: Cannot find hosts file \"${slavesfile}\""
    hadoop_exit_with_usage 1
  fi
}

## @description  Rotates the given `file` until `number` of
## @description  files exist.
## @audience     public
## @stability    stable
## @replaceable  no
## @param        filename
## @param        [number]
## @return       $? will contain last mv's return value
function hadoop_rotate_log
{
  #
  # Users are likely to replace this one for something
  # that gzips or uses dates or who knows what.
  #
  # be aware that &1 and &2 might go through here
  # so don't do anything too crazy...
  #
  local log=$1;
  local num=${2:-5};

  if [[ -f "${log}" ]]; then # rotate logs
    while [[ ${num} -gt 1 ]]; do
      #shellcheck disable=SC2086
      let prev=${num}-1
      if [[ -f "${log}.${prev}" ]]; then
        mv "${log}.${prev}" "${log}.${num}"
      fi
      num=${prev}
    done
    mv "${log}" "${log}.${num}"
  fi
}
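
# Example (illustrative): with num=5, foo.log.4 becomes foo.log.5,
# foo.log.3 becomes foo.log.4, and so on, then foo.log becomes foo.log.1:
#
#   hadoop_rotate_log "${HADOOP_LOG_DIR}/foo.log" 5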

## @description  Via ssh, log into `hostname` and run `command`
## @audience     private
## @stability    evolving
## @replaceable  yes
## @param        hostname
## @param        command
## @param        [...]
function hadoop_actual_ssh
{
  # we are passing this function to xargs
  # should get hostname followed by rest of command line
  local slave=$1
  shift

  # shellcheck disable=SC2086
  ssh ${HADOOP_SSH_OPTS} ${slave} $"${@// /\\ }" 2>&1 | sed "s/^/$slave: /"
}

## @description  Connect to ${HADOOP_SLAVES} or ${HADOOP_SLAVE_NAMES}
## @description  and execute command.
## @audience     private
## @stability    evolving
## @replaceable  yes
## @param        command
## @param        [...]
function hadoop_connect_to_hosts
{
  # shellcheck disable=SC2124
  local params="$@"
  local slave_file
  local tmpslvnames

  #
  # ssh (or whatever) to a host
  #
  # User can specify hostnames or a file where the hostnames are (not both)
  if [[ -n "${HADOOP_SLAVES}" && -n "${HADOOP_SLAVE_NAMES}" ]] ; then
    hadoop_error "ERROR: Both HADOOP_SLAVES and HADOOP_SLAVE_NAMES were defined. Aborting."
    exit 1
  elif [[ -z "${HADOOP_SLAVE_NAMES}" ]]; then
    slave_file=${HADOOP_SLAVES:-${HADOOP_CONF_DIR}/slaves}
  fi

  # if pdsh is available, let's use it. otherwise default
  # to a loop around ssh. (ugh)
  if [[ -e '/usr/bin/pdsh' ]]; then
    if [[ -z "${HADOOP_SLAVE_NAMES}" ]] ; then
      # if we were given a file, just let pdsh deal with it.
      # shellcheck disable=SC2086
      PDSH_SSH_ARGS_APPEND="${HADOOP_SSH_OPTS}" pdsh \
        -f "${HADOOP_SSH_PARALLEL}" -w ^"${slave_file}" $"${@// /\\ }" 2>&1
    else
      # no spaces allowed in the pdsh arg host list
      # shellcheck disable=SC2086
      tmpslvnames=$(echo ${HADOOP_SLAVE_NAMES} | tr -s ' ' ,)
      PDSH_SSH_ARGS_APPEND="${HADOOP_SSH_OPTS}" pdsh \
        -f "${HADOOP_SSH_PARALLEL}" \
        -w "${tmpslvnames}" $"${@// /\\ }" 2>&1
    fi
  else
    if [[ -z "${HADOOP_SLAVE_NAMES}" ]]; then
      HADOOP_SLAVE_NAMES=$(sed 's/#.*$//;/^$/d' "${slave_file}")
    fi
    hadoop_connect_to_hosts_without_pdsh "${params}"
  fi
}

## @description  Connect to ${HADOOP_SLAVE_NAMES} and execute command
## @description  under the environment which does not support pdsh.
## @audience     private
## @stability    evolving
## @replaceable  yes
## @param        command
## @param        [...]
function hadoop_connect_to_hosts_without_pdsh
{
  # shellcheck disable=SC2124
  local params="$@"
  local slaves=(${HADOOP_SLAVE_NAMES})
  for (( i = 0; i < ${#slaves[@]}; i++ ))
  do
    if (( i != 0 && i % HADOOP_SSH_PARALLEL == 0 )); then
      wait
    fi
    # shellcheck disable=SC2086
    hadoop_actual_ssh "${slaves[$i]}" ${params} &
  done
  wait
}

## @description  Utility routine to handle --slaves mode
## @audience     private
## @stability    evolving
## @replaceable  yes
## @param        commandarray
function hadoop_common_slave_mode_execute
{
  #
  # input should be the command line as given by the user
  # in the form of an array
  #
  local argv=("$@")

  # if --slaves is still on the command line, remove it
  # to prevent loops
  # Also remove --hostnames and --hosts along with arg values
  local argsSize=${#argv[@]};
  for (( i = 0; i < argsSize; i++ ))
  do
    if [[ "${argv[$i]}" =~ ^--slaves$ ]]; then
      unset argv[$i]
    elif [[ "${argv[$i]}" =~ ^--hostnames$ ]] ||
      [[ "${argv[$i]}" =~ ^--hosts$ ]]; then
      unset argv[$i];
      let i++;
      unset argv[$i];
    fi
  done
  if [[ ${QATESTMODE} = true ]]; then
    echo "${argv[@]}"
    return
  fi
  hadoop_connect_to_hosts -- "${argv[@]}"
}

## @description  Verify that a shell command was passed a valid
## @description  class name
## @audience     public
## @stability    stable
## @replaceable  yes
## @param        classname
## @return       0 = success
## @return       1 = failure w/user message
function hadoop_validate_classname
{
  local class=$1
  shift 1

  if [[ ! ${class} =~ \. ]]; then
    # assume the arg is a typo of a command if it does not contain ".".
    # as a result, classes belonging to no package are not allowed.
    hadoop_error "ERROR: ${class} is not COMMAND nor fully qualified CLASSNAME."
    return 1
  fi
  return 0
}
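
# Example (illustrative): anything without a "." is treated as a
# mistyped subcommand rather than a class:
#
#   hadoop_validate_classname org.apache.hadoop.util.Shell   # returns 0
#   hadoop_validate_classname Shell                          # returns 1 + error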

## @description  Append the `appendstring` if `checkstring` is not
## @description  present in the given `envvar`
## @audience     public
## @stability    stable
## @replaceable  yes
## @param        envvar
## @param        checkstring
## @param        appendstring
function hadoop_add_param
{
  #
  # general param dedupe..
  # $1 is what we are adding to
  # $2 is the name of what we want to add (key)
  # $3 is the key+value of what we're adding
  #
  # doing it this way allows us to support all sorts of
  # different syntaxes, just so long as they are space
  # delimited
  #
  if [[ ! ${!1} =~ $2 ]] ; then
    #shellcheck disable=SC2140
    eval "$1"="'${!1} $3'"
    if [[ ${!1:0:1} = ' ' ]]; then
      #shellcheck disable=SC2140
      eval "$1"="'${!1# }'"
    fi
    hadoop_debug "$1 accepted $3"
  else
    hadoop_debug "$1 declined $3"
  fi
}
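
# Example (illustrative): the key in $2 dedupes; the value in $3 is
# only appended when the key is absent:
#
#   hadoop_add_param HADOOP_OPTS Xmx "-Xmx1g"   # accepted
#   hadoop_add_param HADOOP_OPTS Xmx "-Xmx2g"   # declined, Xmx already set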

## @description  Register the given `shellprofile` to the Hadoop
## @description  shell subsystem
## @audience     public
## @stability    stable
## @replaceable  yes
## @param        shellprofile
function hadoop_add_profile
{
  # shellcheck disable=SC2086
  hadoop_add_param HADOOP_SHELL_PROFILES $1 $1
}

## @description  Add a file system object (directory, file,
## @description  wildcard, ...) to the classpath. Optionally provide
## @description  a hint as to where in the classpath it should go.
## @audience     public
## @stability    stable
## @replaceable  yes
## @param        object
## @param        [before|after]
## @return       0 = success (added or duplicate)
## @return       1 = failure (doesn't exist or some other reason)
function hadoop_add_classpath
{
  # However, with classpath (& JLP), we can do dedupe
  # along with some sanity checking (e.g., missing directories)
  # since we have a better idea of what is legal
  #
  # for wildcard at end, we can
  # at least check the dir exists
  if [[ $1 =~ ^.*\*$ ]]; then
    local mp
    mp=$(dirname "$1")
    if [[ ! -d "${mp}" ]]; then
      hadoop_debug "Rejected CLASSPATH: $1 (not a dir)"
      return 1
    fi

  # no wildcard in the middle, so check existence
  # (doesn't matter *what* it is)
  elif [[ ! $1 =~ ^.*\*.*$ ]] && [[ ! -e "$1" ]]; then
    hadoop_debug "Rejected CLASSPATH: $1 (does not exist)"
    return 1
  fi
  if [[ -z "${CLASSPATH}" ]]; then
    CLASSPATH=$1
    hadoop_debug "Initial CLASSPATH=$1"
  elif [[ ":${CLASSPATH}:" != *":$1:"* ]]; then
    if [[ "$2" = "before" ]]; then
      CLASSPATH="$1:${CLASSPATH}"
      hadoop_debug "Prepend CLASSPATH: $1"
    else
      CLASSPATH+=:$1
      hadoop_debug "Append CLASSPATH: $1"
    fi
  else
    hadoop_debug "Dupe CLASSPATH: $1"
  fi
  return 0
}
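
# Example (illustrative):
#
#   hadoop_add_classpath "${HADOOP_CONF_DIR}" before       # prepend a dir
#   hadoop_add_classpath "${HADOOP_COMMON_HOME}/lib"'/*'   # append a wildcard
#   hadoop_add_classpath /does/not/exist                   # rejected, returns 1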

## @description  Add a file system object (directory, file,
## @description  wildcard, ...) to the colonpath. Optionally provide
## @description  a hint as to where in the colonpath it should go.
## @description  Prior to adding, objects are checked for duplication
## @description  and existence. Many other functions use
## @description  this function as their base implementation
## @description  including `hadoop_add_javalibpath` and `hadoop_add_ldlibpath`.
## @audience     public
## @stability    stable
## @replaceable  yes
## @param        envvar
## @param        object
## @param        [before|after]
## @return       0 = success (added or duplicate)
## @return       1 = failure (doesn't exist or some other reason)
function hadoop_add_colonpath
{
  # this is CLASSPATH, JLP, etc but with dedupe but no
  # other checking
  if [[ -d "${2}" ]] && [[ ":${!1}:" != *":$2:"* ]]; then
    if [[ -z "${!1}" ]]; then
      # shellcheck disable=SC2086
      eval $1="'$2'"
      hadoop_debug "Initial colonpath($1): $2"
    elif [[ "$3" = "before" ]]; then
      # shellcheck disable=SC2086
      eval $1="'$2:${!1}'"
      hadoop_debug "Prepend colonpath($1): $2"
    else
      # shellcheck disable=SC2086
      eval $1+=":'$2'"
      hadoop_debug "Append colonpath($1): $2"
    fi
    return 0
  fi
  hadoop_debug "Rejected colonpath($1): $2"
  return 1
}

## @description  Add a file system object (directory, file,
## @description  wildcard, ...) to the Java JNI path. Optionally
## @description  provide a hint as to where in the Java JNI path
## @description  it should go.
## @audience     public
## @stability    stable
## @replaceable  yes
## @param        object
## @param        [before|after]
## @return       0 = success (added or duplicate)
## @return       1 = failure (doesn't exist or some other reason)
function hadoop_add_javalibpath
{
  # specialized function for a common use case
  hadoop_add_colonpath JAVA_LIBRARY_PATH "$1" "$2"
}

## @description  Add a file system object (directory, file,
## @description  wildcard, ...) to the LD_LIBRARY_PATH. Optionally
## @description  provide a hint as to where in the LD_LIBRARY_PATH
## @description  it should go.
## @audience     public
## @stability    stable
## @replaceable  yes
## @param        object
## @param        [before|after]
## @return       0 = success (added or duplicate)
## @return       1 = failure (doesn't exist or some other reason)
function hadoop_add_ldlibpath
{
  local status
  # specialized function for a common use case
  hadoop_add_colonpath LD_LIBRARY_PATH "$1" "$2"
  status=$?

  # note that we export this
  export LD_LIBRARY_PATH
  return ${status}
}

## @description  Add the common/core Hadoop components to the
## @description  environment
## @audience     private
## @stability    evolving
## @replaceable  yes
## @returns      1 on failure, may exit
## @returns      0 on success
function hadoop_add_common_to_classpath
{
  #
  # get all of the common jars+config in the path
  #
  if [[ -z "${HADOOP_COMMON_HOME}"
     || -z "${HADOOP_COMMON_DIR}"
     || -z "${HADOOP_COMMON_LIB_JARS_DIR}" ]]; then
    hadoop_debug "COMMON_HOME=${HADOOP_COMMON_HOME}"
    hadoop_debug "COMMON_DIR=${HADOOP_COMMON_DIR}"
    hadoop_debug "COMMON_LIB_JARS_DIR=${HADOOP_COMMON_LIB_JARS_DIR}"
    hadoop_error "ERROR: HADOOP_COMMON_HOME or related vars are not configured."
    exit 1
  fi

  # developers
  if [[ -n "${HADOOP_ENABLE_BUILD_PATHS}" ]]; then
    hadoop_add_classpath "${HADOOP_COMMON_HOME}/hadoop-common/target/classes"
  fi

  hadoop_add_classpath "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_LIB_JARS_DIR}"'/*'
  hadoop_add_classpath "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_DIR}"'/*'
}

## @description  Run libexec/tools/module.sh to add to the classpath
## @description  environment
## @audience     private
## @stability    evolving
## @replaceable  yes
## @param        module
function hadoop_add_to_classpath_tools
{
  declare module=$1

  if [[ -f "${HADOOP_LIBEXEC_DIR}/tools/${module}.sh" ]]; then
    # shellcheck disable=SC1090
    . "${HADOOP_LIBEXEC_DIR}/tools/${module}.sh"
  else
    hadoop_error "ERROR: Tools helper ${HADOOP_LIBEXEC_DIR}/tools/${module}.sh was not found."
  fi

  if declare -f hadoop_classpath_tools_${module} >/dev/null 2>&1; then
    "hadoop_classpath_tools_${module}"
  fi
}

## @description  Add the user's custom classpath settings to the
## @description  environment
## @audience     private
## @stability    evolving
## @replaceable  yes
function hadoop_add_to_classpath_userpath
{
  # Add the user-specified HADOOP_CLASSPATH to the
  # official CLASSPATH env var if HADOOP_USE_CLIENT_CLASSLOADER
  # is not set.
  # Add it first or last depending on if user has
  # set env-var HADOOP_USER_CLASSPATH_FIRST
  # we'll also dedupe it, because we're cool like that.
  #
  declare -a array
  declare -i c=0
  declare -i j
  declare -i i
  declare idx

  if [[ -n "${HADOOP_CLASSPATH}" ]]; then
    # I wonder if Java runs on VMS.
    for idx in $(echo "${HADOOP_CLASSPATH}" | tr : '\n'); do
      array[${c}]=${idx}
      ((c=c+1))
    done

    # bats gets confused by j getting set to 0
    ((j=c-1)) || ${QATESTMODE}

    if [[ -z "${HADOOP_USE_CLIENT_CLASSLOADER}" ]]; then
      if [[ -z "${HADOOP_USER_CLASSPATH_FIRST}" ]]; then
        for ((i=0; i<=j; i++)); do
          hadoop_add_classpath "${array[$i]}" after
        done
      else
        for ((i=j; i>=0; i--)); do
          hadoop_add_classpath "${array[$i]}" before
        done
      fi
    fi
  fi
}

## @description  Routine to configure any OS-specific settings.
## @audience     public
## @stability    stable
## @replaceable  yes
## @return       may exit on failure conditions
function hadoop_os_tricks
{
  local bindv6only

  HADOOP_IS_CYGWIN=false
  case ${HADOOP_OS_TYPE} in
    Darwin)
      if [[ -z "${JAVA_HOME}" ]]; then
        if [[ -x /usr/libexec/java_home ]]; then
          JAVA_HOME="$(/usr/libexec/java_home)"
          export JAVA_HOME
        else
          JAVA_HOME=/Library/Java/Home
          export JAVA_HOME
        fi
      fi
    ;;
    Linux)
      # Newer versions of glibc use an arena memory allocator that
      # causes virtual memory usage to explode. This interacts badly
      # with the many threads that we use in Hadoop. Tune the variable
      # down to prevent vmem explosion.
      export MALLOC_ARENA_MAX=${MALLOC_ARENA_MAX:-4}

      # skip the rest in QA test mode so that non-Linux hosts can test
      if [[ "${QATESTMODE}" = true ]]; then
        return
      fi

      # NOTE! HADOOP_ALLOW_IPV6 is a developer hook. We leave it
      # undocumented in hadoop-env.sh because we don't want users to
      # shoot themselves in the foot while devs make IPv6 work.
      bindv6only=$(/sbin/sysctl -n net.ipv6.bindv6only 2> /dev/null)
      if [[ -n "${bindv6only}" ]] &&
         [[ "${bindv6only}" -eq "1" ]] &&
         [[ "${HADOOP_ALLOW_IPV6}" != "yes" ]]; then
        hadoop_error "ERROR: \"net.ipv6.bindv6only\" is set to 1 "
        hadoop_error "ERROR: Hadoop networking could be broken. Aborting."
        hadoop_error "ERROR: For more info: http://wiki.apache.org/hadoop/HadoopIPv6"
        exit 1
      fi
    ;;
    CYGWIN*)
      # Flag that we're running on Cygwin to trigger path translation later.
      HADOOP_IS_CYGWIN=true
    ;;
  esac
}

## @description  Configure/verify ${JAVA_HOME}
## @audience     public
## @stability    stable
## @replaceable  yes
## @return       may exit on failure conditions
function hadoop_java_setup
{
  # Bail if we did not detect it
  if [[ -z "${JAVA_HOME}" ]]; then
    hadoop_error "ERROR: JAVA_HOME is not set and could not be found."
    exit 1
  fi

  if [[ ! -d "${JAVA_HOME}" ]]; then
    hadoop_error "ERROR: JAVA_HOME ${JAVA_HOME} does not exist."
    exit 1
  fi

  JAVA="${JAVA_HOME}/bin/java"

  if [[ ! -x "$JAVA" ]]; then
    hadoop_error "ERROR: $JAVA is not executable."
    exit 1
  fi
}

## @description  Finish Java JNI paths prior to execution
## @audience     private
## @stability    evolving
## @replaceable  yes
function hadoop_finalize_libpaths
{
  if [[ -n "${JAVA_LIBRARY_PATH}" ]]; then
    hadoop_translate_cygwin_path JAVA_LIBRARY_PATH
    hadoop_add_param HADOOP_OPTS java.library.path \
      "-Djava.library.path=${JAVA_LIBRARY_PATH}"
    export LD_LIBRARY_PATH
  fi
}

## @description  Finish Java heap parameters prior to execution
## @audience     private
## @stability    evolving
## @replaceable  yes
function hadoop_finalize_hadoop_heap
{
  if [[ -n "${HADOOP_HEAPSIZE_MAX}" ]]; then
    if [[ "${HADOOP_HEAPSIZE_MAX}" =~ ^[0-9]+$ ]]; then
      HADOOP_HEAPSIZE_MAX="${HADOOP_HEAPSIZE_MAX}m"
    fi
    hadoop_add_param HADOOP_OPTS Xmx "-Xmx${HADOOP_HEAPSIZE_MAX}"
  fi

  # backwards compatibility
  if [[ -n "${HADOOP_HEAPSIZE}" ]]; then
    if [[ "${HADOOP_HEAPSIZE}" =~ ^[0-9]+$ ]]; then
      HADOOP_HEAPSIZE="${HADOOP_HEAPSIZE}m"
    fi
    hadoop_add_param HADOOP_OPTS Xmx "-Xmx${HADOOP_HEAPSIZE}"
  fi

  if [[ -n "${HADOOP_HEAPSIZE_MIN}" ]]; then
    if [[ "${HADOOP_HEAPSIZE_MIN}" =~ ^[0-9]+$ ]]; then
      HADOOP_HEAPSIZE_MIN="${HADOOP_HEAPSIZE_MIN}m"
    fi
    hadoop_add_param HADOOP_OPTS Xms "-Xms${HADOOP_HEAPSIZE_MIN}"
  fi
}
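
# Example (illustrative): a bare number is interpreted as megabytes,
# while explicit units pass through untouched:
#
#   HADOOP_HEAPSIZE_MAX=4096   # becomes -Xmx4096m
#   HADOOP_HEAPSIZE_MAX=4g     # becomes -Xmx4g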

## @description  Converts the contents of the variable name
## @description  `varnameref` into the equivalent Windows path.
## @description  If the second parameter is true, then `varnameref`
## @description  is treated as though it was a path list.
## @audience     public
## @stability    stable
## @replaceable  yes
## @param        varnameref
## @param        [true]
function hadoop_translate_cygwin_path
{
  if [[ "${HADOOP_IS_CYGWIN}" = "true" ]]; then
    if [[ "$2" = "true" ]]; then
      #shellcheck disable=SC2016
      eval "$1"='$(cygpath -p -w "${!1}" 2>/dev/null)'
    else
      #shellcheck disable=SC2016
      eval "$1"='$(cygpath -w "${!1}" 2>/dev/null)'
    fi
  fi
}
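
# Example (illustrative; only effective when HADOOP_IS_CYGWIN=true,
# a no-op everywhere else):
#
#   HADOOP_LOG_DIR=/cygdrive/c/hadoop/logs
#   hadoop_translate_cygwin_path HADOOP_LOG_DIR   # => C:\hadoop\logs
#   hadoop_translate_cygwin_path CLASSPATH true   # value treated as a path list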

## @description  Finish configuring Hadoop specific system properties
## @description  prior to executing Java
## @audience     private
## @stability    evolving
## @replaceable  yes
function hadoop_finalize_hadoop_opts
{
  hadoop_translate_cygwin_path HADOOP_LOG_DIR
  hadoop_add_param HADOOP_OPTS hadoop.log.dir "-Dhadoop.log.dir=${HADOOP_LOG_DIR}"
  hadoop_add_param HADOOP_OPTS hadoop.log.file "-Dhadoop.log.file=${HADOOP_LOGFILE}"
  HADOOP_HOME=${HADOOP_PREFIX}
  hadoop_translate_cygwin_path HADOOP_HOME
  export HADOOP_HOME
  hadoop_add_param HADOOP_OPTS hadoop.home.dir "-Dhadoop.home.dir=${HADOOP_HOME}"
  hadoop_add_param HADOOP_OPTS hadoop.id.str "-Dhadoop.id.str=${HADOOP_IDENT_STRING}"
  hadoop_add_param HADOOP_OPTS hadoop.root.logger "-Dhadoop.root.logger=${HADOOP_ROOT_LOGGER}"
  hadoop_add_param HADOOP_OPTS hadoop.policy.file "-Dhadoop.policy.file=${HADOOP_POLICYFILE}"
  hadoop_add_param HADOOP_OPTS hadoop.security.logger "-Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER}"
}

## @description  Finish Java classpath prior to execution
## @audience     private
## @stability    evolving
## @replaceable  yes
function hadoop_finalize_classpath
{
  hadoop_add_classpath "${HADOOP_CONF_DIR}" before

  # user classpath gets added at the last minute. this allows
  # override of CONF dirs and more
  hadoop_add_to_classpath_userpath
  hadoop_translate_cygwin_path CLASSPATH true
}

## @description  Finish Catalina configuration prior to execution
## @audience     private
## @stability    evolving
## @replaceable  yes
function hadoop_finalize_catalina_opts
{
  local prefix=${HADOOP_CATALINA_PREFIX}

  hadoop_add_param CATALINA_OPTS hadoop.home.dir "-Dhadoop.home.dir=${HADOOP_PREFIX}"
  if [[ -n "${JAVA_LIBRARY_PATH}" ]]; then
    hadoop_add_param CATALINA_OPTS java.library.path "-Djava.library.path=${JAVA_LIBRARY_PATH}"
  fi
  hadoop_add_param CATALINA_OPTS "${prefix}.home.dir" "-D${prefix}.home.dir=${HADOOP_PREFIX}"
  hadoop_add_param CATALINA_OPTS "${prefix}.config.dir" "-D${prefix}.config.dir=${HADOOP_CATALINA_CONFIG}"
  hadoop_add_param CATALINA_OPTS "${prefix}.log.dir" "-D${prefix}.log.dir=${HADOOP_CATALINA_LOG}"
  hadoop_add_param CATALINA_OPTS "${prefix}.temp.dir" "-D${prefix}.temp.dir=${HADOOP_CATALINA_TEMP}"
  hadoop_add_param CATALINA_OPTS "${prefix}.admin.port" "-D${prefix}.admin.port=${HADOOP_CATALINA_ADMIN_PORT}"
  hadoop_add_param CATALINA_OPTS "${prefix}.http.port" "-D${prefix}.http.port=${HADOOP_CATALINA_HTTP_PORT}"
  hadoop_add_param CATALINA_OPTS "${prefix}.max.threads" "-D${prefix}.max.threads=${HADOOP_CATALINA_MAX_THREADS}"
  hadoop_add_param CATALINA_OPTS "${prefix}.max.http.header.size" "-D${prefix}.max.http.header.size=${HADOOP_CATALINA_MAX_HTTP_HEADER_SIZE}"
  hadoop_add_param CATALINA_OPTS "${prefix}.ssl.keystore.file" "-D${prefix}.ssl.keystore.file=${HADOOP_CATALINA_SSL_KEYSTORE_FILE}"
}

## @description  Finish all the remaining environment settings prior
## @description  to executing Java. This is a wrapper that calls
## @description  the other `finalize` routines.
## @audience     private
## @stability    evolving
## @replaceable  yes
function hadoop_finalize
{
  hadoop_shellprofiles_finalize

  hadoop_finalize_classpath
  hadoop_finalize_libpaths
  hadoop_finalize_hadoop_heap
  hadoop_finalize_hadoop_opts

  hadoop_translate_cygwin_path HADOOP_PREFIX
  hadoop_translate_cygwin_path HADOOP_CONF_DIR
  hadoop_translate_cygwin_path HADOOP_COMMON_HOME
  hadoop_translate_cygwin_path HADOOP_HDFS_HOME
  hadoop_translate_cygwin_path HADOOP_YARN_HOME
  hadoop_translate_cygwin_path HADOOP_MAPRED_HOME
}

## @description  Print usage information and exit with the passed
## @description  `exitcode`
## @audience     public
## @stability    stable
## @replaceable  no
## @param        exitcode
## @return       This function will always exit.
function hadoop_exit_with_usage
{
  local exitcode=$1

  if [[ -z $exitcode ]]; then
    exitcode=1
  fi
  # shellcheck disable=SC2034
  if declare -F hadoop_usage >/dev/null ; then
    hadoop_usage
  elif [[ -x /usr/bin/cowsay ]]; then
    /usr/bin/cowsay -f elephant "Sorry, no help available."
  else
    hadoop_error "Sorry, no help available."
  fi
  exit $exitcode
}

## @description  Verify that prerequisites have been met prior to
## @description  executing a privileged program.
## @audience     private
## @stability    evolving
## @replaceable  yes
## @return       This routine may exit.
function hadoop_verify_secure_prereq
{
  # if you are on an OS like Illumos that has functional roles
  # and you are using pfexec, you'll probably want to change
  # this.

  # ${EUID} comes from the shell itself!
  if [[ "${EUID}" -ne 0 ]] && [[ -z "${HADOOP_SECURE_COMMAND}" ]]; then
    hadoop_error "ERROR: You must be a privileged user in order to run a secure service."
    exit 1
  else
    return 0
  fi
}

## @audience     private
## @stability    evolving
## @replaceable  yes
function hadoop_setup_secure_service
{
  # need a more complicated setup? replace me!
  HADOOP_PID_DIR=${HADOOP_SECURE_PID_DIR}
  HADOOP_LOG_DIR=${HADOOP_SECURE_LOG_DIR}
}

## @audience     private
## @stability    evolving
## @replaceable  yes
function hadoop_verify_piddir
{
  if [[ -z "${HADOOP_PID_DIR}" ]]; then
    hadoop_error "No pid directory defined."
    exit 1
  fi
  if [[ ! -w "${HADOOP_PID_DIR}" ]] && [[ ! -d "${HADOOP_PID_DIR}" ]]; then
    hadoop_error "WARNING: ${HADOOP_PID_DIR} does not exist. Creating."
    mkdir -p "${HADOOP_PID_DIR}" > /dev/null 2>&1
    if [[ $? -gt 0 ]]; then
      hadoop_error "ERROR: Unable to create ${HADOOP_PID_DIR}. Aborting."
      exit 1
    fi
  fi
  touch "${HADOOP_PID_DIR}/$$" >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Unable to write in ${HADOOP_PID_DIR}. Aborting."
    exit 1
  fi
  rm "${HADOOP_PID_DIR}/$$" >/dev/null 2>&1
}

## @audience     private
## @stability    evolving
## @replaceable  yes
function hadoop_verify_logdir
{
  if [[ -z "${HADOOP_LOG_DIR}" ]]; then
    hadoop_error "No log directory defined."
    exit 1
  fi
  if [[ ! -w "${HADOOP_LOG_DIR}" ]] && [[ ! -d "${HADOOP_LOG_DIR}" ]]; then
    hadoop_error "WARNING: ${HADOOP_LOG_DIR} does not exist. Creating."
    mkdir -p "${HADOOP_LOG_DIR}" > /dev/null 2>&1
    if [[ $? -gt 0 ]]; then
      hadoop_error "ERROR: Unable to create ${HADOOP_LOG_DIR}. Aborting."
      exit 1
    fi
  fi
  touch "${HADOOP_LOG_DIR}/$$" >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Unable to write in ${HADOOP_LOG_DIR}. Aborting."
    exit 1
  fi
  rm "${HADOOP_LOG_DIR}/$$" >/dev/null 2>&1
}

## @description  Determine the status of the daemon referenced
## @description  by `pidfile`
## @audience     public
## @stability    stable
## @replaceable  yes
## @param        pidfile
## @return       (mostly) LSB 4.1.0 compatible status
function hadoop_status_daemon
{
  #
  # LSB 4.1.0 compatible status command (1)
  #
  # 0 = program is running
  # 1 = dead, but still a pid (2)
  # 2 = (not used by us)
  # 3 = not running
  #
  # 1 - this is not an endorsement of the LSB
  #
  # 2 - technically, the specification says /var/run/pid, so
  #     we should never return this value, but we're giving
  #     them the benefit of a doubt and returning 1 even if
  #     our pid is not in /var/run .
  #
  local pidfile=$1
  shift

  local pid

  if [[ -f "${pidfile}" ]]; then
    pid=$(cat "${pidfile}")
    if ps -p "${pid}" > /dev/null 2>&1; then
      return 0
    fi
    return 1
  fi
  return 3
}
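
# Example (illustrative; the pid file name is hypothetical):
#
#   hadoop_status_daemon "${HADOOP_PID_DIR}/hadoop-namenode.pid"
#   case $? in
#     0) echo "running" ;;
#     1) echo "dead, but pid file exists" ;;
#     3) echo "not running" ;;
#   esac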
  1300. ## @description Execute the Java `class`, passing along any `options`.
  1301. ## @description Additionally, set the Java property -Dproc_`command`.
  1302. ## @audience public
  1303. ## @stability stable
  1304. ## @replaceable yes
  1305. ## @param command
  1306. ## @param class
  1307. ## @param [options]
  1308. function hadoop_java_exec
  1309. {
  1310. # run a java command. this is used for
  1311. # non-daemons
  1312. local command=$1
  1313. local class=$2
  1314. shift 2
  1315. hadoop_debug "Final CLASSPATH: ${CLASSPATH}"
  1316. hadoop_debug "Final HADOOP_OPTS: ${HADOOP_OPTS}"
  1317. hadoop_debug "Final JAVA_HOME: ${JAVA_HOME}"
  1318. hadoop_debug "java: ${JAVA}"
  1319. hadoop_debug "Class name: ${class}"
  1320. hadoop_debug "Command line options: $*"
  1321. export CLASSPATH
  1322. #shellcheck disable=SC2086
  1323. exec "${JAVA}" "-Dproc_${command}" ${HADOOP_OPTS} "${class}" "$@"
  1324. }

## @description Start a non-privileged daemon in the foreground.
## @audience private
## @stability evolving
## @replaceable yes
## @param command
## @param class
## @param pidfile
## @param [options]
function hadoop_start_daemon
{
  # this is our non-privileged daemon starter
  # that fires up a daemon in the *foreground*
  # so complex! so wow! much java!
  local command=$1
  local class=$2
  local pidfile=$3
  shift 3

  hadoop_debug "Final CLASSPATH: ${CLASSPATH}"
  hadoop_debug "Final HADOOP_OPTS: ${HADOOP_OPTS}"
  hadoop_debug "Final JAVA_HOME: ${JAVA_HOME}"
  hadoop_debug "java: ${JAVA}"
  hadoop_debug "Class name: ${class}"
  hadoop_debug "Command line options: $*"

  # this is for the non-daemon pid creation
  #shellcheck disable=SC2086
  echo $$ > "${pidfile}" 2>/dev/null
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot write ${command} pid ${pidfile}."
  fi

  export CLASSPATH
  #shellcheck disable=SC2086
  exec "${JAVA}" "-Dproc_${command}" ${HADOOP_OPTS} "${class}" "$@"
}
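
# Illustrative usage (a sketch; the class and pidfile are hypothetical
# examples). Because of the exec, the pid recorded above becomes the
# daemon's pid:
#
#   hadoop_start_daemon datanode \
#     org.apache.hadoop.hdfs.server.datanode.DataNode \
#     "${HADOOP_PID_DIR}/hadoop-${USER}-datanode.pid"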

## @description Start a non-privileged daemon in the background.
## @audience private
## @stability evolving
## @replaceable yes
## @param command
## @param class
## @param pidfile
## @param outfile
## @param [options]
function hadoop_start_daemon_wrapper
{
  local daemonname=$1
  local class=$2
  local pidfile=$3
  local outfile=$4
  shift 4

  local counter

  hadoop_rotate_log "${outfile}"

  hadoop_start_daemon "${daemonname}" \
    "${class}" \
    "${pidfile}" \
    "$@" >> "${outfile}" 2>&1 < /dev/null &

  # we need to avoid a race condition here
  # so let's wait for the fork to finish
  # before overriding with the daemonized pid
  (( counter=0 ))
  while [[ ! -f ${pidfile} && ${counter} -le 5 ]]; do
    sleep 1
    (( counter++ ))
  done

  # this is for daemon pid creation
  #shellcheck disable=SC2086
  echo $! > "${pidfile}" 2>/dev/null
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot write ${daemonname} pid ${pidfile}."
  fi

  # shellcheck disable=SC2086
  renice "${HADOOP_NICENESS}" $! >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot set priority of ${daemonname} process $!"
  fi

  # shellcheck disable=SC2086
  disown %+ >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot disconnect ${daemonname} process $!"
  fi
  sleep 1

  # capture the ulimit output
  ulimit -a >> "${outfile}" 2>&1

  # shellcheck disable=SC2086
  if ! ps -p $! >/dev/null 2>&1; then
    return 1
  fi
  return 0
}
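
# Illustrative usage (a sketch; the file names are hypothetical). The
# wrapper backgrounds the daemon and reports failure via its return code:
#
#   if ! hadoop_start_daemon_wrapper namenode \
#       org.apache.hadoop.hdfs.server.namenode.NameNode \
#       "${HADOOP_PID_DIR}/hadoop-${USER}-namenode.pid" \
#       "${HADOOP_LOG_DIR}/hadoop-${USER}-namenode.out"; then
#     hadoop_error "ERROR: Unable to start namenode."
#   fi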

## @description Start a privileged daemon in the foreground.
## @audience private
## @stability evolving
## @replaceable yes
## @param command
## @param class
## @param daemonpidfile
## @param daemonoutfile
## @param daemonerrfile
## @param wrapperpidfile
## @param [options]
function hadoop_start_secure_daemon
{
  # this is used to launch a secure daemon in the *foreground*
  #
  local daemonname=$1
  local class=$2

  # pid file to create for our daemon
  local daemonpidfile=$3

  # where to send stdout. jsvc has bad habits so this *may* be &1
  # which means you send it to stdout!
  local daemonoutfile=$4

  # where to send stderr. same thing, except &2 = stderr
  local daemonerrfile=$5
  local privpidfile=$6
  shift 6

  hadoop_rotate_log "${daemonoutfile}"
  hadoop_rotate_log "${daemonerrfile}"

  # shellcheck disable=SC2153
  jsvc="${JSVC_HOME}/jsvc"
  if [[ ! -f "${jsvc}" ]]; then
    hadoop_error "JSVC_HOME is not set or set incorrectly. jsvc is required to run secure"
    hadoop_error "or privileged daemons. Please download and install jsvc from "
    hadoop_error "http://archive.apache.org/dist/commons/daemon/binaries/ "
    hadoop_error "and set JSVC_HOME to the directory containing the jsvc binary."
    exit 1
  fi

  # note that shellcheck will throw a
  # bogus for-our-use-case 2086 here.
  # it doesn't properly support multi-line situations

  hadoop_debug "Final CLASSPATH: ${CLASSPATH}"
  hadoop_debug "Final HADOOP_OPTS: ${HADOOP_OPTS}"
  hadoop_debug "Final JSVC_HOME: ${JSVC_HOME}"
  hadoop_debug "jsvc: ${jsvc}"
  hadoop_debug "Class name: ${class}"
  hadoop_debug "Command line options: $*"

  #shellcheck disable=SC2086
  echo $$ > "${privpidfile}" 2>/dev/null
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot write ${daemonname} pid ${privpidfile}."
  fi

  # shellcheck disable=SC2086
  exec "${jsvc}" \
    "-Dproc_${daemonname}" \
    -outfile "${daemonoutfile}" \
    -errfile "${daemonerrfile}" \
    -pidfile "${daemonpidfile}" \
    -nodetach \
    -user "${HADOOP_SECURE_USER}" \
    -cp "${CLASSPATH}" \
    ${HADOOP_OPTS} \
    "${class}" "$@"
}
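
# Illustrative usage (a sketch; the user, paths, and class are hypothetical
# examples that would normally come from the caller's environment):
#
#   export HADOOP_SECURE_USER=hdfs
#   export JSVC_HOME=/usr/local/jsvc
#   hadoop_start_secure_daemon datanode \
#     org.apache.hadoop.hdfs.server.datanode.SecureDataNodeStarter \
#     "${HADOOP_PID_DIR}/hadoop-hdfs-datanode.pid" \
#     "${HADOOP_LOG_DIR}/hadoop-hdfs-datanode.out" \
#     "${HADOOP_LOG_DIR}/hadoop-hdfs-datanode.err" \
#     "${HADOOP_PID_DIR}/hadoop-hdfs-datanode.priv.pid"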

## @description Start a privileged daemon in the background.
## @audience private
## @stability evolving
## @replaceable yes
## @param command
## @param class
## @param daemonpidfile
## @param daemonoutfile
## @param wrapperpidfile
## @param wrapperoutfile
## @param daemonerrfile
## @param [options]
function hadoop_start_secure_daemon_wrapper
{
  # this wraps hadoop_start_secure_daemon to take care
  # of the dirty work to launch a daemon in the background!
  local daemonname=$1
  local class=$2

  # same rules as hadoop_start_secure_daemon except we
  # have some additional parameters
  local daemonpidfile=$3
  local daemonoutfile=$4

  # the pid file of the subprocess that spawned our
  # secure launcher
  local jsvcpidfile=$5

  # the output of the subprocess that spawned our secure
  # launcher
  local jsvcoutfile=$6

  local daemonerrfile=$7
  shift 7

  local counter

  hadoop_rotate_log "${jsvcoutfile}"

  hadoop_start_secure_daemon \
    "${daemonname}" \
    "${class}" \
    "${daemonpidfile}" \
    "${daemonoutfile}" \
    "${daemonerrfile}" \
    "${jsvcpidfile}" "$@" >> "${jsvcoutfile}" 2>&1 < /dev/null &

  # we need to avoid a race condition here
  # so let's wait for the fork to finish
  # before overriding with the daemonized pid
  (( counter=0 ))
  while [[ ! -f ${daemonpidfile} && ${counter} -le 5 ]]; do
    sleep 1
    (( counter++ ))
  done

  # this is for the daemon pid creation
  #shellcheck disable=SC2086
  echo $! > "${jsvcpidfile}" 2>/dev/null
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot write ${daemonname} pid ${jsvcpidfile}."
  fi

  sleep 1
  #shellcheck disable=SC2086
  renice "${HADOOP_NICENESS}" $! >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot set priority of ${daemonname} process $!"
  fi
  if [[ -f "${daemonpidfile}" ]]; then
    #shellcheck disable=SC2046
    renice "${HADOOP_NICENESS}" $(cat "${daemonpidfile}" 2>/dev/null) >/dev/null 2>&1
    if [[ $? -gt 0 ]]; then
      hadoop_error "ERROR: Cannot set priority of ${daemonname} process $(cat "${daemonpidfile}" 2>/dev/null)"
    fi
  fi
  #shellcheck disable=SC2046
  disown %+ >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot disconnect ${daemonname} process $!"
  fi

  # capture the ulimit output
  su "${HADOOP_SECURE_USER}" -c 'bash -c "ulimit -a"' >> "${jsvcoutfile}" 2>&1
  #shellcheck disable=SC2086
  if ! ps -p $! >/dev/null 2>&1; then
    return 1
  fi
  return 0
}
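
# Illustrative usage (a sketch; the variable names are hypothetical). Same
# arguments as hadoop_start_secure_daemon, plus the jsvc pidfile/outfile
# pair, in the order documented above:
#
#   hadoop_start_secure_daemon_wrapper datanode \
#     org.apache.hadoop.hdfs.server.datanode.SecureDataNodeStarter \
#     "${daemonpidfile}" "${daemonoutfile}" \
#     "${jsvcpidfile}" "${jsvcoutfile}" "${daemonerrfile}"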

## @description Stop the non-privileged `command` daemon that is
## @description running at `pidfile`.
## @audience public
## @stability stable
## @replaceable yes
## @param command
## @param pidfile
function hadoop_stop_daemon
{
  local cmd=$1
  local pidfile=$2
  shift 2

  local pid
  local cur_pid

  if [[ -f "${pidfile}" ]]; then
    pid=$(cat "$pidfile")

    kill "${pid}" >/dev/null 2>&1
    sleep "${HADOOP_STOP_TIMEOUT}"
    if kill -0 "${pid}" > /dev/null 2>&1; then
      hadoop_error "WARNING: ${cmd} did not stop gracefully after ${HADOOP_STOP_TIMEOUT} seconds: Trying to kill with kill -9"
      kill -9 "${pid}" >/dev/null 2>&1
    fi
    if ps -p "${pid}" > /dev/null 2>&1; then
      hadoop_error "ERROR: Unable to kill ${pid}"
    else
      cur_pid=$(cat "$pidfile")
      if [[ "${pid}" = "${cur_pid}" ]]; then
        rm -f "${pidfile}" >/dev/null 2>&1
      else
        hadoop_error "WARNING: pid has changed for ${cmd}, skip deleting pid file"
      fi
    fi
  fi
}
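
# Illustrative usage (a sketch; the pidfile path is hypothetical).
# HADOOP_STOP_TIMEOUT controls how long to wait before escalating to
# kill -9:
#
#   HADOOP_STOP_TIMEOUT=${HADOOP_STOP_TIMEOUT:-5}
#   hadoop_stop_daemon namenode "${HADOOP_PID_DIR}/hadoop-${USER}-namenode.pid"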

## @description Stop the privileged `command` daemon that is
## @description running at `daemonpidfile` and was launched with
## @description the wrapper at `wrapperpidfile`.
## @audience public
## @stability stable
## @replaceable yes
## @param command
## @param daemonpidfile
## @param wrapperpidfile
function hadoop_stop_secure_daemon
{
  local command=$1
  local daemonpidfile=$2
  local privpidfile=$3
  shift 3

  local ret
  local daemon_pid
  local priv_pid
  local cur_daemon_pid
  local cur_priv_pid

  daemon_pid=$(cat "$daemonpidfile")
  priv_pid=$(cat "$privpidfile")

  hadoop_stop_daemon "${command}" "${daemonpidfile}"
  ret=$?

  cur_daemon_pid=$(cat "$daemonpidfile")
  cur_priv_pid=$(cat "$privpidfile")

  if [[ "${daemon_pid}" = "${cur_daemon_pid}" ]]; then
    rm -f "${daemonpidfile}" >/dev/null 2>&1
  else
    hadoop_error "WARNING: daemon pid has changed for ${command}, skip deleting daemon pid file"
  fi

  if [[ "${priv_pid}" = "${cur_priv_pid}" ]]; then
    rm -f "${privpidfile}" >/dev/null 2>&1
  else
    hadoop_error "WARNING: priv pid has changed for ${command}, skip deleting priv pid file"
  fi
  return ${ret}
}
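
# Illustrative usage (a sketch; the paths are hypothetical). Each pid file
# is removed only if its contents still match the pid read on entry:
#
#   hadoop_stop_secure_daemon datanode \
#     "${HADOOP_PID_DIR}/hadoop-hdfs-datanode.pid" \
#     "${HADOOP_PID_DIR}/hadoop-hdfs-datanode.priv.pid"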

## @description Manage a non-privileged daemon.
## @audience private
## @stability evolving
## @replaceable yes
## @param [start|stop|status|default]
## @param command
## @param class
## @param daemonpidfile
## @param daemonoutfile
## @param [options]
function hadoop_daemon_handler
{
  local daemonmode=$1
  local daemonname=$2
  local class=$3
  local daemon_pidfile=$4
  local daemon_outfile=$5
  shift 5

  case ${daemonmode} in
    status)
      hadoop_status_daemon "${daemon_pidfile}"
      exit $?
    ;;

    stop)
      hadoop_stop_daemon "${daemonname}" "${daemon_pidfile}"
      exit $?
    ;;

    ##COMPAT -- older hadoops would also start daemons by default
    start|default)
      hadoop_verify_piddir
      hadoop_verify_logdir
      hadoop_status_daemon "${daemon_pidfile}"
      if [[ $? == 0 ]]; then
        hadoop_error "${daemonname} is running as process $(cat "${daemon_pidfile}"). Stop it first."
        exit 1
      else
        # stale pid file, so just remove it and continue on
        rm -f "${daemon_pidfile}" >/dev/null 2>&1
      fi

      ##COMPAT - differentiate between --daemon start and nothing
      # "nothing" shouldn't detach
      if [[ "$daemonmode" = "default" ]]; then
        hadoop_start_daemon "${daemonname}" "${class}" "${daemon_pidfile}" "$@"
      else
        hadoop_start_daemon_wrapper "${daemonname}" \
          "${class}" "${daemon_pidfile}" "${daemon_outfile}" "$@"
      fi
    ;;
  esac
}
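
# Illustrative usage (a sketch; a real caller such as bin/hdfs derives
# these values from its own command line):
#
#   hadoop_daemon_handler "${HADOOP_DAEMON_MODE}" namenode \
#     org.apache.hadoop.hdfs.server.namenode.NameNode \
#     "${daemon_pidfile}" "${daemon_outfile}" "$@"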

## @description Manage a privileged daemon.
## @audience private
## @stability evolving
## @replaceable yes
## @param [start|stop|status|default]
## @param command
## @param class
## @param daemonpidfile
## @param daemonoutfile
## @param wrapperpidfile
## @param wrapperoutfile
## @param wrappererrfile
## @param [options]
function hadoop_secure_daemon_handler
{
  local daemonmode=$1
  local daemonname=$2
  local classname=$3
  local daemon_pidfile=$4
  local daemon_outfile=$5
  local priv_pidfile=$6
  local priv_outfile=$7
  local priv_errfile=$8
  shift 8

  case ${daemonmode} in
    status)
      hadoop_status_daemon "${daemon_pidfile}"
      exit $?
    ;;

    stop)
      hadoop_stop_secure_daemon "${daemonname}" \
        "${daemon_pidfile}" "${priv_pidfile}"
      exit $?
    ;;

    ##COMPAT -- older hadoops would also start daemons by default
    start|default)
      hadoop_verify_piddir
      hadoop_verify_logdir
      hadoop_status_daemon "${daemon_pidfile}"
      if [[ $? == 0 ]]; then
        hadoop_error "${daemonname} is running as process $(cat "${daemon_pidfile}"). Stop it first."
        exit 1
      else
        # stale pid file, so just remove it and continue on
        rm -f "${daemon_pidfile}" >/dev/null 2>&1
      fi

      ##COMPAT - differentiate between --daemon start and nothing
      # "nothing" shouldn't detach
      if [[ "${daemonmode}" = "default" ]]; then
        hadoop_start_secure_daemon "${daemonname}" "${classname}" \
          "${daemon_pidfile}" "${daemon_outfile}" \
          "${priv_errfile}" "${priv_pidfile}" "$@"
      else
        hadoop_start_secure_daemon_wrapper "${daemonname}" "${classname}" \
          "${daemon_pidfile}" "${daemon_outfile}" \
          "${priv_pidfile}" "${priv_outfile}" "${priv_errfile}" "$@"
      fi
    ;;
  esac
}
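
# Illustrative usage (a sketch, mirroring hadoop_daemon_handler but with
# the three extra privileged-process files; the variable names are
# hypothetical):
#
#   hadoop_secure_daemon_handler "${HADOOP_DAEMON_MODE}" datanode \
#     org.apache.hadoop.hdfs.server.datanode.SecureDataNodeStarter \
#     "${daemon_pidfile}" "${daemon_outfile}" \
#     "${priv_pidfile}" "${priv_outfile}" "${priv_errfile}" "$@"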

## @description Verify that ${USER} is allowed to execute the
## @description given subcommand.
## @audience public
## @stability stable
## @replaceable yes
## @param subcommand
## @return will exit on failure conditions
function hadoop_verify_user
{
  local command=$1
  local uservar="HADOOP_${command}_USER"

  if [[ -n ${!uservar} ]]; then
    if [[ ${!uservar} != "${USER}" ]]; then
      hadoop_error "ERROR: ${command} can only be executed by ${!uservar}."
      exit 1
    fi
  fi
}
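
# Illustrative usage (a sketch; the variable below is an example of the
# HADOOP_<subcommand>_USER convention this function checks, with the
# subcommand name used verbatim):
#
#   export HADOOP_namenode_USER=hdfs
#   hadoop_verify_user namenode   # exits 1 unless USER is hdfs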

## @description Perform the 'hadoop classpath', etc., subcommand with the
## @description given parameters
## @audience private
## @stability evolving
## @replaceable yes
## @param [parameters]
## @return will print & exit with no params
function hadoop_do_classpath_subcommand
{
  if [[ "$#" -gt 1 ]]; then
    eval "$1"=org.apache.hadoop.util.Classpath
  else
    hadoop_finalize
    echo "${CLASSPATH}"
    exit 0
  fi
}
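
# Illustrative usage (a sketch; the variable name is an example). With
# extra parameters, the named variable is set to the Classpath tool's
# class for later execution; with none, the raw CLASSPATH is printed and
# the shell exits:
#
#   hadoop_do_classpath_subcommand HADOOP_CLASSNAME "$@"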

## @description generic shell script option parser. Sets
## @description HADOOP_PARSE_COUNTER to the number of arguments
## @description the caller should shift.
## @audience private
## @stability evolving
## @replaceable yes
## @param [parameters, typically "$@"]
function hadoop_parse_args
{
  HADOOP_DAEMON_MODE="default"
  HADOOP_PARSE_COUNTER=0

  # not all of the options supported here are supported by all commands
  # however these are:
  hadoop_add_option "--config dir" "Hadoop config directory"
  hadoop_add_option "--debug" "turn on shell script debug mode"
  hadoop_add_option "--help" "usage information"

  while true; do
    hadoop_debug "hadoop_parse_args: processing $1"
    case $1 in
      --buildpaths)
        # shellcheck disable=SC2034
        HADOOP_ENABLE_BUILD_PATHS=true
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+1))
      ;;
      --config)
        shift
        confdir=$1
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
        if [[ -d "${confdir}" ]]; then
          # shellcheck disable=SC2034
          HADOOP_CONF_DIR="${confdir}"
        elif [[ -z "${confdir}" ]]; then
          hadoop_error "ERROR: No parameter provided for --config "
          hadoop_exit_with_usage 1
        else
          hadoop_error "ERROR: Cannot find configuration directory \"${confdir}\""
          hadoop_exit_with_usage 1
        fi
      ;;
      --daemon)
        shift
        HADOOP_DAEMON_MODE=$1
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
        if [[ -z "${HADOOP_DAEMON_MODE}" || \
          ! "${HADOOP_DAEMON_MODE}" =~ ^st(art|op|atus)$ ]]; then
          hadoop_error "ERROR: --daemon must be followed by either \"start\", \"stop\", or \"status\"."
          hadoop_exit_with_usage 1
        fi
      ;;
      --debug)
        shift
        # shellcheck disable=SC2034
        HADOOP_SHELL_SCRIPT_DEBUG=true
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+1))
      ;;
      --help|-help|-h|help|--h|--\?|-\?|\?)
        hadoop_exit_with_usage 0
      ;;
      --hostnames)
        shift
        # shellcheck disable=SC2034
        HADOOP_SLAVE_NAMES="$1"
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
      ;;
      --hosts)
        shift
        hadoop_populate_slaves_file "$1"
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
      ;;
      --loglevel)
        shift
        # shellcheck disable=SC2034
        HADOOP_LOGLEVEL="$1"
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
      ;;
      --slaves)
        shift
        # shellcheck disable=SC2034
        HADOOP_SLAVE_MODE=true
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+1))
      ;;
      *)
        break
      ;;
    esac
  done

  hadoop_debug "hadoop_parse_args: asking caller to skip ${HADOOP_PARSE_COUNTER}"
}
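
# Illustrative usage (a sketch of the documented contract): parse, then
# shift by the reported count so that "$@" holds only the remaining
# arguments:
#
#   hadoop_parse_args "$@"
#   shift "${HADOOP_PARSE_COUNTER}"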