hadoop-functions.sh 55 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611612613614615616617618619620621622623624625626627628629630631632633634635636637638639640641642643644645646647648649650651652653654655656657658659660661662663664665666667668669670671672673674675676677678679680681682683684685686687688689690691692693694695696697698699700701702703704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959960961962963964965966967968969970971972973974975976977978979980981982983984985986987988989990991992993994995996997998999100010011002100310041005100610071008100910101011101210131014101510161017101810191020102110221023102410251026102710281029103010311032103310341035103610371038103910401041104210431044104510461047104810491050105110521053105410551056105710581059106010611062106310641065106610671068106910701071107210731074107510761077107810791080108110821083108410851086108710881089109010911092109310941095109610971098109911001101110211031104110511061107110811091110111111121113111411151116111711181119112011211122112311241125112611271128112911301131113211331134113511361137113811391140114111421143114411451146114711481149115011511152115311541155115611571158115911601161116211631164116511
661167116811691170117111721173117411751176117711781179118011811182118311841185118611871188118911901191119211931194119511961197119811991200120112021203120412051206120712081209121012111212121312141215121612171218121912201221122212231224122512261227122812291230123112321233123412351236123712381239124012411242124312441245124612471248124912501251125212531254125512561257125812591260126112621263126412651266126712681269127012711272127312741275127612771278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177717781779178017811782178317841785178617871788178917901791179217931794179517961797179817991800180118021803180418051806180718081809181018111812181318141815181618171818181918201821182218231824182518261827182818291830183118321833183418351836183718381839184018411842184318441845184618471848184918501851185218531854185518561857185818591860186118621863186418651866186718681869187018711872187318741875187618771878187918801881188218831884188518861887188818891890189118921893189418951896189718981899190019011902190319041905190619071908190919101911191219131914191519161917191819191920192119221923192419251926192719281929193019311932193319341935193619371938193919401941194219431944194519461947194819491950195119521953195419551956195719581959
  1. #!/usr/bin/env bash
  2. # Licensed to the Apache Software Foundation (ASF) under one or more
  3. # contributor license agreements. See the NOTICE file distributed with
  4. # this work for additional information regarding copyright ownership.
  5. # The ASF licenses this file to You under the Apache License, Version 2.0
  6. # (the "License"); you may not use this file except in compliance with
  7. # the License. You may obtain a copy of the License at
  8. #
  9. # http://www.apache.org/licenses/LICENSE-2.0
  10. #
  11. # Unless required by applicable law or agreed to in writing, software
  12. # distributed under the License is distributed on an "AS IS" BASIS,
  13. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14. # See the License for the specific language governing permissions and
  15. # limitations under the License.
  16. # we need to declare this globally as an array, which can only
  17. # be done outside of a function
  18. declare -a HADOOP_SUBCMD_USAGE
  19. declare -a HADOOP_OPTION_USAGE
  20. ## @description Print a message to stderr
  21. ## @audience public
  22. ## @stability stable
  23. ## @replaceable no
  24. ## @param string
  25. function hadoop_error
  26. {
  27. echo "$*" 1>&2
  28. }
  29. ## @description Print a message to stderr if --debug is turned on
  30. ## @audience public
  31. ## @stability stable
  32. ## @replaceable no
  33. ## @param string
  34. function hadoop_debug
  35. {
  36. if [[ -n "${HADOOP_SHELL_SCRIPT_DEBUG}" ]]; then
  37. echo "DEBUG: $*" 1>&2
  38. fi
  39. }
  40. ## @description Add a subcommand to the usage output
  41. ## @audience private
  42. ## @stability evolving
  43. ## @replaceable no
  44. ## @param subcommand
  45. ## @param subcommanddesc
  46. function hadoop_add_subcommand
  47. {
  48. local subcmd=$1
  49. local text=$2
  50. HADOOP_SUBCMD_USAGE[${HADOOP_SUBCMD_USAGE_COUNTER}]="${subcmd}@${text}"
  51. ((HADOOP_SUBCMD_USAGE_COUNTER=HADOOP_SUBCMD_USAGE_COUNTER+1))
  52. }
  53. ## @description Add an option to the usage output
  54. ## @audience private
  55. ## @stability evolving
  56. ## @replaceable no
  57. ## @param subcommand
  58. ## @param subcommanddesc
  59. function hadoop_add_option
  60. {
  61. local option=$1
  62. local text=$2
  63. HADOOP_OPTION_USAGE[${HADOOP_OPTION_USAGE_COUNTER}]="${option}@${text}"
  64. ((HADOOP_OPTION_USAGE_COUNTER=HADOOP_OPTION_USAGE_COUNTER+1))
  65. }
  66. ## @description Reset the usage information to blank
  67. ## @audience private
  68. ## @stability evolving
  69. ## @replaceable no
  70. function hadoop_reset_usage
  71. {
  72. HADOOP_SUBCMD_USAGE=()
  73. HADOOP_OPTION_USAGE=()
  74. HADOOP_SUBCMD_USAGE_COUNTER=0
  75. HADOOP_OPTION_USAGE_COUNTER=0
  76. }
  77. ## @description Print a screen-size aware two-column output
  78. ## @audience private
  79. ## @stability evolving
  80. ## @replaceable no
  81. ## @param array
  82. function hadoop_generic_columnprinter
  83. {
  84. declare -a input=("$@")
  85. declare -i i=0
  86. declare -i counter=0
  87. declare line
  88. declare text
  89. declare option
  90. declare giventext
  91. declare -i maxoptsize
  92. declare -i foldsize
  93. declare -a tmpa
  94. declare numcols
  95. if [[ -n "${COLUMNS}" ]]; then
  96. numcols=${COLUMNS}
  97. else
  98. numcols=$(tput cols) 2>/dev/null
  99. fi
  100. if [[ -z "${numcols}"
  101. || ! "${numcols}" =~ ^[0-9]+$ ]]; then
  102. numcols=75
  103. else
  104. ((numcols=numcols-5))
  105. fi
  106. while read -r line; do
  107. tmpa[${counter}]=${line}
  108. ((counter=counter+1))
  109. option=$(echo "${line}" | cut -f1 -d'@')
  110. if [[ ${#option} -gt ${maxoptsize} ]]; then
  111. maxoptsize=${#option}
  112. fi
  113. done < <(for text in "${input[@]}"; do
  114. echo "${text}"
  115. done | sort)
  116. i=0
  117. ((foldsize=numcols-maxoptsize))
  118. until [[ $i -eq ${#tmpa[@]} ]]; do
  119. option=$(echo "${tmpa[$i]}" | cut -f1 -d'@')
  120. giventext=$(echo "${tmpa[$i]}" | cut -f2 -d'@')
  121. while read -r line; do
  122. printf "%-${maxoptsize}s %-s\n" "${option}" "${line}"
  123. option=" "
  124. done < <(echo "${giventext}"| fold -s -w ${foldsize})
  125. ((i=i+1))
  126. done
  127. }
  128. ## @description generate standard usage output
  129. ## @description and optionally takes a class
  130. ## @audience private
  131. ## @stability evolving
  132. ## @replaceable no
  133. ## @param execname
  134. ## @param true|false
  135. ## @param [text to use in place of SUBCOMMAND]
  136. function hadoop_generate_usage
  137. {
  138. local cmd=$1
  139. local takesclass=$2
  140. local subcmdtext=${3:-"SUBCOMMAND"}
  141. local haveoptions
  142. local optstring
  143. local havesubs
  144. local subcmdstring
  145. cmd=${cmd##*/}
  146. if [[ -n "${HADOOP_OPTION_USAGE_COUNTER}"
  147. && "${HADOOP_OPTION_USAGE_COUNTER}" -gt 0 ]]; then
  148. haveoptions=true
  149. optstring=" [OPTIONS]"
  150. fi
  151. if [[ -n "${HADOOP_SUBCMD_USAGE_COUNTER}"
  152. && "${HADOOP_SUBCMD_USAGE_COUNTER}" -gt 0 ]]; then
  153. havesubs=true
  154. subcmdstring=" ${subcmdtext} [${subcmdtext} OPTIONS]"
  155. fi
  156. echo "Usage: ${cmd}${optstring}${subcmdstring}"
  157. if [[ ${takesclass} = true ]]; then
  158. echo " or ${cmd}${optstring} CLASSNAME [CLASSNAME OPTIONS]"
  159. echo " where CLASSNAME is a user-provided Java class"
  160. fi
  161. if [[ "${haveoptions}" = true ]]; then
  162. echo ""
  163. echo " OPTIONS is none or any of:"
  164. echo ""
  165. hadoop_generic_columnprinter "${HADOOP_OPTION_USAGE[@]}"
  166. fi
  167. if [[ "${havesubs}" = true ]]; then
  168. echo ""
  169. echo " ${subcmdtext} is one of:"
  170. echo ""
  171. hadoop_generic_columnprinter "${HADOOP_SUBCMD_USAGE[@]}"
  172. echo ""
  173. echo "${subcmdtext} may print help when invoked w/o parameters or with -h."
  174. fi
  175. }
  176. ## @description Replace `oldvar` with `newvar` if `oldvar` exists.
  177. ## @audience public
  178. ## @stability stable
  179. ## @replaceable yes
  180. ## @param oldvar
  181. ## @param newvar
  182. function hadoop_deprecate_envvar
  183. {
  184. local oldvar=$1
  185. local newvar=$2
  186. local oldval=${!oldvar}
  187. local newval=${!newvar}
  188. if [[ -n "${oldval}" ]]; then
  189. hadoop_error "WARNING: ${oldvar} has been replaced by ${newvar}. Using value of ${oldvar}."
  190. # shellcheck disable=SC2086
  191. eval ${newvar}=\"${oldval}\"
  192. # shellcheck disable=SC2086
  193. newval=${oldval}
  194. # shellcheck disable=SC2086
  195. eval ${newvar}=\"${newval}\"
  196. fi
  197. }
  198. ## @description Bootstraps the Hadoop shell environment
  199. ## @audience private
  200. ## @stability evolving
  201. ## @replaceable no
  202. function hadoop_bootstrap
  203. {
  204. # the root of the Hadoop installation
  205. # See HADOOP-6255 for the expected directory structure layout
  206. # By now, HADOOP_LIBEXEC_DIR should have been defined upstream
  207. # We can piggyback off of that to figure out where the default
  208. # HADOOP_FREFIX should be. This allows us to run without
  209. # HADOOP_PREFIX ever being defined by a human! As a consequence
  210. # HADOOP_LIBEXEC_DIR now becomes perhaps the single most powerful
  211. # env var within Hadoop.
  212. if [[ -z "${HADOOP_LIBEXEC_DIR}" ]]; then
  213. hadoop_error "HADOOP_LIBEXEC_DIR is not defined. Exiting."
  214. exit 1
  215. fi
  216. HADOOP_DEFAULT_PREFIX=$(cd -P -- "${HADOOP_LIBEXEC_DIR}/.." >/dev/null && pwd -P)
  217. HADOOP_PREFIX=${HADOOP_PREFIX:-$HADOOP_DEFAULT_PREFIX}
  218. export HADOOP_PREFIX
  219. #
  220. # short-cuts. vendors may redefine these as well, preferably
  221. # in hadoop-layouts.sh
  222. #
  223. HADOOP_COMMON_DIR=${HADOOP_COMMON_DIR:-"share/hadoop/common"}
  224. HADOOP_COMMON_LIB_JARS_DIR=${HADOOP_COMMON_LIB_JARS_DIR:-"share/hadoop/common/lib"}
  225. HADOOP_COMMON_LIB_NATIVE_DIR=${HADOOP_COMMON_LIB_NATIVE_DIR:-"lib/native"}
  226. HDFS_DIR=${HDFS_DIR:-"share/hadoop/hdfs"}
  227. HDFS_LIB_JARS_DIR=${HDFS_LIB_JARS_DIR:-"share/hadoop/hdfs/lib"}
  228. YARN_DIR=${YARN_DIR:-"share/hadoop/yarn"}
  229. YARN_LIB_JARS_DIR=${YARN_LIB_JARS_DIR:-"share/hadoop/yarn/lib"}
  230. MAPRED_DIR=${MAPRED_DIR:-"share/hadoop/mapreduce"}
  231. MAPRED_LIB_JARS_DIR=${MAPRED_LIB_JARS_DIR:-"share/hadoop/mapreduce/lib"}
  232. # setup a default TOOL_PATH
  233. TOOL_PATH=${TOOL_PATH:-${HADOOP_PREFIX}/share/hadoop/tools/lib/*}
  234. # usage output set to zero
  235. hadoop_reset_usage
  236. export HADOOP_OS_TYPE=${HADOOP_OS_TYPE:-$(uname -s)}
  237. # defaults
  238. export HADOOP_OPTS=${HADOOP_OPTS:-"-Djava.net.preferIPv4Stack=true"}
  239. hadoop_debug "Initial HADOOP_OPTS=${HADOOP_OPTS}"
  240. }
  241. ## @description Locate Hadoop's configuration directory
  242. ## @audience private
  243. ## @stability evolving
  244. ## @replaceable no
  245. function hadoop_find_confdir
  246. {
  247. local conf_dir
  248. # An attempt at compatibility with some Hadoop 1.x
  249. # installs.
  250. if [[ -e "${HADOOP_PREFIX}/conf/hadoop-env.sh" ]]; then
  251. conf_dir="conf"
  252. else
  253. conf_dir="etc/hadoop"
  254. fi
  255. export HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-${HADOOP_PREFIX}/${conf_dir}}"
  256. hadoop_debug "HADOOP_CONF_DIR=${HADOOP_CONF_DIR}"
  257. }
  258. ## @description Validate ${HADOOP_CONF_DIR}
  259. ## @audience public
  260. ## @stability stable
  261. ## @replaceable yes
  262. ## @return will exit on failure conditions
  263. function hadoop_verify_confdir
  264. {
  265. # Check only log4j.properties by default.
  266. # --loglevel does not work without logger settings in log4j.log4j.properties.
  267. if [[ ! -f "${HADOOP_CONF_DIR}/log4j.properties" ]]; then
  268. hadoop_error "WARNING: log4j.properties is not found. HADOOP_CONF_DIR may be incomplete."
  269. fi
  270. }
  271. ## @description Import the hadoop-env.sh settings
  272. ## @audience private
  273. ## @stability evolving
  274. ## @replaceable no
  275. function hadoop_exec_hadoopenv
  276. {
  277. if [[ -z "${HADOOP_ENV_PROCESSED}" ]]; then
  278. if [[ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]]; then
  279. export HADOOP_ENV_PROCESSED=true
  280. . "${HADOOP_CONF_DIR}/hadoop-env.sh"
  281. fi
  282. fi
  283. }
  284. ## @description Import the replaced functions
  285. ## @audience private
  286. ## @stability evolving
  287. ## @replaceable no
  288. function hadoop_exec_userfuncs
  289. {
  290. if [[ -e "${HADOOP_CONF_DIR}/hadoop-user-functions.sh" ]]; then
  291. . "${HADOOP_CONF_DIR}/hadoop-user-functions.sh"
  292. fi
  293. }
  294. ## @description Read the user's settings. This provides for users to
  295. ## @description override and/or append hadoop-env.sh. It is not meant
  296. ## @description as a complete system override.
  297. ## @audience private
  298. ## @stability evolving
  299. ## @replaceable yes
  300. function hadoop_exec_hadooprc
  301. {
  302. if [[ -f "${HOME}/.hadooprc" ]]; then
  303. hadoop_debug "Applying the user's .hadooprc"
  304. . "${HOME}/.hadooprc"
  305. fi
  306. }
  307. ## @description Import shellprofile.d content
  308. ## @audience private
  309. ## @stability evolving
  310. ## @replaceable yes
  311. function hadoop_import_shellprofiles
  312. {
  313. local i
  314. local files1
  315. local files2
  316. if [[ -d "${HADOOP_LIBEXEC_DIR}/shellprofile.d" ]]; then
  317. files1=(${HADOOP_LIBEXEC_DIR}/shellprofile.d/*.sh)
  318. hadoop_debug "shellprofiles: ${files1[*]}"
  319. else
  320. hadoop_error "WARNING: ${HADOOP_LIBEXEC_DIR}/shellprofile.d doesn't exist. Functionality may not work."
  321. fi
  322. if [[ -d "${HADOOP_CONF_DIR}/shellprofile.d" ]]; then
  323. files2=(${HADOOP_CONF_DIR}/shellprofile.d/*.sh)
  324. fi
  325. for i in "${files1[@]}" "${files2[@]}"
  326. do
  327. if [[ -n "${i}"
  328. && -f "${i}" ]]; then
  329. hadoop_debug "Profiles: importing ${i}"
  330. . "${i}"
  331. fi
  332. done
  333. }
  334. ## @description Initialize the registered shell profiles
  335. ## @audience private
  336. ## @stability evolving
  337. ## @replaceable yes
  338. function hadoop_shellprofiles_init
  339. {
  340. local i
  341. for i in ${HADOOP_SHELL_PROFILES}
  342. do
  343. if declare -F _${i}_hadoop_init >/dev/null ; then
  344. hadoop_debug "Profiles: ${i} init"
  345. # shellcheck disable=SC2086
  346. _${i}_hadoop_init
  347. fi
  348. done
  349. }
  350. ## @description Apply the shell profile classpath additions
  351. ## @audience private
  352. ## @stability evolving
  353. ## @replaceable yes
  354. function hadoop_shellprofiles_classpath
  355. {
  356. local i
  357. for i in ${HADOOP_SHELL_PROFILES}
  358. do
  359. if declare -F _${i}_hadoop_classpath >/dev/null ; then
  360. hadoop_debug "Profiles: ${i} classpath"
  361. # shellcheck disable=SC2086
  362. _${i}_hadoop_classpath
  363. fi
  364. done
  365. }
  366. ## @description Apply the shell profile native library additions
  367. ## @audience private
  368. ## @stability evolving
  369. ## @replaceable yes
  370. function hadoop_shellprofiles_nativelib
  371. {
  372. local i
  373. for i in ${HADOOP_SHELL_PROFILES}
  374. do
  375. if declare -F _${i}_hadoop_nativelib >/dev/null ; then
  376. hadoop_debug "Profiles: ${i} nativelib"
  377. # shellcheck disable=SC2086
  378. _${i}_hadoop_nativelib
  379. fi
  380. done
  381. }
  382. ## @description Apply the shell profile final configuration
  383. ## @audience private
  384. ## @stability evolving
  385. ## @replaceable yes
  386. function hadoop_shellprofiles_finalize
  387. {
  388. local i
  389. for i in ${HADOOP_SHELL_PROFILES}
  390. do
  391. if declare -F _${i}_hadoop_finalize >/dev/null ; then
  392. hadoop_debug "Profiles: ${i} finalize"
  393. # shellcheck disable=SC2086
  394. _${i}_hadoop_finalize
  395. fi
  396. done
  397. }
  398. ## @description Initialize the Hadoop shell environment, now that
  399. ## @description user settings have been imported
  400. ## @audience private
  401. ## @stability evolving
  402. ## @replaceable no
  403. function hadoop_basic_init
  404. {
  405. # Some of these are also set in hadoop-env.sh.
  406. # we still set them here just in case hadoop-env.sh is
  407. # broken in some way, set up defaults, etc.
  408. #
  409. # but it is important to note that if you update these
  410. # you also need to update hadoop-env.sh as well!!!
  411. CLASSPATH=""
  412. hadoop_debug "Initialize CLASSPATH"
  413. if [[ -z "${HADOOP_COMMON_HOME}" ]] &&
  414. [[ -d "${HADOOP_PREFIX}/${HADOOP_COMMON_DIR}" ]]; then
  415. export HADOOP_COMMON_HOME="${HADOOP_PREFIX}"
  416. fi
  417. # default policy file for service-level authorization
  418. HADOOP_POLICYFILE=${HADOOP_POLICYFILE:-"hadoop-policy.xml"}
  419. # define HADOOP_HDFS_HOME
  420. if [[ -z "${HADOOP_HDFS_HOME}" ]] &&
  421. [[ -d "${HADOOP_PREFIX}/${HDFS_DIR}" ]]; then
  422. export HADOOP_HDFS_HOME="${HADOOP_PREFIX}"
  423. fi
  424. # define HADOOP_YARN_HOME
  425. if [[ -z "${HADOOP_YARN_HOME}" ]] &&
  426. [[ -d "${HADOOP_PREFIX}/${YARN_DIR}" ]]; then
  427. export HADOOP_YARN_HOME="${HADOOP_PREFIX}"
  428. fi
  429. # define HADOOP_MAPRED_HOME
  430. if [[ -z "${HADOOP_MAPRED_HOME}" ]] &&
  431. [[ -d "${HADOOP_PREFIX}/${MAPRED_DIR}" ]]; then
  432. export HADOOP_MAPRED_HOME="${HADOOP_PREFIX}"
  433. fi
  434. if [[ ! -d "${HADOOP_COMMON_HOME}" ]]; then
  435. hadoop_error "ERROR: Invalid HADOOP_COMMON_HOME"
  436. exit 1
  437. fi
  438. if [[ ! -d "${HADOOP_HDFS_HOME}" ]]; then
  439. hadoop_error "ERROR: Invalid HADOOP_HDFS_HOME"
  440. exit 1
  441. fi
  442. if [[ ! -d "${HADOOP_YARN_HOME}" ]]; then
  443. hadoop_error "ERROR: Invalid HADOOP_YARN_HOME"
  444. exit 1
  445. fi
  446. if [[ ! -d "${HADOOP_MAPRED_HOME}" ]]; then
  447. hadoop_error "ERROR: Invalid HADOOP_MAPRED_HOME"
  448. exit 1
  449. fi
  450. HADOOP_IDENT_STRING=${HADOOP_IDENT_STRING:-$USER}
  451. HADOOP_LOG_DIR=${HADOOP_LOG_DIR:-"${HADOOP_PREFIX}/logs"}
  452. HADOOP_LOGFILE=${HADOOP_LOGFILE:-hadoop.log}
  453. HADOOP_LOGLEVEL=${HADOOP_LOGLEVEL:-INFO}
  454. HADOOP_NICENESS=${HADOOP_NICENESS:-0}
  455. HADOOP_STOP_TIMEOUT=${HADOOP_STOP_TIMEOUT:-5}
  456. HADOOP_PID_DIR=${HADOOP_PID_DIR:-/tmp}
  457. HADOOP_ROOT_LOGGER=${HADOOP_ROOT_LOGGER:-${HADOOP_LOGLEVEL},console}
  458. HADOOP_DAEMON_ROOT_LOGGER=${HADOOP_DAEMON_ROOT_LOGGER:-${HADOOP_LOGLEVEL},RFA}
  459. HADOOP_SECURITY_LOGGER=${HADOOP_SECURITY_LOGGER:-INFO,NullAppender}
  460. HADOOP_SSH_OPTS=${HADOOP_SSH_OPTS:-"-o BatchMode=yes -o StrictHostKeyChecking=no -o ConnectTimeout=10s"}
  461. HADOOP_SECURE_LOG_DIR=${HADOOP_SECURE_LOG_DIR:-${HADOOP_LOG_DIR}}
  462. HADOOP_SECURE_PID_DIR=${HADOOP_SECURE_PID_DIR:-${HADOOP_PID_DIR}}
  463. HADOOP_SSH_PARALLEL=${HADOOP_SSH_PARALLEL:-10}
  464. }
  465. ## @description Set the slave support information to the contents
  466. ## @description of `filename`
  467. ## @audience public
  468. ## @stability stable
  469. ## @replaceable no
  470. ## @param filename
  471. ## @return will exit if file does not exist
  472. function hadoop_populate_slaves_file
  473. {
  474. local slavesfile=$1
  475. shift
  476. if [[ -f "${slavesfile}" ]]; then
  477. # shellcheck disable=2034
  478. HADOOP_SLAVES="${slavesfile}"
  479. elif [[ -f "${HADOOP_CONF_DIR}/${slavesfile}" ]]; then
  480. # shellcheck disable=2034
  481. HADOOP_SLAVES="${HADOOP_CONF_DIR}/${slavesfile}"
  482. else
  483. hadoop_error "ERROR: Cannot find hosts file \"${slavesfile}\""
  484. hadoop_exit_with_usage 1
  485. fi
  486. }
  487. ## @description Rotates the given `file` until `number` of
  488. ## @description files exist.
  489. ## @audience public
  490. ## @stability stable
  491. ## @replaceable no
  492. ## @param filename
  493. ## @param [number]
  494. ## @return $? will contain last mv's return value
  495. function hadoop_rotate_log
  496. {
  497. #
  498. # Users are likely to replace this one for something
  499. # that gzips or uses dates or who knows what.
  500. #
  501. # be aware that &1 and &2 might go through here
  502. # so don't do anything too crazy...
  503. #
  504. local log=$1;
  505. local num=${2:-5};
  506. if [[ -f "${log}" ]]; then # rotate logs
  507. while [[ ${num} -gt 1 ]]; do
  508. #shellcheck disable=SC2086
  509. let prev=${num}-1
  510. if [[ -f "${log}.${prev}" ]]; then
  511. mv "${log}.${prev}" "${log}.${num}"
  512. fi
  513. num=${prev}
  514. done
  515. mv "${log}" "${log}.${num}"
  516. fi
  517. }
  518. ## @description Via ssh, log into `hostname` and run `command`
  519. ## @audience private
  520. ## @stability evolving
  521. ## @replaceable yes
  522. ## @param hostname
  523. ## @param command
  524. ## @param [...]
  525. function hadoop_actual_ssh
  526. {
  527. # we are passing this function to xargs
  528. # should get hostname followed by rest of command line
  529. local slave=$1
  530. shift
  531. # shellcheck disable=SC2086
  532. ssh ${HADOOP_SSH_OPTS} ${slave} $"${@// /\\ }" 2>&1 | sed "s/^/$slave: /"
  533. }
  534. ## @description Connect to ${HADOOP_SLAVES} or ${HADOOP_SLAVE_NAMES}
  535. ## @description and execute command.
  536. ## @audience private
  537. ## @stability evolving
  538. ## @replaceable yes
  539. ## @param command
  540. ## @param [...]
  541. function hadoop_connect_to_hosts
  542. {
  543. # shellcheck disable=SC2124
  544. local params="$@"
  545. #
  546. # ssh (or whatever) to a host
  547. #
  548. # User can specify hostnames or a file where the hostnames are (not both)
  549. if [[ -n "${HADOOP_SLAVES}" && -n "${HADOOP_SLAVE_NAMES}" ]] ; then
  550. hadoop_error "ERROR: Both HADOOP_SLAVES and HADOOP_SLAVE_NAME were defined. Aborting."
  551. exit 1
  552. fi
  553. if [[ -n "${HADOOP_SLAVE_NAMES}" ]] ; then
  554. SLAVE_NAMES=${HADOOP_SLAVE_NAMES}
  555. else
  556. SLAVE_FILE=${HADOOP_SLAVES:-${HADOOP_CONF_DIR}/slaves}
  557. fi
  558. # if pdsh is available, let's use it. otherwise default
  559. # to a loop around ssh. (ugh)
  560. if [[ -e '/usr/bin/pdsh' ]]; then
  561. if [[ -z "${HADOOP_SLAVE_NAMES}" ]] ; then
  562. # if we were given a file, just let pdsh deal with it.
  563. # shellcheck disable=SC2086
  564. PDSH_SSH_ARGS_APPEND="${HADOOP_SSH_OPTS}" pdsh \
  565. -f "${HADOOP_SSH_PARALLEL}" -w ^"${SLAVE_FILE}" $"${@// /\\ }" 2>&1
  566. else
  567. # no spaces allowed in the pdsh arg host list
  568. # shellcheck disable=SC2086
  569. SLAVE_NAMES=$(echo ${SLAVE_NAMES} | tr -s ' ' ,)
  570. PDSH_SSH_ARGS_APPEND="${HADOOP_SSH_OPTS}" pdsh \
  571. -f "${HADOOP_SSH_PARALLEL}" -w "${SLAVE_NAMES}" $"${@// /\\ }" 2>&1
  572. fi
  573. else
  574. if [[ -z "${SLAVE_NAMES}" ]]; then
  575. SLAVE_NAMES=$(sed 's/#.*$//;/^$/d' "${SLAVE_FILE}")
  576. fi
  577. hadoop_connect_to_hosts_without_pdsh "${params}"
  578. fi
  579. }
  580. ## @description Connect to ${SLAVE_NAMES} and execute command
  581. ## @description under the environment which does not support pdsh.
  582. ## @audience private
  583. ## @stability evolving
  584. ## @replaceable yes
  585. ## @param command
  586. ## @param [...]
  587. function hadoop_connect_to_hosts_without_pdsh
  588. {
  589. # shellcheck disable=SC2124
  590. local params="$@"
  591. local slaves=(${SLAVE_NAMES})
  592. for (( i = 0; i < ${#slaves[@]}; i++ ))
  593. do
  594. if (( i != 0 && i % HADOOP_SSH_PARALLEL == 0 )); then
  595. wait
  596. fi
  597. # shellcheck disable=SC2086
  598. hadoop_actual_ssh "${slaves[$i]}" ${params} &
  599. done
  600. wait
  601. }
  602. ## @description Utility routine to handle --slaves mode
  603. ## @audience private
  604. ## @stability evolving
  605. ## @replaceable yes
  606. ## @param commandarray
  607. function hadoop_common_slave_mode_execute
  608. {
  609. #
  610. # input should be the command line as given by the user
  611. # in the form of an array
  612. #
  613. local argv=("$@")
  614. # if --slaves is still on the command line, remove it
  615. # to prevent loops
  616. # Also remove --hostnames and --hosts along with arg values
  617. local argsSize=${#argv[@]};
  618. for (( i = 0; i < argsSize; i++ ))
  619. do
  620. if [[ "${argv[$i]}" =~ ^--slaves$ ]]; then
  621. unset argv[$i]
  622. elif [[ "${argv[$i]}" =~ ^--hostnames$ ]] ||
  623. [[ "${argv[$i]}" =~ ^--hosts$ ]]; then
  624. unset argv[$i];
  625. let i++;
  626. unset argv[$i];
  627. fi
  628. done
  629. if [[ ${QATESTMODE} = true ]]; then
  630. echo "${argv[@]}"
  631. return
  632. fi
  633. hadoop_connect_to_hosts -- "${argv[@]}"
  634. }
  635. ## @description Verify that a shell command was passed a valid
  636. ## @description class name
  637. ## @audience public
  638. ## @stability stable
  639. ## @replaceable yes
  640. ## @param classname
  641. ## @return 0 = success
  642. ## @return 1 = failure w/user message
  643. function hadoop_validate_classname
  644. {
  645. local class=$1
  646. shift 1
  647. if [[ ! ${class} =~ \. ]]; then
  648. # assuming the arg is typo of command if it does not conatain ".".
  649. # class belonging to no package is not allowed as a result.
  650. hadoop_error "ERROR: ${class} is not COMMAND nor fully qualified CLASSNAME."
  651. return 1
  652. fi
  653. return 0
  654. }
  655. ## @description Append the `appendstring` if `checkstring` is not
  656. ## @description present in the given `envvar`
  657. ## @audience public
  658. ## @stability stable
  659. ## @replaceable yes
  660. ## @param envvar
  661. ## @param checkstring
  662. ## @param appendstring
  663. function hadoop_add_param
  664. {
  665. #
  666. # general param dedupe..
  667. # $1 is what we are adding to
  668. # $2 is the name of what we want to add (key)
  669. # $3 is the key+value of what we're adding
  670. #
  671. # doing it this way allows us to support all sorts of
  672. # different syntaxes, just so long as they are space
  673. # delimited
  674. #
  675. if [[ ! ${!1} =~ $2 ]] ; then
  676. #shellcheck disable=SC2140
  677. eval "$1"="'${!1} $3'"
  678. if [[ ${!1:0:1} = ' ' ]]; then
  679. #shellcheck disable=SC2140
  680. eval "$1"="'${!1# }'"
  681. fi
  682. hadoop_debug "$1 accepted $3"
  683. else
  684. hadoop_debug "$1 declined $3"
  685. fi
  686. }
  687. ## @description Register the given `shellprofile` to the Hadoop
  688. ## @description shell subsystem
  689. ## @audience public
  690. ## @stability stable
  691. ## @replaceable yes
  692. ## @param shellprofile
  693. function hadoop_add_profile
  694. {
  695. # shellcheck disable=SC2086
  696. hadoop_add_param HADOOP_SHELL_PROFILES $1 $1
  697. }
  698. ## @description Add a file system object (directory, file,
  699. ## @description wildcard, ...) to the classpath. Optionally provide
  700. ## @description a hint as to where in the classpath it should go.
  701. ## @audience public
  702. ## @stability stable
  703. ## @replaceable yes
  704. ## @param object
  705. ## @param [before|after]
  706. ## @return 0 = success (added or duplicate)
  707. ## @return 1 = failure (doesn't exist or some other reason)
  708. function hadoop_add_classpath
  709. {
  710. # However, with classpath (& JLP), we can do dedupe
  711. # along with some sanity checking (e.g., missing directories)
  712. # since we have a better idea of what is legal
  713. #
  714. # for wildcard at end, we can
  715. # at least check the dir exists
  716. if [[ $1 =~ ^.*\*$ ]]; then
  717. local mp
  718. mp=$(dirname "$1")
  719. if [[ ! -d "${mp}" ]]; then
  720. hadoop_debug "Rejected CLASSPATH: $1 (not a dir)"
  721. return 1
  722. fi
  723. # no wildcard in the middle, so check existence
  724. # (doesn't matter *what* it is)
  725. elif [[ ! $1 =~ ^.*\*.*$ ]] && [[ ! -e "$1" ]]; then
  726. hadoop_debug "Rejected CLASSPATH: $1 (does not exist)"
  727. return 1
  728. fi
  729. if [[ -z "${CLASSPATH}" ]]; then
  730. CLASSPATH=$1
  731. hadoop_debug "Initial CLASSPATH=$1"
  732. elif [[ ":${CLASSPATH}:" != *":$1:"* ]]; then
  733. if [[ "$2" = "before" ]]; then
  734. CLASSPATH="$1:${CLASSPATH}"
  735. hadoop_debug "Prepend CLASSPATH: $1"
  736. else
  737. CLASSPATH+=:$1
  738. hadoop_debug "Append CLASSPATH: $1"
  739. fi
  740. else
  741. hadoop_debug "Dupe CLASSPATH: $1"
  742. fi
  743. return 0
  744. }
  745. ## @description Add a file system object (directory, file,
  746. ## @description wildcard, ...) to the colonpath. Optionally provide
  747. ## @description a hint as to where in the colonpath it should go.
  748. ## @description Prior to adding, objects are checked for duplication
  749. ## @description and check for existence. Many other functions use
  750. ## @description this function as their base implementation
  751. ## @description including `hadoop_add_javalibpath` and `hadoop_add_ldlibpath`.
  752. ## @audience public
  753. ## @stability stable
  754. ## @replaceable yes
  755. ## @param envvar
  756. ## @param object
  757. ## @param [before|after]
  758. ## @return 0 = success (added or duplicate)
  759. ## @return 1 = failure (doesn't exist or some other reason)
  760. function hadoop_add_colonpath
  761. {
  762. # this is CLASSPATH, JLP, etc but with dedupe but no
  763. # other checking
  764. if [[ -d "${2}" ]] && [[ ":${!1}:" != *":$2:"* ]]; then
  765. if [[ -z "${!1}" ]]; then
  766. # shellcheck disable=SC2086
  767. eval $1="'$2'"
  768. hadoop_debug "Initial colonpath($1): $2"
  769. elif [[ "$3" = "before" ]]; then
  770. # shellcheck disable=SC2086
  771. eval $1="'$2:${!1}'"
  772. hadoop_debug "Prepend colonpath($1): $2"
  773. else
  774. # shellcheck disable=SC2086
  775. eval $1+=":'$2'"
  776. hadoop_debug "Append colonpath($1): $2"
  777. fi
  778. return 0
  779. fi
  780. hadoop_debug "Rejected colonpath($1): $2"
  781. return 1
  782. }
  783. ## @description Add a file system object (directory, file,
  784. ## @description wildcard, ...) to the Java JNI path. Optionally
  785. ## @description provide a hint as to where in the Java JNI path
  786. ## @description it should go.
  787. ## @audience public
  788. ## @stability stable
  789. ## @replaceable yes
  790. ## @param object
  791. ## @param [before|after]
  792. ## @return 0 = success (added or duplicate)
  793. ## @return 1 = failure (doesn't exist or some other reason)
  794. function hadoop_add_javalibpath
  795. {
  796. # specialized function for a common use case
  797. hadoop_add_colonpath JAVA_LIBRARY_PATH "$1" "$2"
  798. }
  799. ## @description Add a file system object (directory, file,
  800. ## @description wildcard, ...) to the LD_LIBRARY_PATH. Optionally
  801. ## @description provide a hint as to where in the LD_LIBRARY_PATH
  802. ## @description it should go.
  803. ## @audience public
  804. ## @stability stable
  805. ## @replaceable yes
  806. ## @param object
  807. ## @param [before|after]
  808. ## @return 0 = success (added or duplicate)
  809. ## @return 1 = failure (doesn't exist or some other reason)
  810. function hadoop_add_ldlibpath
  811. {
  812. local status
  813. # specialized function for a common use case
  814. hadoop_add_colonpath LD_LIBRARY_PATH "$1" "$2"
  815. status=$?
  816. # note that we export this
  817. export LD_LIBRARY_PATH
  818. return ${status}
  819. }
  820. ## @description Add the common/core Hadoop components to the
  821. ## @description environment
  822. ## @audience private
  823. ## @stability evolving
  824. ## @replaceable yes
  825. ## @returns 1 on failure, may exit
  826. ## @returns 0 on success
  827. function hadoop_add_common_to_classpath
  828. {
  829. #
  830. # get all of the common jars+config in the path
  831. #
  832. if [[ -z "${HADOOP_COMMON_HOME}"
  833. || -z "${HADOOP_COMMON_DIR}"
  834. || -z "${HADOOP_COMMON_LIB_JARS_DIR}" ]]; then
  835. hadoop_debug "COMMON_HOME=${HADOOP_COMMON_HOME}"
  836. hadoop_debug "COMMON_DIR=${HADOOP_COMMON_DIR}"
  837. hadoop_debug "COMMON_LIB_JARS_DIR=${HADOOP_COMMON_LIB_JARS_DIR}"
  838. hadoop_error "ERROR: HADOOP_COMMON_HOME or related vars are not configured."
  839. exit 1
  840. fi
  841. # developers
  842. if [[ -n "${HADOOP_ENABLE_BUILD_PATHS}" ]]; then
  843. hadoop_add_classpath "${HADOOP_COMMON_HOME}/hadoop-common/target/classes"
  844. fi
  845. hadoop_add_classpath "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_LIB_JARS_DIR}"'/*'
  846. hadoop_add_classpath "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_DIR}"'/*'
  847. }
  848. ## @description Add the user's custom classpath settings to the
  849. ## @description environment
  850. ## @audience private
  851. ## @stability evolving
  852. ## @replaceable yes
  853. function hadoop_add_to_classpath_userpath
  854. {
  855. # Add the user-specified HADOOP_CLASSPATH to the
  856. # official CLASSPATH env var if HADOOP_USE_CLIENT_CLASSLOADER
  857. # is not set.
  858. # Add it first or last depending on if user has
  859. # set env-var HADOOP_USER_CLASSPATH_FIRST
  860. # we'll also dedupe it, because we're cool like that.
  861. #
  862. declare -a array
  863. declare -i c=0
  864. declare -i j
  865. declare -i i
  866. declare idx
  867. if [[ -n "${HADOOP_CLASSPATH}" ]]; then
  868. # I wonder if Java runs on VMS.
  869. for idx in $(echo "${HADOOP_CLASSPATH}" | tr : '\n'); do
  870. array[${c}]=${idx}
  871. ((c=c+1))
  872. done
  873. ((j=c-1))
  874. if [[ -z "${HADOOP_USE_CLIENT_CLASSLOADER}" ]]; then
  875. if [[ -z "${HADOOP_USER_CLASSPATH_FIRST}" ]]; then
  876. for ((i=0; i<=j; i++)); do
  877. hadoop_add_classpath "${array[$i]}" after
  878. done
  879. else
  880. for ((i=j; i>=0; i--)); do
  881. hadoop_add_classpath "${array[$i]}" before
  882. done
  883. fi
  884. fi
  885. fi
  886. }
  887. ## @description Routine to configure any OS-specific settings.
  888. ## @audience public
  889. ## @stability stable
  890. ## @replaceable yes
  891. ## @return may exit on failure conditions
  892. function hadoop_os_tricks
  893. {
  894. local bindv6only
  895. HADOOP_IS_CYGWIN=false
  896. case ${HADOOP_OS_TYPE} in
  897. Darwin)
  898. if [[ -z "${JAVA_HOME}" ]]; then
  899. if [[ -x /usr/libexec/java_home ]]; then
  900. JAVA_HOME="$(/usr/libexec/java_home)"
  901. export JAVA_HOME
  902. else
  903. JAVA_HOME=/Library/Java/Home
  904. export JAVA_HOME
  905. fi
  906. fi
  907. ;;
  908. Linux)
  909. # Newer versions of glibc use an arena memory allocator that
  910. # causes virtual # memory usage to explode. This interacts badly
  911. # with the many threads that we use in Hadoop. Tune the variable
  912. # down to prevent vmem explosion.
  913. export MALLOC_ARENA_MAX=${MALLOC_ARENA_MAX:-4}
  914. # we put this in QA test mode off so that non-Linux can test
  915. if [[ "${QATESTMODE}" = true ]]; then
  916. return
  917. fi
  918. # NOTE! HADOOP_ALLOW_IPV6 is a developer hook. We leave it
  919. # undocumented in hadoop-env.sh because we don't want users to
  920. # shoot themselves in the foot while devs make IPv6 work.
  921. bindv6only=$(/sbin/sysctl -n net.ipv6.bindv6only 2> /dev/null)
  922. if [[ -n "${bindv6only}" ]] &&
  923. [[ "${bindv6only}" -eq "1" ]] &&
  924. [[ "${HADOOP_ALLOW_IPV6}" != "yes" ]]; then
  925. hadoop_error "ERROR: \"net.ipv6.bindv6only\" is set to 1 "
  926. hadoop_error "ERROR: Hadoop networking could be broken. Aborting."
  927. hadoop_error "ERROR: For more info: http://wiki.apache.org/hadoop/HadoopIPv6"
  928. exit 1
  929. fi
  930. ;;
  931. CYGWIN*)
  932. # Flag that we're running on Cygwin to trigger path translation later.
  933. HADOOP_IS_CYGWIN=true
  934. ;;
  935. esac
  936. }
  937. ## @description Configure/verify ${JAVA_HOME}
  938. ## @audience public
  939. ## @stability stable
  940. ## @replaceable yes
  941. ## @return may exit on failure conditions
  942. function hadoop_java_setup
  943. {
  944. # Bail if we did not detect it
  945. if [[ -z "${JAVA_HOME}" ]]; then
  946. hadoop_error "ERROR: JAVA_HOME is not set and could not be found."
  947. exit 1
  948. fi
  949. if [[ ! -d "${JAVA_HOME}" ]]; then
  950. hadoop_error "ERROR: JAVA_HOME ${JAVA_HOME} does not exist."
  951. exit 1
  952. fi
  953. JAVA="${JAVA_HOME}/bin/java"
  954. if [[ ! -x "$JAVA" ]]; then
  955. hadoop_error "ERROR: $JAVA is not executable."
  956. exit 1
  957. fi
  958. }
  959. ## @description Finish Java JNI paths prior to execution
  960. ## @audience private
  961. ## @stability evolving
  962. ## @replaceable yes
  963. function hadoop_finalize_libpaths
  964. {
  965. if [[ -n "${JAVA_LIBRARY_PATH}" ]]; then
  966. hadoop_translate_cygwin_path JAVA_LIBRARY_PATH
  967. hadoop_add_param HADOOP_OPTS java.library.path \
  968. "-Djava.library.path=${JAVA_LIBRARY_PATH}"
  969. export LD_LIBRARY_PATH
  970. fi
  971. }
  972. ## @description Finish Java heap parameters prior to execution
  973. ## @audience private
  974. ## @stability evolving
  975. ## @replaceable yes
  976. function hadoop_finalize_hadoop_heap
  977. {
  978. if [[ -n "${HADOOP_HEAPSIZE_MAX}" ]]; then
  979. if [[ "${HADOOP_HEAPSIZE_MAX}" =~ ^[0-9]+$ ]]; then
  980. HADOOP_HEAPSIZE_MAX="${HADOOP_HEAPSIZE_MAX}m"
  981. fi
  982. hadoop_add_param HADOOP_OPTS Xmx "-Xmx${HADOOP_HEAPSIZE_MAX}"
  983. fi
  984. # backwards compatibility
  985. if [[ -n "${HADOOP_HEAPSIZE}" ]]; then
  986. if [[ "${HADOOP_HEAPSIZE}" =~ ^[0-9]+$ ]]; then
  987. HADOOP_HEAPSIZE="${HADOOP_HEAPSIZE}m"
  988. fi
  989. hadoop_add_param HADOOP_OPTS Xmx "-Xmx${HADOOP_HEAPSIZE}"
  990. fi
  991. if [[ -n "${HADOOP_HEAPSIZE_MIN}" ]]; then
  992. if [[ "${HADOOP_HEAPSIZE_MIN}" =~ ^[0-9]+$ ]]; then
  993. HADOOP_HEAPSIZE_MIN="${HADOOP_HEAPSIZE_MIN}m"
  994. fi
  995. hadoop_add_param HADOOP_OPTS Xms "-Xms${HADOOP_HEAPSIZE_MIN}"
  996. fi
  997. }
  998. ## @description Converts the contents of the variable name
  999. ## @description `varnameref` into the equivalent Windows path.
  1000. ## @description If the second parameter is true, then `varnameref`
  1001. ## @description is treated as though it was a path list.
  1002. ## @audience public
  1003. ## @stability stable
  1004. ## @replaceable yes
  1005. ## @param varnameref
  1006. ## @param [true]
  1007. function hadoop_translate_cygwin_path
  1008. {
  1009. if [[ "${HADOOP_IS_CYGWIN}" = "true" ]]; then
  1010. if [[ "$2" = "true" ]]; then
  1011. #shellcheck disable=SC2016
  1012. eval "$1"='$(cygpath -p -w "${!1}" 2>/dev/null)'
  1013. else
  1014. #shellcheck disable=SC2016
  1015. eval "$1"='$(cygpath -w "${!1}" 2>/dev/null)'
  1016. fi
  1017. fi
  1018. }
  1019. ## @description Finish configuring Hadoop specific system properties
  1020. ## @description prior to executing Java
  1021. ## @audience private
  1022. ## @stability evolving
  1023. ## @replaceable yes
  1024. function hadoop_finalize_hadoop_opts
  1025. {
  1026. hadoop_translate_cygwin_path HADOOP_LOG_DIR
  1027. hadoop_add_param HADOOP_OPTS hadoop.log.dir "-Dhadoop.log.dir=${HADOOP_LOG_DIR}"
  1028. hadoop_add_param HADOOP_OPTS hadoop.log.file "-Dhadoop.log.file=${HADOOP_LOGFILE}"
  1029. HADOOP_HOME=${HADOOP_PREFIX}
  1030. hadoop_translate_cygwin_path HADOOP_HOME
  1031. export HADOOP_HOME
  1032. hadoop_add_param HADOOP_OPTS hadoop.home.dir "-Dhadoop.home.dir=${HADOOP_HOME}"
  1033. hadoop_add_param HADOOP_OPTS hadoop.id.str "-Dhadoop.id.str=${HADOOP_IDENT_STRING}"
  1034. hadoop_add_param HADOOP_OPTS hadoop.root.logger "-Dhadoop.root.logger=${HADOOP_ROOT_LOGGER}"
  1035. hadoop_add_param HADOOP_OPTS hadoop.policy.file "-Dhadoop.policy.file=${HADOOP_POLICYFILE}"
  1036. hadoop_add_param HADOOP_OPTS hadoop.security.logger "-Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER}"
  1037. }
  1038. ## @description Finish Java classpath prior to execution
  1039. ## @audience private
  1040. ## @stability evolving
  1041. ## @replaceable yes
  1042. function hadoop_finalize_classpath
  1043. {
  1044. hadoop_add_classpath "${HADOOP_CONF_DIR}" before
  1045. # user classpath gets added at the last minute. this allows
  1046. # override of CONF dirs and more
  1047. hadoop_add_to_classpath_userpath
  1048. hadoop_translate_cygwin_path CLASSPATH true
  1049. }
  1050. ## @description Finish Catalina configuration prior to execution
  1051. ## @audience private
  1052. ## @stability evolving
  1053. ## @replaceable yes
  1054. function hadoop_finalize_catalina_opts
  1055. {
  1056. local prefix=${HADOOP_CATALINA_PREFIX}
  1057. hadoop_add_param CATALINA_OPTS hadoop.home.dir "-Dhadoop.home.dir=${HADOOP_PREFIX}"
  1058. if [[ -n "${JAVA_LIBRARY_PATH}" ]]; then
  1059. hadoop_add_param CATALINA_OPTS java.library.path "-Djava.library.path=${JAVA_LIBRARY_PATH}"
  1060. fi
  1061. hadoop_add_param CATALINA_OPTS "${prefix}.home.dir" "-D${prefix}.home.dir=${HADOOP_PREFIX}"
  1062. hadoop_add_param CATALINA_OPTS "${prefix}.config.dir" "-D${prefix}.config.dir=${HADOOP_CATALINA_CONFIG}"
  1063. hadoop_add_param CATALINA_OPTS "${prefix}.log.dir" "-D${prefix}.log.dir=${HADOOP_CATALINA_LOG}"
  1064. hadoop_add_param CATALINA_OPTS "${prefix}.temp.dir" "-D${prefix}.temp.dir=${HADOOP_CATALINA_TEMP}"
  1065. hadoop_add_param CATALINA_OPTS "${prefix}.admin.port" "-D${prefix}.admin.port=${HADOOP_CATALINA_ADMIN_PORT}"
  1066. hadoop_add_param CATALINA_OPTS "${prefix}.http.port" "-D${prefix}.http.port=${HADOOP_CATALINA_HTTP_PORT}"
  1067. hadoop_add_param CATALINA_OPTS "${prefix}.max.threads" "-D${prefix}.max.threads=${HADOOP_CATALINA_MAX_THREADS}"
  1068. hadoop_add_param CATALINA_OPTS "${prefix}.ssl.keystore.file" "-D${prefix}.ssl.keystore.file=${HADOOP_CATALINA_SSL_KEYSTORE_FILE}"
  1069. }
  1070. ## @description Finish all the remaining environment settings prior
  1071. ## @description to executing Java. This is a wrapper that calls
  1072. ## @description the other `finalize` routines.
  1073. ## @audience private
  1074. ## @stability evolving
  1075. ## @replaceable yes
  1076. function hadoop_finalize
  1077. {
  1078. hadoop_shellprofiles_finalize
  1079. hadoop_finalize_classpath
  1080. hadoop_finalize_libpaths
  1081. hadoop_finalize_hadoop_heap
  1082. hadoop_finalize_hadoop_opts
  1083. hadoop_translate_cygwin_path HADOOP_PREFIX
  1084. hadoop_translate_cygwin_path HADOOP_CONF_DIR
  1085. hadoop_translate_cygwin_path HADOOP_COMMON_HOME
  1086. hadoop_translate_cygwin_path HADOOP_HDFS_HOME
  1087. hadoop_translate_cygwin_path HADOOP_YARN_HOME
  1088. hadoop_translate_cygwin_path HADOOP_MAPRED_HOME
  1089. }
  1090. ## @description Print usage information and exit with the passed
  1091. ## @description `exitcode`
  1092. ## @audience public
  1093. ## @stability stable
  1094. ## @replaceable no
  1095. ## @param exitcode
  1096. ## @return This function will always exit.
  1097. function hadoop_exit_with_usage
  1098. {
  1099. local exitcode=$1
  1100. if [[ -z $exitcode ]]; then
  1101. exitcode=1
  1102. fi
  1103. # shellcheck disable=SC2034
  1104. if declare -F hadoop_usage >/dev/null ; then
  1105. hadoop_usage
  1106. elif [[ -x /usr/bin/cowsay ]]; then
  1107. /usr/bin/cowsay -f elephant "Sorry, no help available."
  1108. else
  1109. hadoop_error "Sorry, no help available."
  1110. fi
  1111. exit $exitcode
  1112. }
  1113. ## @description Verify that prerequisites have been met prior to
  1114. ## @description excuting a privileged program.
  1115. ## @audience private
  1116. ## @stability evolving
  1117. ## @replaceable yes
  1118. ## @return This routine may exit.
  1119. function hadoop_verify_secure_prereq
  1120. {
  1121. # if you are on an OS like Illumos that has functional roles
  1122. # and you are using pfexec, you'll probably want to change
  1123. # this.
  1124. # ${EUID} comes from the shell itself!
  1125. if [[ "${EUID}" -ne 0 ]] && [[ -z "${HADOOP_SECURE_COMMAND}" ]]; then
  1126. hadoop_error "ERROR: You must be a privileged user in order to run a secure service."
  1127. exit 1
  1128. else
  1129. return 0
  1130. fi
  1131. }
  1132. ## @audience private
  1133. ## @stability evolving
  1134. ## @replaceable yes
  1135. function hadoop_setup_secure_service
  1136. {
  1137. # need a more complicated setup? replace me!
  1138. HADOOP_PID_DIR=${HADOOP_SECURE_PID_DIR}
  1139. HADOOP_LOG_DIR=${HADOOP_SECURE_LOG_DIR}
  1140. }
  1141. ## @audience private
  1142. ## @stability evolving
  1143. ## @replaceable yes
  1144. function hadoop_verify_piddir
  1145. {
  1146. if [[ -z "${HADOOP_PID_DIR}" ]]; then
  1147. hadoop_error "No pid directory defined."
  1148. exit 1
  1149. fi
  1150. if [[ ! -w "${HADOOP_PID_DIR}" ]] && [[ ! -d "${HADOOP_PID_DIR}" ]]; then
  1151. hadoop_error "WARNING: ${HADOOP_PID_DIR} does not exist. Creating."
  1152. mkdir -p "${HADOOP_PID_DIR}" > /dev/null 2>&1
  1153. if [[ $? -gt 0 ]]; then
  1154. hadoop_error "ERROR: Unable to create ${HADOOP_PID_DIR}. Aborting."
  1155. exit 1
  1156. fi
  1157. fi
  1158. touch "${HADOOP_PID_DIR}/$$" >/dev/null 2>&1
  1159. if [[ $? -gt 0 ]]; then
  1160. hadoop_error "ERROR: Unable to write in ${HADOOP_PID_DIR}. Aborting."
  1161. exit 1
  1162. fi
  1163. rm "${HADOOP_PID_DIR}/$$" >/dev/null 2>&1
  1164. }
  1165. ## @audience private
  1166. ## @stability evolving
  1167. ## @replaceable yes
  1168. function hadoop_verify_logdir
  1169. {
  1170. if [[ -z "${HADOOP_LOG_DIR}" ]]; then
  1171. hadoop_error "No log directory defined."
  1172. exit 1
  1173. fi
  1174. if [[ ! -w "${HADOOP_LOG_DIR}" ]] && [[ ! -d "${HADOOP_LOG_DIR}" ]]; then
  1175. hadoop_error "WARNING: ${HADOOP_LOG_DIR} does not exist. Creating."
  1176. mkdir -p "${HADOOP_LOG_DIR}" > /dev/null 2>&1
  1177. if [[ $? -gt 0 ]]; then
  1178. hadoop_error "ERROR: Unable to create ${HADOOP_LOG_DIR}. Aborting."
  1179. exit 1
  1180. fi
  1181. fi
  1182. touch "${HADOOP_LOG_DIR}/$$" >/dev/null 2>&1
  1183. if [[ $? -gt 0 ]]; then
  1184. hadoop_error "ERROR: Unable to write in ${HADOOP_LOG_DIR}. Aborting."
  1185. exit 1
  1186. fi
  1187. rm "${HADOOP_LOG_DIR}/$$" >/dev/null 2>&1
  1188. }
  1189. ## @description Determine the status of the daemon referenced
  1190. ## @description by `pidfile`
  1191. ## @audience public
  1192. ## @stability stable
  1193. ## @replaceable yes
  1194. ## @param pidfile
  1195. ## @return (mostly) LSB 4.1.0 compatible status
  1196. function hadoop_status_daemon
  1197. {
  1198. #
  1199. # LSB 4.1.0 compatible status command (1)
  1200. #
  1201. # 0 = program is running
  1202. # 1 = dead, but still a pid (2)
  1203. # 2 = (not used by us)
  1204. # 3 = not running
  1205. #
  1206. # 1 - this is not an endorsement of the LSB
  1207. #
  1208. # 2 - technically, the specification says /var/run/pid, so
  1209. # we should never return this value, but we're giving
  1210. # them the benefit of a doubt and returning 1 even if
  1211. # our pid is not in in /var/run .
  1212. #
  1213. local pidfile=$1
  1214. shift
  1215. local pid
  1216. if [[ -f "${pidfile}" ]]; then
  1217. pid=$(cat "${pidfile}")
  1218. if ps -p "${pid}" > /dev/null 2>&1; then
  1219. return 0
  1220. fi
  1221. return 1
  1222. fi
  1223. return 3
  1224. }
  1225. ## @description Execute the Java `class`, passing along any `options`.
  1226. ## @description Additionally, set the Java property -Dproc_`command`.
  1227. ## @audience public
  1228. ## @stability stable
  1229. ## @replaceable yes
  1230. ## @param command
  1231. ## @param class
  1232. ## @param [options]
  1233. function hadoop_java_exec
  1234. {
  1235. # run a java command. this is used for
  1236. # non-daemons
  1237. local command=$1
  1238. local class=$2
  1239. shift 2
  1240. hadoop_debug "Final CLASSPATH: ${CLASSPATH}"
  1241. hadoop_debug "Final HADOOP_OPTS: ${HADOOP_OPTS}"
  1242. hadoop_debug "Final JAVA_HOME: ${JAVA_HOME}"
  1243. hadoop_debug "java: ${JAVA}"
  1244. hadoop_debug "Class name: ${class}"
  1245. hadoop_debug "Command line options: $*"
  1246. export CLASSPATH
  1247. #shellcheck disable=SC2086
  1248. exec "${JAVA}" "-Dproc_${command}" ${HADOOP_OPTS} "${class}" "$@"
  1249. }
  1250. ## @description Start a non-privileged daemon in the foreground.
  1251. ## @audience private
  1252. ## @stability evolving
  1253. ## @replaceable yes
  1254. ## @param command
  1255. ## @param class
  1256. ## @param pidfile
  1257. ## @param [options]
  1258. function hadoop_start_daemon
  1259. {
  1260. # this is our non-privileged daemon starter
  1261. # that fires up a daemon in the *foreground*
  1262. # so complex! so wow! much java!
  1263. local command=$1
  1264. local class=$2
  1265. local pidfile=$3
  1266. shift 3
  1267. hadoop_debug "Final CLASSPATH: ${CLASSPATH}"
  1268. hadoop_debug "Final HADOOP_OPTS: ${HADOOP_OPTS}"
  1269. hadoop_debug "Final JAVA_HOME: ${JAVA_HOME}"
  1270. hadoop_debug "java: ${JAVA}"
  1271. hadoop_debug "Class name: ${class}"
  1272. hadoop_debug "Command line options: $*"
  1273. # this is for the non-daemon pid creation
  1274. #shellcheck disable=SC2086
  1275. echo $$ > "${pidfile}" 2>/dev/null
  1276. if [[ $? -gt 0 ]]; then
  1277. hadoop_error "ERROR: Cannot write ${command} pid ${pidfile}."
  1278. fi
  1279. export CLASSPATH
  1280. #shellcheck disable=SC2086
  1281. exec "${JAVA}" "-Dproc_${command}" ${HADOOP_OPTS} "${class}" "$@"
  1282. }
  1283. ## @description Start a non-privileged daemon in the background.
  1284. ## @audience private
  1285. ## @stability evolving
  1286. ## @replaceable yes
  1287. ## @param command
  1288. ## @param class
  1289. ## @param pidfile
  1290. ## @param outfile
  1291. ## @param [options]
  1292. function hadoop_start_daemon_wrapper
  1293. {
  1294. local daemonname=$1
  1295. local class=$2
  1296. local pidfile=$3
  1297. local outfile=$4
  1298. shift 4
  1299. local counter
  1300. hadoop_rotate_log "${outfile}"
  1301. hadoop_start_daemon "${daemonname}" \
  1302. "$class" \
  1303. "${pidfile}" \
  1304. "$@" >> "${outfile}" 2>&1 < /dev/null &
  1305. # we need to avoid a race condition here
  1306. # so let's wait for the fork to finish
  1307. # before overriding with the daemonized pid
  1308. (( counter=0 ))
  1309. while [[ ! -f ${pidfile} && ${counter} -le 5 ]]; do
  1310. sleep 1
  1311. (( counter++ ))
  1312. done
  1313. # this is for daemon pid creation
  1314. #shellcheck disable=SC2086
  1315. echo $! > "${pidfile}" 2>/dev/null
  1316. if [[ $? -gt 0 ]]; then
  1317. hadoop_error "ERROR: Cannot write ${daemonname} pid ${pidfile}."
  1318. fi
  1319. # shellcheck disable=SC2086
  1320. renice "${HADOOP_NICENESS}" $! >/dev/null 2>&1
  1321. if [[ $? -gt 0 ]]; then
  1322. hadoop_error "ERROR: Cannot set priority of ${daemonname} process $!"
  1323. fi
  1324. # shellcheck disable=SC2086
  1325. disown %+ >/dev/null 2>&1
  1326. if [[ $? -gt 0 ]]; then
  1327. hadoop_error "ERROR: Cannot disconnect ${daemonname} process $!"
  1328. fi
  1329. sleep 1
  1330. # capture the ulimit output
  1331. ulimit -a >> "${outfile}" 2>&1
  1332. # shellcheck disable=SC2086
  1333. if ! ps -p $! >/dev/null 2>&1; then
  1334. return 1
  1335. fi
  1336. return 0
  1337. }
## @description Start a privileged daemon in the foreground.
## @audience private
## @stability evolving
## @replaceable yes
## @param command
## @param class
## @param daemonpidfile
## @param daemonoutfile
## @param daemonerrfile
## @param wrapperpidfile
## @param [options]
function hadoop_start_secure_daemon
{
  # this is used to launch a secure daemon in the *foreground*
  #
  local daemonname=$1
  local class=$2

  # pid file to create for our daemon
  local daemonpidfile=$3

  # where to send stdout. jsvc has bad habits so this *may* be &1
  # which means you send it to stdout!
  local daemonoutfile=$4

  # where to send stderr. same thing, except &2 = stderr
  local daemonerrfile=$5
  local privpidfile=$6
  shift 6

  hadoop_rotate_log "${daemonoutfile}"
  hadoop_rotate_log "${daemonerrfile}"

  # shellcheck disable=SC2153
  jsvc="${JSVC_HOME}/jsvc"
  if [[ ! -f "${jsvc}" ]]; then
    hadoop_error "JSVC_HOME is not set or set incorrectly. jsvc is required to run secure"
    hadoop_error "or privileged daemons. Please download and install jsvc from "
    hadoop_error "http://archive.apache.org/dist/commons/daemon/binaries/ "
    hadoop_error "and set JSVC_HOME to the directory containing the jsvc binary."
    exit 1
  fi

  # note that shellcheck will throw a
  # bogus for-our-use-case 2086 here.
  # it doesn't properly support multi-line situations

  hadoop_debug "Final CLASSPATH: ${CLASSPATH}"
  hadoop_debug "Final HADOOP_OPTS: ${HADOOP_OPTS}"
  hadoop_debug "Final JSVC_HOME: ${JSVC_HOME}"
  hadoop_debug "jsvc: ${jsvc}"
  hadoop_debug "Class name: ${class}"
  hadoop_debug "Command line options: $*"

  #shellcheck disable=SC2086
  echo $$ > "${privpidfile}" 2>/dev/null
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot write ${daemonname} pid ${privpidfile}."
  fi

  # shellcheck disable=SC2086
  exec "${jsvc}" \
    "-Dproc_${daemonname}" \
    -outfile "${daemonoutfile}" \
    -errfile "${daemonerrfile}" \
    -pidfile "${daemonpidfile}" \
    -nodetach \
    -user "${HADOOP_SECURE_USER}" \
    -cp "${CLASSPATH}" \
    ${HADOOP_OPTS} \
    "${class}" "$@"
}
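
# Illustrative (commented-out) sketch: launching a privileged daemon under
# jsvc in the foreground. JSVC_HOME and HADOOP_SECURE_USER must already be
# set; the class and file paths below are example values only:
#
#   hadoop_start_secure_daemon datanode \
#     org.apache.hadoop.hdfs.server.datanode.SecureDataNodeStarter \
#     "${HADOOP_PID_DIR}/hadoop-datanode.pid" \
#     "${HADOOP_LOG_DIR}/hadoop-datanode.out" \
#     "${HADOOP_LOG_DIR}/hadoop-datanode.err" \
#     "${HADOOP_PID_DIR}/hadoop-datanode-jsvc.pid"
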
## @description Start a privileged daemon in the background.
## @audience private
## @stability evolving
## @replaceable yes
## @param command
## @param class
## @param daemonpidfile
## @param daemonoutfile
## @param wrapperpidfile
## @param wrapperoutfile
## @param daemonerrfile
## @param [options]
function hadoop_start_secure_daemon_wrapper
{
  # this wraps hadoop_start_secure_daemon to take care
  # of the dirty work to launch a daemon in the background!
  local daemonname=$1
  local class=$2

  # same rules as hadoop_start_secure_daemon except we
  # have some additional parameters
  local daemonpidfile=$3

  local daemonoutfile=$4

  # the pid file of the subprocess that spawned our
  # secure launcher
  local jsvcpidfile=$5

  # the output of the subprocess that spawned our secure
  # launcher
  local jsvcoutfile=$6

  local daemonerrfile=$7
  shift 7

  local counter

  hadoop_rotate_log "${jsvcoutfile}"

  hadoop_start_secure_daemon \
    "${daemonname}" \
    "${class}" \
    "${daemonpidfile}" \
    "${daemonoutfile}" \
    "${daemonerrfile}" \
    "${jsvcpidfile}" "$@" >> "${jsvcoutfile}" 2>&1 < /dev/null &

  # we need to avoid a race condition here
  # so let's wait for the fork to finish
  # before overriding with the daemonized pid
  (( counter=0 ))
  while [[ ! -f ${daemonpidfile} && ${counter} -le 5 ]]; do
    sleep 1
    (( counter++ ))
  done

  # this is for the jsvc wrapper pid creation
  #shellcheck disable=SC2086
  echo $! > "${jsvcpidfile}" 2>/dev/null
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot write ${daemonname} pid ${jsvcpidfile}."
  fi

  sleep 1
  #shellcheck disable=SC2086
  renice "${HADOOP_NICENESS}" $! >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot set priority of ${daemonname} process $!"
  fi
  if [[ -f "${daemonpidfile}" ]]; then
    #shellcheck disable=SC2046
    renice "${HADOOP_NICENESS}" $(cat "${daemonpidfile}" 2>/dev/null) >/dev/null 2>&1
    if [[ $? -gt 0 ]]; then
      hadoop_error "ERROR: Cannot set priority of ${daemonname} process $(cat "${daemonpidfile}" 2>/dev/null)"
    fi
  fi
  #shellcheck disable=SC2046
  disown %+ >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot disconnect ${daemonname} process $!"
  fi

  # capture the ulimit output
  su "${HADOOP_SECURE_USER}" -c 'bash -c "ulimit -a"' >> "${jsvcoutfile}" 2>&1

  #shellcheck disable=SC2086
  if ! ps -p $! >/dev/null 2>&1; then
    return 1
  fi
  return 0
}
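
# Illustrative (commented-out) sketch: the same privileged daemon, detached
# into the background via the wrapper; note the extra jsvc pid/out files,
# all of which are example paths:
#
#   hadoop_start_secure_daemon_wrapper datanode \
#     org.apache.hadoop.hdfs.server.datanode.SecureDataNodeStarter \
#     "${HADOOP_PID_DIR}/hadoop-datanode.pid" \
#     "${HADOOP_LOG_DIR}/hadoop-datanode.out" \
#     "${HADOOP_PID_DIR}/hadoop-datanode-jsvc.pid" \
#     "${HADOOP_LOG_DIR}/hadoop-datanode-jsvc.out" \
#     "${HADOOP_LOG_DIR}/hadoop-datanode.err"
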
## @description Stop the non-privileged `command` daemon that is
## @description running at `pidfile`.
## @audience public
## @stability stable
## @replaceable yes
## @param command
## @param pidfile
function hadoop_stop_daemon
{
  local cmd=$1
  local pidfile=$2
  shift 2

  local pid

  if [[ -f "${pidfile}" ]]; then
    pid=$(cat "$pidfile")

    kill "${pid}" >/dev/null 2>&1
    sleep "${HADOOP_STOP_TIMEOUT}"
    if kill -0 "${pid}" > /dev/null 2>&1; then
      hadoop_error "WARNING: ${cmd} did not stop gracefully after ${HADOOP_STOP_TIMEOUT} seconds: Trying to kill with kill -9"
      kill -9 "${pid}" >/dev/null 2>&1
    fi
    if ps -p "${pid}" > /dev/null 2>&1; then
      hadoop_error "ERROR: Unable to kill ${pid}"
    else
      rm -f "${pidfile}" >/dev/null 2>&1
    fi
  fi
}
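
# Illustrative (commented-out) usage: ask a daemon to stop, escalating to
# kill -9 after HADOOP_STOP_TIMEOUT seconds; the pid file path is an example:
#
#   hadoop_stop_daemon datanode "${HADOOP_PID_DIR}/hadoop-datanode.pid"
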
## @description Stop the privileged `command` daemon that is running
## @description at `daemonpidfile` and was launched with the wrapper
## @description at `wrapperpidfile`.
## @audience public
## @stability stable
## @replaceable yes
## @param command
## @param daemonpidfile
## @param wrapperpidfile
function hadoop_stop_secure_daemon
{
  local command=$1
  local daemonpidfile=$2
  local privpidfile=$3
  shift 3

  local ret

  hadoop_stop_daemon "${command}" "${daemonpidfile}"
  ret=$?
  rm -f "${daemonpidfile}" "${privpidfile}" 2>/dev/null
  return ${ret}
}
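
# Illustrative (commented-out) usage: stop a privileged daemon and clean up
# both its pid file and the jsvc wrapper's pid file (example paths):
#
#   hadoop_stop_secure_daemon datanode \
#     "${HADOOP_PID_DIR}/hadoop-datanode.pid" \
#     "${HADOOP_PID_DIR}/hadoop-datanode-jsvc.pid"
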
## @description Manage a non-privileged daemon.
## @audience private
## @stability evolving
## @replaceable yes
## @param [start|stop|status|default]
## @param command
## @param class
## @param daemonpidfile
## @param daemonoutfile
## @param [options]
function hadoop_daemon_handler
{
  local daemonmode=$1
  local daemonname=$2
  local class=$3
  local daemon_pidfile=$4
  local daemon_outfile=$5
  shift 5

  case ${daemonmode} in
    status)
      hadoop_status_daemon "${daemon_pidfile}"
      exit $?
    ;;

    stop)
      hadoop_stop_daemon "${daemonname}" "${daemon_pidfile}"
      exit $?
    ;;

    ##COMPAT -- older hadoops would also start daemons by default
    start|default)
      hadoop_verify_piddir
      hadoop_verify_logdir
      hadoop_status_daemon "${daemon_pidfile}"
      if [[ $? == 0 ]]; then
        hadoop_error "${daemonname} is running as process $(cat "${daemon_pidfile}"). Stop it first."
        exit 1
      else
        # stale pid file, so just remove it and continue on
        rm -f "${daemon_pidfile}" >/dev/null 2>&1
      fi

      ##COMPAT - differentiate between --daemon start and nothing
      # "nothing" shouldn't detach
      if [[ "$daemonmode" = "default" ]]; then
        hadoop_start_daemon "${daemonname}" "${class}" "${daemon_pidfile}" "$@"
      else
        hadoop_start_daemon_wrapper "${daemonname}" \
          "${class}" "${daemon_pidfile}" "${daemon_outfile}" "$@"
      fi
    ;;
  esac
}
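
# Illustrative (commented-out) sketch of how a launcher script might hand
# control to the handler after option parsing; the mode typically comes from
# HADOOP_DAEMON_MODE, and the class and paths are example values:
#
#   hadoop_daemon_handler "${HADOOP_DAEMON_MODE}" datanode \
#     org.apache.hadoop.hdfs.server.datanode.DataNode \
#     "${HADOOP_PID_DIR}/hadoop-datanode.pid" \
#     "${HADOOP_LOG_DIR}/hadoop-datanode.out" "$@"
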
## @description Manage a privileged daemon.
## @audience private
## @stability evolving
## @replaceable yes
## @param [start|stop|status|default]
## @param command
## @param class
## @param daemonpidfile
## @param daemonoutfile
## @param wrapperpidfile
## @param wrapperoutfile
## @param wrappererrfile
## @param [options]
function hadoop_secure_daemon_handler
{
  local daemonmode=$1
  local daemonname=$2
  local classname=$3
  local daemon_pidfile=$4
  local daemon_outfile=$5
  local priv_pidfile=$6
  local priv_outfile=$7
  local priv_errfile=$8
  shift 8

  case ${daemonmode} in
    status)
      hadoop_status_daemon "${daemon_pidfile}"
      exit $?
    ;;

    stop)
      hadoop_stop_secure_daemon "${daemonname}" \
        "${daemon_pidfile}" "${priv_pidfile}"
      exit $?
    ;;

    ##COMPAT -- older hadoops would also start daemons by default
    start|default)
      hadoop_verify_piddir
      hadoop_verify_logdir
      hadoop_status_daemon "${daemon_pidfile}"
      if [[ $? == 0 ]]; then
        hadoop_error "${daemonname} is running as process $(cat "${daemon_pidfile}"). Stop it first."
        exit 1
      else
        # stale pid file, so just remove it and continue on
        rm -f "${daemon_pidfile}" >/dev/null 2>&1
      fi

      ##COMPAT - differentiate between --daemon start and nothing
      # "nothing" shouldn't detach
      if [[ "${daemonmode}" = "default" ]]; then
        hadoop_start_secure_daemon "${daemonname}" "${classname}" \
          "${daemon_pidfile}" "${daemon_outfile}" \
          "${priv_errfile}" "${priv_pidfile}" "$@"
      else
        hadoop_start_secure_daemon_wrapper "${daemonname}" "${classname}" \
          "${daemon_pidfile}" "${daemon_outfile}" \
          "${priv_pidfile}" "${priv_outfile}" "${priv_errfile}" "$@"
      fi
    ;;
  esac
}
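
# Illustrative (commented-out) privileged equivalent of the call above,
# with the three extra jsvc wrapper files (example paths):
#
#   hadoop_secure_daemon_handler "${HADOOP_DAEMON_MODE}" datanode \
#     org.apache.hadoop.hdfs.server.datanode.SecureDataNodeStarter \
#     "${HADOOP_PID_DIR}/hadoop-datanode.pid" \
#     "${HADOOP_LOG_DIR}/hadoop-datanode.out" \
#     "${HADOOP_PID_DIR}/hadoop-datanode-jsvc.pid" \
#     "${HADOOP_LOG_DIR}/hadoop-datanode-jsvc.out" \
#     "${HADOOP_LOG_DIR}/hadoop-datanode.err" "$@"
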
## @description Verify that ${USER} is allowed to execute the
## @description given subcommand.
## @audience public
## @stability stable
## @replaceable yes
## @param subcommand
## @return will exit on failure conditions
function hadoop_verify_user
{
  local command=$1
  local uservar="HADOOP_${command}_USER"

  if [[ -n ${!uservar} ]]; then
    if [[ ${!uservar} != "${USER}" ]]; then
      hadoop_error "ERROR: ${command} can only be executed by ${!uservar}."
      exit 1
    fi
  fi
}
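
# Illustrative (commented-out) usage: with HADOOP_datanode_USER=hdfs in the
# environment, only the hdfs user may proceed; anyone else exits with an
# error. The variable name is derived from the subcommand verbatim, case
# included:
#
#   HADOOP_datanode_USER=hdfs
#   hadoop_verify_user datanode
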
## @description Perform the 'hadoop classpath', etc. subcommand with the
## @description given parameters
## @audience private
## @stability evolving
## @replaceable yes
## @param [parameters]
## @return will print & exit with no params
function hadoop_do_classpath_subcommand
{
  if [[ "$#" -gt 1 ]]; then
    eval "$1"=org.apache.hadoop.util.Classpath
  else
    hadoop_finalize
    echo "${CLASSPATH}"
    exit 0
  fi
}
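
# Illustrative (commented-out) usage from a caller's case statement: with no
# extra parameters the function prints the classpath and exits; with extra
# parameters it assigns the Classpath utility class to the named variable
# (CLASS here) for later execution:
#
#   hadoop_do_classpath_subcommand CLASS "$@"
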
## @description generic shell script option parser. Sets
## @description HADOOP_PARSE_COUNTER to the number of arguments
## @description the caller should shift
## @audience private
## @stability evolving
## @replaceable yes
## @param [parameters, typically "$@"]
function hadoop_parse_args
{
  HADOOP_DAEMON_MODE="default"
  HADOOP_PARSE_COUNTER=0

  # not all of the options supported here are supported by all commands
  # however these are:
  hadoop_add_option "--config dir" "Hadoop config directory"
  hadoop_add_option "--debug" "turn on shell script debug mode"
  hadoop_add_option "--help" "usage information"

  while true; do
    hadoop_debug "hadoop_parse_args: processing $1"
    case $1 in
      --buildpaths)
        # shellcheck disable=SC2034
        HADOOP_ENABLE_BUILD_PATHS=true
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+1))
      ;;
      --config)
        shift
        confdir=$1
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
        if [[ -d "${confdir}" ]]; then
          # shellcheck disable=SC2034
          HADOOP_CONF_DIR="${confdir}"
        elif [[ -z "${confdir}" ]]; then
          hadoop_error "ERROR: No parameter provided for --config "
          hadoop_exit_with_usage 1
        else
          hadoop_error "ERROR: Cannot find configuration directory \"${confdir}\""
          hadoop_exit_with_usage 1
        fi
      ;;
      --daemon)
        shift
        HADOOP_DAEMON_MODE=$1
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
        if [[ -z "${HADOOP_DAEMON_MODE}" || \
          ! "${HADOOP_DAEMON_MODE}" =~ ^st(art|op|atus)$ ]]; then
          hadoop_error "ERROR: --daemon must be followed by either \"start\", \"stop\", or \"status\"."
          hadoop_exit_with_usage 1
        fi
      ;;
      --debug)
        shift
        # shellcheck disable=SC2034
        HADOOP_SHELL_SCRIPT_DEBUG=true
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+1))
      ;;
      --help|-help|-h|help|--h|--\?|-\?|\?)
        hadoop_exit_with_usage 0
      ;;
      --hostnames)
        shift
        # shellcheck disable=SC2034
        HADOOP_SLAVE_NAMES="$1"
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
      ;;
      --hosts)
        shift
        hadoop_populate_slaves_file "$1"
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
      ;;
      --loglevel)
        shift
        # shellcheck disable=SC2034
        HADOOP_LOGLEVEL="$1"
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
      ;;
      --slaves)
        shift
        # shellcheck disable=SC2034
        HADOOP_SLAVE_MODE=true
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+1))
      ;;
      *)
        break
      ;;
    esac
  done

  hadoop_debug "hadoop_parse: asking caller to skip ${HADOOP_PARSE_COUNTER}"
}
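
# Illustrative (commented-out) usage from a caller: parse the generic
# options, then shift them away before dispatching on the subcommand:
#
#   hadoop_parse_args "$@"
#   shift "${HADOOP_PARSE_COUNTER}"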