# hadoop-functions.sh
  1. #!/usr/bin/env bash
  2. # Licensed to the Apache Software Foundation (ASF) under one or more
  3. # contributor license agreements. See the NOTICE file distributed with
  4. # this work for additional information regarding copyright ownership.
  5. # The ASF licenses this file to You under the Apache License, Version 2.0
  6. # (the "License"); you may not use this file except in compliance with
  7. # the License. You may obtain a copy of the License at
  8. #
  9. # http://www.apache.org/licenses/LICENSE-2.0
  10. #
  11. # Unless required by applicable law or agreed to in writing, software
  12. # distributed under the License is distributed on an "AS IS" BASIS,
  13. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14. # See the License for the specific language governing permissions and
  15. # limitations under the License.
  16. ## @description Print a message to stderr
  17. ## @audience public
  18. ## @stability stable
  19. ## @replaceable no
  20. ## @param string
  21. function hadoop_error
  22. {
  23. echo "$*" 1>&2
  24. }
  25. ## @description Print a message to stderr if --debug is turned on
  26. ## @audience public
  27. ## @stability stable
  28. ## @replaceable no
  29. ## @param string
  30. function hadoop_debug
  31. {
  32. if [[ -n "${HADOOP_SHELL_SCRIPT_DEBUG}" ]]; then
  33. echo "DEBUG: $*" 1>&2
  34. fi
  35. }
  36. ## @description Replace `oldvar` with `newvar` if `oldvar` exists.
  37. ## @audience public
  38. ## @stability stable
  39. ## @replaceable yes
  40. ## @param oldvar
  41. ## @param newvar
  42. function hadoop_deprecate_envvar
  43. {
  44. local oldvar=$1
  45. local newvar=$2
  46. local oldval=${!oldvar}
  47. local newval=${!newvar}
  48. if [[ -n "${oldval}" ]]; then
  49. hadoop_error "WARNING: ${oldvar} has been replaced by ${newvar}. Using value of ${oldvar}."
  50. # shellcheck disable=SC2086
  51. eval ${newvar}=\"${oldval}\"
  52. # shellcheck disable=SC2086
  53. newval=${oldval}
  54. # shellcheck disable=SC2086
  55. eval ${newvar}=\"${newval}\"
  56. fi
  57. }
## @description  Bootstraps the Hadoop shell environment
## @audience     private
## @stability    evolving
## @replaceable  no
function hadoop_bootstrap
{
  # the root of the Hadoop installation
  # See HADOOP-6255 for the expected directory structure layout
  # By now, HADOOP_LIBEXEC_DIR should have been defined upstream
  # We can piggyback off of that to figure out where the default
  # HADOOP_PREFIX should be. This allows us to run without
  # HADOOP_PREFIX ever being defined by a human! As a consequence
  # HADOOP_LIBEXEC_DIR now becomes perhaps the single most powerful
  # env var within Hadoop.
  if [[ -z "${HADOOP_LIBEXEC_DIR}" ]]; then
    hadoop_error "HADOOP_LIBEXEC_DIR is not defined. Exiting."
    exit 1
  fi
  # resolve symlinks so the prefix is a physical path
  HADOOP_DEFAULT_PREFIX=$(cd -P -- "${HADOOP_LIBEXEC_DIR}/.." >/dev/null && pwd -P)
  HADOOP_PREFIX=${HADOOP_PREFIX:-$HADOOP_DEFAULT_PREFIX}
  export HADOOP_PREFIX

  #
  # short-cuts. vendors may redefine these as well, preferably
  # in hadoop-layouts.sh
  # (all of these are relative to HADOOP_PREFIX unless overridden)
  #
  HADOOP_COMMON_DIR=${HADOOP_COMMON_DIR:-"share/hadoop/common"}
  HADOOP_COMMON_LIB_JARS_DIR=${HADOOP_COMMON_LIB_JARS_DIR:-"share/hadoop/common/lib"}
  HADOOP_COMMON_LIB_NATIVE_DIR=${HADOOP_COMMON_LIB_NATIVE_DIR:-"lib/native"}
  HDFS_DIR=${HDFS_DIR:-"share/hadoop/hdfs"}
  HDFS_LIB_JARS_DIR=${HDFS_LIB_JARS_DIR:-"share/hadoop/hdfs/lib"}
  YARN_DIR=${YARN_DIR:-"share/hadoop/yarn"}
  YARN_LIB_JARS_DIR=${YARN_LIB_JARS_DIR:-"share/hadoop/yarn/lib"}
  MAPRED_DIR=${MAPRED_DIR:-"share/hadoop/mapreduce"}
  MAPRED_LIB_JARS_DIR=${MAPRED_LIB_JARS_DIR:-"share/hadoop/mapreduce/lib"}

  # setup a default TOOL_PATH (deliberately unquoted glob; expanded later)
  TOOL_PATH=${TOOL_PATH:-${HADOOP_PREFIX}/share/hadoop/tools/lib/*}

  export HADOOP_OS_TYPE=${HADOOP_OS_TYPE:-$(uname -s)}

  # defaults
  export HADOOP_OPTS=${HADOOP_OPTS:-"-Djava.net.preferIPv4Stack=true"}
  hadoop_debug "Initial HADOOP_OPTS=${HADOOP_OPTS}"
}
  99. ## @description Locate Hadoop's configuration directory
  100. ## @audience private
  101. ## @stability evolving
  102. ## @replaceable no
  103. function hadoop_find_confdir
  104. {
  105. local conf_dir
  106. # An attempt at compatibility with some Hadoop 1.x
  107. # installs.
  108. if [[ -e "${HADOOP_PREFIX}/conf/hadoop-env.sh" ]]; then
  109. conf_dir="conf"
  110. else
  111. conf_dir="etc/hadoop"
  112. fi
  113. export HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-${HADOOP_PREFIX}/${conf_dir}}"
  114. hadoop_debug "HADOOP_CONF_DIR=${HADOOP_CONF_DIR}"
  115. }
  116. ## @description Validate ${HADOOP_CONF_DIR}
  117. ## @audience public
  118. ## @stability stable
  119. ## @replaceable yes
  120. ## @return will exit on failure conditions
  121. function hadoop_verify_confdir
  122. {
  123. # Check only log4j.properties by default.
  124. # --loglevel does not work without logger settings in log4j.log4j.properties.
  125. if [[ ! -f "${HADOOP_CONF_DIR}/log4j.properties" ]]; then
  126. hadoop_error "WARNING: log4j.properties is not found. HADOOP_CONF_DIR may be incomplete."
  127. fi
  128. }
  129. ## @description Import the hadoop-env.sh settings
  130. ## @audience private
  131. ## @stability evolving
  132. ## @replaceable no
  133. function hadoop_exec_hadoopenv
  134. {
  135. if [[ -z "${HADOOP_ENV_PROCESSED}" ]]; then
  136. if [[ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]]; then
  137. export HADOOP_ENV_PROCESSED=true
  138. . "${HADOOP_CONF_DIR}/hadoop-env.sh"
  139. fi
  140. fi
  141. }
  142. ## @description Import the replaced functions
  143. ## @audience private
  144. ## @stability evolving
  145. ## @replaceable no
  146. function hadoop_exec_userfuncs
  147. {
  148. if [[ -e "${HADOOP_CONF_DIR}/hadoop-user-functions.sh" ]]; then
  149. . "${HADOOP_CONF_DIR}/hadoop-user-functions.sh"
  150. fi
  151. }
  152. ## @description Read the user's settings. This provides for users to
  153. ## @description override and/or append hadoop-env.sh. It is not meant
  154. ## @description as a complete system override.
  155. ## @audience private
  156. ## @stability evolving
  157. ## @replaceable yes
  158. function hadoop_exec_hadooprc
  159. {
  160. if [[ -f "${HOME}/.hadooprc" ]]; then
  161. hadoop_debug "Applying the user's .hadooprc"
  162. . "${HOME}/.hadooprc"
  163. fi
  164. }
  165. ## @description Import shellprofile.d content
  166. ## @audience private
  167. ## @stability evolving
  168. ## @replaceable yes
  169. function hadoop_import_shellprofiles
  170. {
  171. local i
  172. local files1
  173. local files2
  174. if [[ -d "${HADOOP_LIBEXEC_DIR}/shellprofile.d" ]]; then
  175. files1=(${HADOOP_LIBEXEC_DIR}/shellprofile.d/*.sh)
  176. else
  177. hadoop_error "WARNING: ${HADOOP_LIBEXEC_DIR}/shellprofile.d doesn't exist. Functionality may not work."
  178. fi
  179. if [[ -d "${HADOOP_CONF_DIR}/shellprofile.d" ]]; then
  180. files2=(${HADOOP_CONF_DIR}/shellprofile.d/*.sh)
  181. fi
  182. for i in "${files1[@]}" "${files2[@]}"
  183. do
  184. if [[ -n "${i}" ]]; then
  185. hadoop_debug "Profiles: importing ${i}"
  186. . "${i}"
  187. fi
  188. done
  189. }
  190. ## @description Initialize the registered shell profiles
  191. ## @audience private
  192. ## @stability evolving
  193. ## @replaceable yes
  194. function hadoop_shellprofiles_init
  195. {
  196. local i
  197. for i in ${HADOOP_SHELL_PROFILES}
  198. do
  199. if declare -F _${i}_hadoop_init >/dev/null ; then
  200. hadoop_debug "Profiles: ${i} init"
  201. # shellcheck disable=SC2086
  202. _${i}_hadoop_init
  203. fi
  204. done
  205. }
  206. ## @description Apply the shell profile classpath additions
  207. ## @audience private
  208. ## @stability evolving
  209. ## @replaceable yes
  210. function hadoop_shellprofiles_classpath
  211. {
  212. local i
  213. for i in ${HADOOP_SHELL_PROFILES}
  214. do
  215. if declare -F _${i}_hadoop_classpath >/dev/null ; then
  216. hadoop_debug "Profiles: ${i} classpath"
  217. # shellcheck disable=SC2086
  218. _${i}_hadoop_classpath
  219. fi
  220. done
  221. }
  222. ## @description Apply the shell profile native library additions
  223. ## @audience private
  224. ## @stability evolving
  225. ## @replaceable yes
  226. function hadoop_shellprofiles_nativelib
  227. {
  228. local i
  229. for i in ${HADOOP_SHELL_PROFILES}
  230. do
  231. if declare -F _${i}_hadoop_nativelib >/dev/null ; then
  232. hadoop_debug "Profiles: ${i} nativelib"
  233. # shellcheck disable=SC2086
  234. _${i}_hadoop_nativelib
  235. fi
  236. done
  237. }
  238. ## @description Apply the shell profile final configuration
  239. ## @audience private
  240. ## @stability evolving
  241. ## @replaceable yes
  242. function hadoop_shellprofiles_finalize
  243. {
  244. local i
  245. for i in ${HADOOP_SHELL_PROFILES}
  246. do
  247. if declare -F _${i}_hadoop_finalize >/dev/null ; then
  248. hadoop_debug "Profiles: ${i} finalize"
  249. # shellcheck disable=SC2086
  250. _${i}_hadoop_finalize
  251. fi
  252. done
  253. }
## @description  Initialize the Hadoop shell environment, now that
## @description  user settings have been imported
## @audience     private
## @stability    evolving
## @replaceable  no
function hadoop_basic_init
{
  # Some of these are also set in hadoop-env.sh.
  # we still set them here just in case hadoop-env.sh is
  # broken in some way, set up defaults, etc.
  #
  # but it is important to note that if you update these
  # you also need to update hadoop-env.sh as well!!!

  CLASSPATH=""
  hadoop_debug "Initialize CLASSPATH"

  # HADOOP_PREFIX doubles as HADOOP_COMMON_HOME when the expected
  # subdirectory layout is present
  if [[ -z "${HADOOP_COMMON_HOME}" ]] &&
     [[ -d "${HADOOP_PREFIX}/${HADOOP_COMMON_DIR}" ]]; then
    export HADOOP_COMMON_HOME="${HADOOP_PREFIX}"
  fi

  # default policy file for service-level authorization
  HADOOP_POLICYFILE=${HADOOP_POLICYFILE:-"hadoop-policy.xml"}

  # define HADOOP_HDFS_HOME
  if [[ -z "${HADOOP_HDFS_HOME}" ]] &&
     [[ -d "${HADOOP_PREFIX}/${HDFS_DIR}" ]]; then
    export HADOOP_HDFS_HOME="${HADOOP_PREFIX}"
  fi

  # define HADOOP_YARN_HOME
  if [[ -z "${HADOOP_YARN_HOME}" ]] &&
     [[ -d "${HADOOP_PREFIX}/${YARN_DIR}" ]]; then
    export HADOOP_YARN_HOME="${HADOOP_PREFIX}"
  fi

  # define HADOOP_MAPRED_HOME
  if [[ -z "${HADOOP_MAPRED_HOME}" ]] &&
     [[ -d "${HADOOP_PREFIX}/${MAPRED_DIR}" ]]; then
    export HADOOP_MAPRED_HOME="${HADOOP_PREFIX}"
  fi

  # logging/daemon/ssh defaults; every one of these may already be
  # set by the environment or hadoop-env.sh and is kept if so
  HADOOP_IDENT_STRING=${HADOOP_IDENT_STRING:-$USER}
  HADOOP_LOG_DIR=${HADOOP_LOG_DIR:-"${HADOOP_PREFIX}/logs"}
  HADOOP_LOGFILE=${HADOOP_LOGFILE:-hadoop.log}
  HADOOP_LOGLEVEL=${HADOOP_LOGLEVEL:-INFO}
  HADOOP_NICENESS=${HADOOP_NICENESS:-0}
  HADOOP_STOP_TIMEOUT=${HADOOP_STOP_TIMEOUT:-5}
  HADOOP_PID_DIR=${HADOOP_PID_DIR:-/tmp}
  HADOOP_ROOT_LOGGER=${HADOOP_ROOT_LOGGER:-${HADOOP_LOGLEVEL},console}
  HADOOP_DAEMON_ROOT_LOGGER=${HADOOP_DAEMON_ROOT_LOGGER:-${HADOOP_LOGLEVEL},RFA}
  HADOOP_SECURITY_LOGGER=${HADOOP_SECURITY_LOGGER:-INFO,NullAppender}
  HADOOP_SSH_OPTS=${HADOOP_SSH_OPTS:-"-o BatchMode=yes -o StrictHostKeyChecking=no -o ConnectTimeout=10s"}
  HADOOP_SECURE_LOG_DIR=${HADOOP_SECURE_LOG_DIR:-${HADOOP_LOG_DIR}}
  HADOOP_SECURE_PID_DIR=${HADOOP_SECURE_PID_DIR:-${HADOOP_PID_DIR}}
  HADOOP_SSH_PARALLEL=${HADOOP_SSH_PARALLEL:-10}
}
  305. ## @description Set the slave support information to the contents
  306. ## @description of `filename`
  307. ## @audience public
  308. ## @stability stable
  309. ## @replaceable no
  310. ## @param filename
  311. ## @return will exit if file does not exist
  312. function hadoop_populate_slaves_file
  313. {
  314. local slavesfile=$1
  315. shift
  316. if [[ -f "${slavesfile}" ]]; then
  317. # shellcheck disable=2034
  318. HADOOP_SLAVES="${slavesfile}"
  319. elif [[ -f "${HADOOP_CONF_DIR}/${slavesfile}" ]]; then
  320. # shellcheck disable=2034
  321. HADOOP_SLAVES="${HADOOP_CONF_DIR}/${slavesfile}"
  322. else
  323. hadoop_error "ERROR: Cannot find hosts file \"${slavesfile}\""
  324. hadoop_exit_with_usage 1
  325. fi
  326. }
  327. ## @description Rotates the given `file` until `number` of
  328. ## @description files exist.
  329. ## @audience public
  330. ## @stability stable
  331. ## @replaceable no
  332. ## @param filename
  333. ## @param [number]
  334. ## @return $? will contain last mv's return value
  335. function hadoop_rotate_log
  336. {
  337. #
  338. # Users are likely to replace this one for something
  339. # that gzips or uses dates or who knows what.
  340. #
  341. # be aware that &1 and &2 might go through here
  342. # so don't do anything too crazy...
  343. #
  344. local log=$1;
  345. local num=${2:-5};
  346. if [[ -f "${log}" ]]; then # rotate logs
  347. while [[ ${num} -gt 1 ]]; do
  348. #shellcheck disable=SC2086
  349. let prev=${num}-1
  350. if [[ -f "${log}.${prev}" ]]; then
  351. mv "${log}.${prev}" "${log}.${num}"
  352. fi
  353. num=${prev}
  354. done
  355. mv "${log}" "${log}.${num}"
  356. fi
  357. }
  358. ## @description Via ssh, log into `hostname` and run `command`
  359. ## @audience private
  360. ## @stability evolving
  361. ## @replaceable yes
  362. ## @param hostname
  363. ## @param command
  364. ## @param [...]
  365. function hadoop_actual_ssh
  366. {
  367. # we are passing this function to xargs
  368. # should get hostname followed by rest of command line
  369. local slave=$1
  370. shift
  371. # shellcheck disable=SC2086
  372. ssh ${HADOOP_SSH_OPTS} ${slave} $"${@// /\\ }" 2>&1 | sed "s/^/$slave: /"
  373. }
## @description  Connect to ${HADOOP_SLAVES} or ${HADOOP_SLAVE_NAMES}
## @description  and execute command.
## @audience     private
## @stability    evolving
## @replaceable  yes
## @param        command
## @param        [...]
function hadoop_connect_to_hosts
{
  # shellcheck disable=SC2124
  local params="$@"

  #
  # ssh (or whatever) to a host
  #
  # User can specify hostnames or a file where the hostnames are (not both)
  if [[ -n "${HADOOP_SLAVES}" && -n "${HADOOP_SLAVE_NAMES}" ]] ; then
    hadoop_error "ERROR: Both HADOOP_SLAVES and HADOOP_SLAVE_NAME were defined. Aborting."
    exit 1
  fi

  # SLAVE_NAMES / SLAVE_FILE are intentionally NOT local: they are
  # shared with hadoop_connect_to_hosts_without_pdsh below
  if [[ -n "${HADOOP_SLAVE_NAMES}" ]] ; then
    SLAVE_NAMES=${HADOOP_SLAVE_NAMES}
  else
    SLAVE_FILE=${HADOOP_SLAVES:-${HADOOP_CONF_DIR}/slaves}
  fi

  # if pdsh is available, let's use it. otherwise default
  # to a loop around ssh. (ugh)
  if [[ -e '/usr/bin/pdsh' ]]; then
    if [[ -z "${HADOOP_SLAVE_NAMES}" ]] ; then
      # if we were given a file, just let pdsh deal with it.
      # shellcheck disable=SC2086
      PDSH_SSH_ARGS_APPEND="${HADOOP_SSH_OPTS}" pdsh \
        -f "${HADOOP_SSH_PARALLEL}" -w ^"${SLAVE_FILE}" $"${@// /\\ }" 2>&1
    else
      # no spaces allowed in the pdsh arg host list
      # shellcheck disable=SC2086
      SLAVE_NAMES=$(echo ${SLAVE_NAMES} | tr -s ' ' ,)
      PDSH_SSH_ARGS_APPEND="${HADOOP_SSH_OPTS}" pdsh \
        -f "${HADOOP_SSH_PARALLEL}" -w "${SLAVE_NAMES}" $"${@// /\\ }" 2>&1
    fi
  else
    if [[ -z "${SLAVE_NAMES}" ]]; then
      # strip comments and blank lines from the slaves file
      SLAVE_NAMES=$(sed 's/#.*$//;/^$/d' "${SLAVE_FILE}")
    fi
    hadoop_connect_to_hosts_without_pdsh "${params}"
  fi
}
  420. ## @description Connect to ${SLAVE_NAMES} and execute command
  421. ## @description under the environment which does not support pdsh.
  422. ## @audience private
  423. ## @stability evolving
  424. ## @replaceable yes
  425. ## @param command
  426. ## @param [...]
  427. function hadoop_connect_to_hosts_without_pdsh
  428. {
  429. # shellcheck disable=SC2124
  430. local params="$@"
  431. local slaves=(${SLAVE_NAMES})
  432. for (( i = 0; i < ${#slaves[@]}; i++ ))
  433. do
  434. if (( i != 0 && i % HADOOP_SSH_PARALLEL == 0 )); then
  435. wait
  436. fi
  437. # shellcheck disable=SC2086
  438. hadoop_actual_ssh "${slaves[$i]}" ${params} &
  439. done
  440. wait
  441. }
  442. ## @description Utility routine to handle --slaves mode
  443. ## @audience private
  444. ## @stability evolving
  445. ## @replaceable yes
  446. ## @param commandarray
  447. function hadoop_common_slave_mode_execute
  448. {
  449. #
  450. # input should be the command line as given by the user
  451. # in the form of an array
  452. #
  453. local argv=("$@")
  454. # if --slaves is still on the command line, remove it
  455. # to prevent loops
  456. # Also remove --hostnames and --hosts along with arg values
  457. local argsSize=${#argv[@]};
  458. for (( i = 0; i < $argsSize; i++ ))
  459. do
  460. if [[ "${argv[$i]}" =~ ^--slaves$ ]]; then
  461. unset argv[$i]
  462. elif [[ "${argv[$i]}" =~ ^--hostnames$ ]] ||
  463. [[ "${argv[$i]}" =~ ^--hosts$ ]]; then
  464. unset argv[$i];
  465. let i++;
  466. unset argv[$i];
  467. fi
  468. done
  469. hadoop_connect_to_hosts -- "${argv[@]}"
  470. }
  471. ## @description Verify that a shell command was passed a valid
  472. ## @description class name
  473. ## @audience public
  474. ## @stability stable
  475. ## @replaceable yes
  476. ## @param classname
  477. ## @return 0 = success
  478. ## @return 1 = failure w/user message
  479. function hadoop_validate_classname
  480. {
  481. local class=$1
  482. shift 1
  483. if [[ ! ${class} =~ \. ]]; then
  484. # assuming the arg is typo of command if it does not conatain ".".
  485. # class belonging to no package is not allowed as a result.
  486. hadoop_error "ERROR: ${class} is not COMMAND nor fully qualified CLASSNAME."
  487. return 1
  488. fi
  489. return 0
  490. }
## @description  Append the `appendstring` if `checkstring` is not
## @description  present in the given `envvar`
## @audience     public
## @stability    stable
## @replaceable  yes
## @param        envvar
## @param        checkstring
## @param        appendstring
function hadoop_add_param
{
  #
  # general param dedupe..
  # $1 is what we are adding to
  # $2 is the name of what we want to add (key)
  # $3 is the key+value of what we're adding
  #
  # doing it this way allows us to support all sorts of
  # different syntaxes, just so long as they are space
  # delimited
  #
  # NOTE: ${!1} =~ $2 is a regex match against the whole current
  # value, so a key that happens to match part of an existing
  # entry will be declined as a duplicate
  if [[ ! ${!1} =~ $2 ]] ; then
    # shellcheck disable=SC2086
    eval $1="'${!1} $3'"
    hadoop_debug "$1 accepted $3"
  else
    hadoop_debug "$1 declined $3"
  fi
}
  519. ## @description Register the given `shellprofile` to the Hadoop
  520. ## @description shell subsystem
  521. ## @audience public
  522. ## @stability stable
  523. ## @replaceable yes
  524. ## @param shellprofile
  525. function hadoop_add_profile
  526. {
  527. # shellcheck disable=SC2086
  528. hadoop_add_param HADOOP_SHELL_PROFILES $1 $1
  529. }
  530. ## @description Add a file system object (directory, file,
  531. ## @description wildcard, ...) to the classpath. Optionally provide
  532. ## @description a hint as to where in the classpath it should go.
  533. ## @audience public
  534. ## @stability stable
  535. ## @replaceable yes
  536. ## @param object
  537. ## @param [before|after]
  538. ## @return 0 = success (added or duplicate)
  539. ## @return 1 = failure (doesn't exist or some other reason)
  540. function hadoop_add_classpath
  541. {
  542. # However, with classpath (& JLP), we can do dedupe
  543. # along with some sanity checking (e.g., missing directories)
  544. # since we have a better idea of what is legal
  545. #
  546. # for wildcard at end, we can
  547. # at least check the dir exists
  548. if [[ $1 =~ ^.*\*$ ]]; then
  549. local mp=$(dirname "$1")
  550. if [[ ! -d "${mp}" ]]; then
  551. hadoop_debug "Rejected CLASSPATH: $1 (not a dir)"
  552. return 1
  553. fi
  554. # no wildcard in the middle, so check existence
  555. # (doesn't matter *what* it is)
  556. elif [[ ! $1 =~ ^.*\*.*$ ]] && [[ ! -e "$1" ]]; then
  557. hadoop_debug "Rejected CLASSPATH: $1 (does not exist)"
  558. return 1
  559. fi
  560. if [[ -z "${CLASSPATH}" ]]; then
  561. CLASSPATH=$1
  562. hadoop_debug "Initial CLASSPATH=$1"
  563. elif [[ ":${CLASSPATH}:" != *":$1:"* ]]; then
  564. if [[ "$2" = "before" ]]; then
  565. CLASSPATH="$1:${CLASSPATH}"
  566. hadoop_debug "Prepend CLASSPATH: $1"
  567. else
  568. CLASSPATH+=:$1
  569. hadoop_debug "Append CLASSPATH: $1"
  570. fi
  571. else
  572. hadoop_debug "Dupe CLASSPATH: $1"
  573. fi
  574. return 0
  575. }
  576. ## @description Add a file system object (directory, file,
  577. ## @description wildcard, ...) to the colonpath. Optionally provide
  578. ## @description a hint as to where in the colonpath it should go.
  579. ## @description Prior to adding, objects are checked for duplication
  580. ## @description and check for existence. Many other functions use
  581. ## @description this function as their base implementation
  582. ## @description including `hadoop_add_javalibpath` and `hadoop_add_ldlibpath`.
  583. ## @audience public
  584. ## @stability stable
  585. ## @replaceable yes
  586. ## @param envvar
  587. ## @param object
  588. ## @param [before|after]
  589. ## @return 0 = success (added or duplicate)
  590. ## @return 1 = failure (doesn't exist or some other reason)
  591. function hadoop_add_colonpath
  592. {
  593. # this is CLASSPATH, JLP, etc but with dedupe but no
  594. # other checking
  595. if [[ -d "${2}" ]] && [[ ":${!1}:" != *":$2:"* ]]; then
  596. if [[ -z "${!1}" ]]; then
  597. # shellcheck disable=SC2086
  598. eval $1="'$2'"
  599. hadoop_debug "Initial colonpath($1): $2"
  600. elif [[ "$3" = "before" ]]; then
  601. # shellcheck disable=SC2086
  602. eval $1="'$2:${!1}'"
  603. hadoop_debug "Prepend colonpath($1): $2"
  604. else
  605. # shellcheck disable=SC2086
  606. eval $1+="'$2'"
  607. hadoop_debug "Append colonpath($1): $2"
  608. fi
  609. return 0
  610. fi
  611. hadoop_debug "Rejected colonpath($1): $2"
  612. return 1
  613. }
  614. ## @description Add a file system object (directory, file,
  615. ## @description wildcard, ...) to the Java JNI path. Optionally
  616. ## @description provide a hint as to where in the Java JNI path
  617. ## @description it should go.
  618. ## @audience public
  619. ## @stability stable
  620. ## @replaceable yes
  621. ## @param object
  622. ## @param [before|after]
  623. ## @return 0 = success (added or duplicate)
  624. ## @return 1 = failure (doesn't exist or some other reason)
  625. function hadoop_add_javalibpath
  626. {
  627. # specialized function for a common use case
  628. hadoop_add_colonpath JAVA_LIBRARY_PATH "$1" "$2"
  629. }
  630. ## @description Add a file system object (directory, file,
  631. ## @description wildcard, ...) to the LD_LIBRARY_PATH. Optionally
  632. ## @description provide a hint as to where in the LD_LIBRARY_PATH
  633. ## @description it should go.
  634. ## @audience public
  635. ## @stability stable
  636. ## @replaceable yes
  637. ## @param object
  638. ## @param [before|after]
  639. ## @return 0 = success (added or duplicate)
  640. ## @return 1 = failure (doesn't exist or some other reason)
  641. function hadoop_add_ldlibpath
  642. {
  643. # specialized function for a common use case
  644. hadoop_add_colonpath LD_LIBRARY_PATH "$1" "$2"
  645. # note that we export this
  646. export LD_LIBRARY_PATH
  647. }
  648. ## @description Add the common/core Hadoop components to the
  649. ## @description environment
  650. ## @audience private
  651. ## @stability evolving
  652. ## @replaceable yes
  653. function hadoop_add_common_to_classpath
  654. {
  655. #
  656. # get all of the common jars+config in the path
  657. #
  658. # developers
  659. if [[ -n "${HADOOP_ENABLE_BUILD_PATHS}" ]]; then
  660. hadoop_add_classpath "${HADOOP_COMMON_HOME}/hadoop-common/target/classes"
  661. fi
  662. if [[ -d "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_DIR}/webapps" ]]; then
  663. hadoop_add_classpath "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_DIR}"
  664. fi
  665. hadoop_add_classpath "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_LIB_JARS_DIR}"'/*'
  666. hadoop_add_classpath "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_DIR}"'/*'
  667. }
  668. ## @description Add the user's custom classpath settings to the
  669. ## @description environment
  670. ## @audience private
  671. ## @stability evolving
  672. ## @replaceable yes
  673. function hadoop_add_to_classpath_userpath
  674. {
  675. # Add the user-specified HADOOP_CLASSPATH to the
  676. # official CLASSPATH env var if HADOOP_USE_CLIENT_CLASSLOADER
  677. # is not set.
  678. # Add it first or last depending on if user has
  679. # set env-var HADOOP_USER_CLASSPATH_FIRST
  680. # we'll also dedupe it, because we're cool like that.
  681. #
  682. local c
  683. local array
  684. local i
  685. local j
  686. let c=0
  687. if [[ -n "${HADOOP_CLASSPATH}" ]]; then
  688. # I wonder if Java runs on VMS.
  689. for i in $(echo "${HADOOP_CLASSPATH}" | tr : '\n'); do
  690. array[$c]=$i
  691. let c+=1
  692. done
  693. let j=c-1
  694. if [[ -z "${HADOOP_USE_CLIENT_CLASSLOADER}" ]]; then
  695. if [[ -z "${HADOOP_USER_CLASSPATH_FIRST}" ]]; then
  696. for ((i=j; i>=0; i--)); do
  697. hadoop_add_classpath "${array[$i]}" before
  698. done
  699. else
  700. for ((i=0; i<=j; i++)); do
  701. hadoop_add_classpath "${array[$i]}" after
  702. done
  703. fi
  704. fi
  705. fi
  706. }
## @description  Routine to configure any OS-specific settings.
## @audience     public
## @stability    stable
## @replaceable  yes
## @return       may exit on failure conditions
function hadoop_os_tricks
{
  local bindv6only

  HADOOP_IS_CYGWIN=false
  case ${HADOOP_OS_TYPE} in
    Darwin)
      # derive JAVA_HOME via the platform helper if the user
      # did not provide one
      if [[ -z "${JAVA_HOME}" ]]; then
        if [[ -x /usr/libexec/java_home ]]; then
          export JAVA_HOME="$(/usr/libexec/java_home)"
        else
          export JAVA_HOME=/Library/Java/Home
        fi
      fi
    ;;
    Linux)
      bindv6only=$(/sbin/sysctl -n net.ipv6.bindv6only 2> /dev/null)

      # NOTE! HADOOP_ALLOW_IPV6 is a developer hook. We leave it
      # undocumented in hadoop-env.sh because we don't want users to
      # shoot themselves in the foot while devs make IPv6 work.
      if [[ -n "${bindv6only}" ]] &&
         [[ "${bindv6only}" -eq "1" ]] &&
         [[ "${HADOOP_ALLOW_IPV6}" != "yes" ]]; then
        hadoop_error "ERROR: \"net.ipv6.bindv6only\" is set to 1 "
        hadoop_error "ERROR: Hadoop networking could be broken. Aborting."
        hadoop_error "ERROR: For more info: http://wiki.apache.org/hadoop/HadoopIPv6"
        exit 1
      fi

      # Newer versions of glibc use an arena memory allocator that
      # causes virtual # memory usage to explode. This interacts badly
      # with the many threads that we use in Hadoop. Tune the variable
      # down to prevent vmem explosion.
      export MALLOC_ARENA_MAX=${MALLOC_ARENA_MAX:-4}
    ;;
    CYGWIN*)
      # Flag that we're running on Cygwin to trigger path translation later.
      HADOOP_IS_CYGWIN=true
    ;;
  esac
}
  751. ## @description Configure/verify ${JAVA_HOME}
  752. ## @audience public
  753. ## @stability stable
  754. ## @replaceable yes
  755. ## @return may exit on failure conditions
  756. function hadoop_java_setup
  757. {
  758. # Bail if we did not detect it
  759. if [[ -z "${JAVA_HOME}" ]]; then
  760. hadoop_error "ERROR: JAVA_HOME is not set and could not be found."
  761. exit 1
  762. fi
  763. if [[ ! -d "${JAVA_HOME}" ]]; then
  764. hadoop_error "ERROR: JAVA_HOME ${JAVA_HOME} does not exist."
  765. exit 1
  766. fi
  767. JAVA="${JAVA_HOME}/bin/java"
  768. if [[ ! -x "$JAVA" ]]; then
  769. hadoop_error "ERROR: $JAVA is not executable."
  770. exit 1
  771. fi
  772. }
  773. ## @description Finish Java JNI paths prior to execution
  774. ## @audience private
  775. ## @stability evolving
  776. ## @replaceable yes
  777. function hadoop_finalize_libpaths
  778. {
  779. if [[ -n "${JAVA_LIBRARY_PATH}" ]]; then
  780. hadoop_translate_cygwin_path JAVA_LIBRARY_PATH
  781. hadoop_add_param HADOOP_OPTS java.library.path \
  782. "-Djava.library.path=${JAVA_LIBRARY_PATH}"
  783. export LD_LIBRARY_PATH
  784. fi
  785. }
  786. ## @description Finish Java heap parameters prior to execution
  787. ## @audience private
  788. ## @stability evolving
  789. ## @replaceable yes
  790. function hadoop_finalize_hadoop_heap
  791. {
  792. if [[ -n "${HADOOP_HEAPSIZE_MAX}" ]]; then
  793. if [[ "${HADOOP_HEAPSIZE_MAX}" =~ ^[0-9]+$ ]]; then
  794. HADOOP_HEAPSIZE_MAX="${HADOOP_HEAPSIZE_MAX}m"
  795. fi
  796. hadoop_add_param HADOOP_OPTS Xmx "-Xmx${HADOOP_HEAPSIZE_MAX}"
  797. fi
  798. # backwards compatibility
  799. if [[ -n "${HADOOP_HEAPSIZE}" ]]; then
  800. if [[ "${HADOOP_HEAPSIZE}" =~ ^[0-9]+$ ]]; then
  801. HADOOP_HEAPSIZE="${HADOOP_HEAPSIZE}m"
  802. fi
  803. hadoop_add_param HADOOP_OPTS Xmx "-Xmx${HADOOP_HEAPSIZE}"
  804. fi
  805. if [[ -n "${HADOOP_HEAPSIZE_MIN}" ]]; then
  806. if [[ "${HADOOP_HEAPSIZE_MIN}" =~ ^[0-9]+$ ]]; then
  807. HADOOP_HEAPSIZE_MIN="${HADOOP_HEAPSIZE_MIN}m"
  808. fi
  809. hadoop_add_param HADOOP_OPTS Xms "-Xms${HADOOP_HEAPSIZE_MIN}"
  810. fi
  811. }
  812. ## @description Converts the contents of the variable name
  813. ## @description `varnameref` into the equivalent Windows path.
  814. ## @description If the second parameter is true, then `varnameref`
  815. ## @description is treated as though it was a path list.
  816. ## @audience public
  817. ## @stability stable
  818. ## @replaceable yes
  819. ## @param varnameref
  820. ## @param [true]
  821. function hadoop_translate_cygwin_path
  822. {
  823. if [[ "${HADOOP_IS_CYGWIN}" = "true" ]]; then
  824. if [[ "$2" = "true" ]]; then
  825. #shellcheck disable=SC2016
  826. eval "$1"='$(cygpath -p -w "${!1}" 2>/dev/null)'
  827. else
  828. #shellcheck disable=SC2016
  829. eval "$1"='$(cygpath -w "${!1}" 2>/dev/null)'
  830. fi
  831. fi
  832. }
  833. ## @description Finish configuring Hadoop specific system properties
  834. ## @description prior to executing Java
  835. ## @audience private
  836. ## @stability evolving
  837. ## @replaceable yes
  838. function hadoop_finalize_hadoop_opts
  839. {
  840. hadoop_translate_cygwin_path HADOOP_LOG_DIR
  841. hadoop_add_param HADOOP_OPTS hadoop.log.dir "-Dhadoop.log.dir=${HADOOP_LOG_DIR}"
  842. hadoop_add_param HADOOP_OPTS hadoop.log.file "-Dhadoop.log.file=${HADOOP_LOGFILE}"
  843. HADOOP_HOME=${HADOOP_PREFIX}
  844. hadoop_translate_cygwin_path HADOOP_HOME
  845. export HADOOP_HOME
  846. hadoop_add_param HADOOP_OPTS hadoop.home.dir "-Dhadoop.home.dir=${HADOOP_HOME}"
  847. hadoop_add_param HADOOP_OPTS hadoop.id.str "-Dhadoop.id.str=${HADOOP_IDENT_STRING}"
  848. hadoop_add_param HADOOP_OPTS hadoop.root.logger "-Dhadoop.root.logger=${HADOOP_ROOT_LOGGER}"
  849. hadoop_add_param HADOOP_OPTS hadoop.policy.file "-Dhadoop.policy.file=${HADOOP_POLICYFILE}"
  850. hadoop_add_param HADOOP_OPTS hadoop.security.logger "-Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER}"
  851. }
  852. ## @description Finish Java classpath prior to execution
  853. ## @audience private
  854. ## @stability evolving
  855. ## @replaceable yes
  856. function hadoop_finalize_classpath
  857. {
  858. hadoop_add_classpath "${HADOOP_CONF_DIR}" before
  859. # user classpath gets added at the last minute. this allows
  860. # override of CONF dirs and more
  861. hadoop_add_to_classpath_userpath
  862. hadoop_translate_cygwin_path CLASSPATH true
  863. }
  864. ## @description Finish Catalina configuration prior to execution
  865. ## @audience private
  866. ## @stability evolving
  867. ## @replaceable yes
  868. function hadoop_finalize_catalina_opts
  869. {
  870. local prefix=${HADOOP_CATALINA_PREFIX}
  871. hadoop_add_param CATALINA_OPTS hadoop.home.dir "-Dhadoop.home.dir=${HADOOP_PREFIX}"
  872. if [[ -n "${JAVA_LIBRARY_PATH}" ]]; then
  873. hadoop_add_param CATALINA_OPTS java.library.path "-Djava.library.path=${JAVA_LIBRARY_PATH}"
  874. fi
  875. hadoop_add_param CATALINA_OPTS "${prefix}.home.dir" "-D${prefix}.home.dir=${HADOOP_PREFIX}"
  876. hadoop_add_param CATALINA_OPTS "${prefix}.config.dir" "-D${prefix}.config.dir=${HADOOP_CATALINA_CONFIG}"
  877. hadoop_add_param CATALINA_OPTS "${prefix}.log.dir" "-D${prefix}.log.dir=${HADOOP_CATALINA_LOG}"
  878. hadoop_add_param CATALINA_OPTS "${prefix}.temp.dir" "-D${prefix}.temp.dir=${HADOOP_CATALINA_TEMP}"
  879. hadoop_add_param CATALINA_OPTS "${prefix}.admin.port" "-D${prefix}.admin.port=${HADOOP_CATALINA_ADMIN_PORT}"
  880. hadoop_add_param CATALINA_OPTS "${prefix}.http.port" "-D${prefix}.http.port=${HADOOP_CATALINA_HTTP_PORT}"
  881. hadoop_add_param CATALINA_OPTS "${prefix}.max.threads" "-D${prefix}.max.threads=${HADOOP_CATALINA_MAX_THREADS}"
  882. hadoop_add_param CATALINA_OPTS "${prefix}.ssl.keystore.file" "-D${prefix}.ssl.keystore.file=${HADOOP_CATALINA_SSL_KEYSTORE_FILE}"
  883. }
  884. ## @description Finish all the remaining environment settings prior
  885. ## @description to executing Java. This is a wrapper that calls
  886. ## @description the other `finalize` routines.
  887. ## @audience private
  888. ## @stability evolving
  889. ## @replaceable yes
  890. function hadoop_finalize
  891. {
  892. hadoop_shellprofiles_finalize
  893. hadoop_finalize_classpath
  894. hadoop_finalize_libpaths
  895. hadoop_finalize_hadoop_heap
  896. hadoop_finalize_hadoop_opts
  897. hadoop_translate_cygwin_path HADOOP_PREFIX
  898. hadoop_translate_cygwin_path HADOOP_CONF_DIR
  899. hadoop_translate_cygwin_path HADOOP_COMMON_HOME
  900. hadoop_translate_cygwin_path HADOOP_HDFS_HOME
  901. hadoop_translate_cygwin_path HADOOP_YARN_HOME
  902. hadoop_translate_cygwin_path HADOOP_MAPRED_HOME
  903. }
  904. ## @description Print usage information and exit with the passed
  905. ## @description `exitcode`
  906. ## @audience public
  907. ## @stability stable
  908. ## @replaceable no
  909. ## @param exitcode
  910. ## @return This function will always exit.
  911. function hadoop_exit_with_usage
  912. {
  913. local exitcode=$1
  914. if [[ -z $exitcode ]]; then
  915. exitcode=1
  916. fi
  917. if declare -F hadoop_usage >/dev/null ; then
  918. hadoop_usage
  919. elif [[ -x /usr/bin/cowsay ]]; then
  920. /usr/bin/cowsay -f elephant "Sorry, no help available."
  921. else
  922. hadoop_error "Sorry, no help available."
  923. fi
  924. exit $exitcode
  925. }
  926. ## @description Verify that prerequisites have been met prior to
  927. ## @description excuting a privileged program.
  928. ## @audience private
  929. ## @stability evolving
  930. ## @replaceable yes
  931. ## @return This routine may exit.
  932. function hadoop_verify_secure_prereq
  933. {
  934. # if you are on an OS like Illumos that has functional roles
  935. # and you are using pfexec, you'll probably want to change
  936. # this.
  937. # ${EUID} comes from the shell itself!
  938. if [[ "${EUID}" -ne 0 ]] && [[ -z "${HADOOP_SECURE_COMMAND}" ]]; then
  939. hadoop_error "ERROR: You must be a privileged user in order to run a secure service."
  940. exit 1
  941. else
  942. return 0
  943. fi
  944. }
  945. ## @audience private
  946. ## @stability evolving
  947. ## @replaceable yes
  948. function hadoop_setup_secure_service
  949. {
  950. # need a more complicated setup? replace me!
  951. HADOOP_PID_DIR=${HADOOP_SECURE_PID_DIR}
  952. HADOOP_LOG_DIR=${HADOOP_SECURE_LOG_DIR}
  953. }
  954. ## @audience private
  955. ## @stability evolving
  956. ## @replaceable yes
  957. function hadoop_verify_piddir
  958. {
  959. if [[ -z "${HADOOP_PID_DIR}" ]]; then
  960. hadoop_error "No pid directory defined."
  961. exit 1
  962. fi
  963. if [[ ! -w "${HADOOP_PID_DIR}" ]] && [[ ! -d "${HADOOP_PID_DIR}" ]]; then
  964. hadoop_error "WARNING: ${HADOOP_PID_DIR} does not exist. Creating."
  965. mkdir -p "${HADOOP_PID_DIR}" > /dev/null 2>&1
  966. if [[ $? -gt 0 ]]; then
  967. hadoop_error "ERROR: Unable to create ${HADOOP_PID_DIR}. Aborting."
  968. exit 1
  969. fi
  970. fi
  971. touch "${HADOOP_PID_DIR}/$$" >/dev/null 2>&1
  972. if [[ $? -gt 0 ]]; then
  973. hadoop_error "ERROR: Unable to write in ${HADOOP_PID_DIR}. Aborting."
  974. exit 1
  975. fi
  976. rm "${HADOOP_PID_DIR}/$$" >/dev/null 2>&1
  977. }
  978. ## @audience private
  979. ## @stability evolving
  980. ## @replaceable yes
  981. function hadoop_verify_logdir
  982. {
  983. if [[ -z "${HADOOP_LOG_DIR}" ]]; then
  984. hadoop_error "No log directory defined."
  985. exit 1
  986. fi
  987. if [[ ! -w "${HADOOP_LOG_DIR}" ]] && [[ ! -d "${HADOOP_LOG_DIR}" ]]; then
  988. hadoop_error "WARNING: ${HADOOP_LOG_DIR} does not exist. Creating."
  989. mkdir -p "${HADOOP_LOG_DIR}" > /dev/null 2>&1
  990. if [[ $? -gt 0 ]]; then
  991. hadoop_error "ERROR: Unable to create ${HADOOP_LOG_DIR}. Aborting."
  992. exit 1
  993. fi
  994. fi
  995. touch "${HADOOP_LOG_DIR}/$$" >/dev/null 2>&1
  996. if [[ $? -gt 0 ]]; then
  997. hadoop_error "ERROR: Unable to write in ${HADOOP_LOG_DIR}. Aborting."
  998. exit 1
  999. fi
  1000. rm "${HADOOP_LOG_DIR}/$$" >/dev/null 2>&1
  1001. }
  1002. ## @description Determine the status of the daemon referenced
  1003. ## @description by `pidfile`
  1004. ## @audience public
  1005. ## @stability stable
  1006. ## @replaceable yes
  1007. ## @param pidfile
  1008. ## @return (mostly) LSB 4.1.0 compatible status
  1009. function hadoop_status_daemon
  1010. {
  1011. #
  1012. # LSB 4.1.0 compatible status command (1)
  1013. #
  1014. # 0 = program is running
  1015. # 1 = dead, but still a pid (2)
  1016. # 2 = (not used by us)
  1017. # 3 = not running
  1018. #
  1019. # 1 - this is not an endorsement of the LSB
  1020. #
  1021. # 2 - technically, the specification says /var/run/pid, so
  1022. # we should never return this value, but we're giving
  1023. # them the benefit of a doubt and returning 1 even if
  1024. # our pid is not in in /var/run .
  1025. #
  1026. local pidfile=$1
  1027. shift
  1028. local pid
  1029. if [[ -f "${pidfile}" ]]; then
  1030. pid=$(cat "${pidfile}")
  1031. if ps -p "${pid}" > /dev/null 2>&1; then
  1032. return 0
  1033. fi
  1034. return 1
  1035. fi
  1036. return 3
  1037. }
  1038. ## @description Execute the Java `class`, passing along any `options`.
  1039. ## @description Additionally, set the Java property -Dproc_`command`.
  1040. ## @audience public
  1041. ## @stability stable
  1042. ## @replaceable yes
  1043. ## @param command
  1044. ## @param class
  1045. ## @param [options]
  1046. function hadoop_java_exec
  1047. {
  1048. # run a java command. this is used for
  1049. # non-daemons
  1050. local command=$1
  1051. local class=$2
  1052. shift 2
  1053. hadoop_debug "Final CLASSPATH: ${CLASSPATH}"
  1054. hadoop_debug "Final HADOOP_OPTS: ${HADOOP_OPTS}"
  1055. hadoop_debug "Final JAVA_HOME: ${JAVA_HOME}"
  1056. hadoop_debug "java: ${JAVA}"
  1057. hadoop_debug "Class name: ${class}"
  1058. hadoop_debug "Command line options: $*"
  1059. export CLASSPATH
  1060. #shellcheck disable=SC2086
  1061. exec "${JAVA}" "-Dproc_${command}" ${HADOOP_OPTS} "${class}" "$@"
  1062. }
  1063. ## @description Start a non-privileged daemon in the foreground.
  1064. ## @audience private
  1065. ## @stability evolving
  1066. ## @replaceable yes
  1067. ## @param command
  1068. ## @param class
  1069. ## @param pidfile
  1070. ## @param [options]
  1071. function hadoop_start_daemon
  1072. {
  1073. # this is our non-privileged daemon starter
  1074. # that fires up a daemon in the *foreground*
  1075. # so complex! so wow! much java!
  1076. local command=$1
  1077. local class=$2
  1078. local pidfile=$3
  1079. shift 3
  1080. hadoop_debug "Final CLASSPATH: ${CLASSPATH}"
  1081. hadoop_debug "Final HADOOP_OPTS: ${HADOOP_OPTS}"
  1082. hadoop_debug "Final JAVA_HOME: ${JAVA_HOME}"
  1083. hadoop_debug "java: ${JAVA}"
  1084. hadoop_debug "Class name: ${class}"
  1085. hadoop_debug "Command line options: $*"
  1086. # this is for the non-daemon pid creation
  1087. #shellcheck disable=SC2086
  1088. echo $$ > "${pidfile}" 2>/dev/null
  1089. if [[ $? -gt 0 ]]; then
  1090. hadoop_error "ERROR: Cannot write ${command} pid ${pidfile}."
  1091. fi
  1092. export CLASSPATH
  1093. #shellcheck disable=SC2086
  1094. exec "${JAVA}" "-Dproc_${command}" ${HADOOP_OPTS} "${class}" "$@"
  1095. }
## @description  Start a non-privileged daemon in the background.
## @audience     private
## @stability    evolving
## @replaceable  yes
## @param        command
## @param        class
## @param        pidfile
## @param        outfile
## @param        [options]
function hadoop_start_daemon_wrapper
{
  local daemonname=$1
  local class=$2
  local pidfile=$3
  local outfile=$4
  shift 4

  local counter

  hadoop_rotate_log "${outfile}"

  # launch the foreground starter in the background, detached from
  # stdin, with stdout+stderr appended to the daemon's out file
  hadoop_start_daemon "${daemonname}" \
    "$class" \
    "${pidfile}" \
    "$@" >> "${outfile}" 2>&1 < /dev/null &

  # we need to avoid a race condition here
  # so let's wait for the fork to finish
  # before overriding with the daemonized pid
  (( counter=0 ))
  while [[ ! -f ${pidfile} && ${counter} -le 5 ]]; do
    sleep 1
    (( counter++ ))
  done

  # this is for daemon pid creation: overwrite whatever the child
  # wrote with the background job's pid ($!)
  #shellcheck disable=SC2086
  echo $! > "${pidfile}" 2>/dev/null
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot write ${daemonname} pid ${pidfile}."
  fi

  # renice failure is reported but not fatal
  # shellcheck disable=SC2086
  renice "${HADOOP_NICENESS}" $! >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot set priority of ${daemonname} process $!"
  fi

  # detach the job from this shell so an exit/SIGHUP of the wrapper
  # does not take the daemon down with it
  # shellcheck disable=SC2086
  disown %+ >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot disconnect ${daemonname} process $!"
  fi
  sleep 1

  # capture the ulimit output
  ulimit -a >> "${outfile}" 2>&1

  # final sanity check: is the daemon actually still alive?
  # shellcheck disable=SC2086
  if ! ps -p $! >/dev/null 2>&1; then
    return 1
  fi
  return 0
}
## @description  Start a privileged daemon in the foreground.
## @audience     private
## @stability    evolving
## @replaceable  yes
## @param        command
## @param        class
## @param        daemonpidfile
## @param        daemonoutfile
## @param        daemonerrfile
## @param        wrapperpidfile
## @param        [options]
function hadoop_start_secure_daemon
{
  # this is used to launch a secure daemon in the *foreground*
  #
  local daemonname=$1
  local class=$2

  # pid file to create for our deamon
  local daemonpidfile=$3

  # where to send stdout. jsvc has bad habits so this *may* be &1
  # which means you send it to stdout!
  local daemonoutfile=$4

  # where to send stderr. same thing, except &2 = stderr
  local daemonerrfile=$5
  local privpidfile=$6
  shift 6

  hadoop_rotate_log "${daemonoutfile}"
  hadoop_rotate_log "${daemonerrfile}"

  # jsvc is the Apache Commons Daemon launcher that drops privileges
  # after binding privileged resources
  # NOTE(review): 'jsvc' is not declared local here -- presumably
  # intentional/legacy; confirm before changing.
  jsvc="${JSVC_HOME}/jsvc"
  if [[ ! -f "${jsvc}" ]]; then
    hadoop_error "JSVC_HOME is not set or set incorrectly. jsvc is required to run secure"
    hadoop_error "or privileged daemons. Please download and install jsvc from "
    hadoop_error "http://archive.apache.org/dist/commons/daemon/binaries/ "
    hadoop_error "and set JSVC_HOME to the directory containing the jsvc binary."
    exit 1
  fi

  # note that shellcheck will throw a
  # bogus for-our-use-case 2086 here.
  # it doesn't properly support multi-line situations

  hadoop_debug "Final CLASSPATH: ${CLASSPATH}"
  hadoop_debug "Final HADOOP_OPTS: ${HADOOP_OPTS}"
  hadoop_debug "Final JSVC_HOME: ${JSVC_HOME}"
  hadoop_debug "jsvc: ${jsvc}"
  hadoop_debug "Class name: ${class}"
  hadoop_debug "Command line options: $*"

  # record this wrapper's own pid; jsvc itself maintains daemonpidfile
  #shellcheck disable=SC2086
  echo $$ > "${privpidfile}" 2>/dev/null
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot write ${daemonname} pid ${privpidfile}."
  fi

  # replace this shell with jsvc; -nodetach keeps it in the
  # foreground as promised above
  exec "${jsvc}" \
    "-Dproc_${daemonname}" \
    -outfile "${daemonoutfile}" \
    -errfile "${daemonerrfile}" \
    -pidfile "${daemonpidfile}" \
    -nodetach \
    -user "${HADOOP_SECURE_USER}" \
    -cp "${CLASSPATH}" \
    ${HADOOP_OPTS} \
    "${class}" "$@"
}
  1212. ## @description Start a privileged daemon in the background.
  1213. ## @audience private
  1214. ## @stability evolving
  1215. ## @replaceable yes
  1216. ## @param command
  1217. ## @param class
  1218. ## @param daemonpidfile
  1219. ## @param daemonoutfile
  1220. ## @param wrapperpidfile
  1221. ## @param warpperoutfile
  1222. ## @param daemonerrfile
  1223. ## @param [options]
  1224. function hadoop_start_secure_daemon_wrapper
  1225. {
  1226. # this wraps hadoop_start_secure_daemon to take care
  1227. # of the dirty work to launch a daemon in the background!
  1228. local daemonname=$1
  1229. local class=$2
  1230. # same rules as hadoop_start_secure_daemon except we
  1231. # have some additional parameters
  1232. local daemonpidfile=$3
  1233. local daemonoutfile=$4
  1234. # the pid file of the subprocess that spawned our
  1235. # secure launcher
  1236. local jsvcpidfile=$5
  1237. # the output of the subprocess that spawned our secure
  1238. # launcher
  1239. local jsvcoutfile=$6
  1240. local daemonerrfile=$7
  1241. shift 7
  1242. local counter
  1243. hadoop_rotate_log "${jsvcoutfile}"
  1244. hadoop_start_secure_daemon \
  1245. "${daemonname}" \
  1246. "${class}" \
  1247. "${daemonpidfile}" \
  1248. "${daemonoutfile}" \
  1249. "${daemonerrfile}" \
  1250. "${jsvcpidfile}" "$@" >> "${jsvcoutfile}" 2>&1 < /dev/null &
  1251. # we need to avoid a race condition here
  1252. # so let's wait for the fork to finish
  1253. # before overriding with the daemonized pid
  1254. (( counter=0 ))
  1255. while [[ ! -f ${daemonpidfile} && ${counter} -le 5 ]]; do
  1256. sleep 1
  1257. (( counter++ ))
  1258. done
  1259. # this is for the daemon pid creation
  1260. #shellcheck disable=SC2086
  1261. echo $! > "${jsvcpidfile}" 2>/dev/null
  1262. if [[ $? -gt 0 ]]; then
  1263. hadoop_error "ERROR: Cannot write ${daemonname} pid ${daemonpidfile}."
  1264. fi
  1265. sleep 1
  1266. #shellcheck disable=SC2086
  1267. renice "${HADOOP_NICENESS}" $! >/dev/null 2>&1
  1268. if [[ $? -gt 0 ]]; then
  1269. hadoop_error "ERROR: Cannot set priority of ${daemonname} process $!"
  1270. fi
  1271. if [[ -f "${daemonpidfile}" ]]; then
  1272. #shellcheck disable=SC2046
  1273. renice "${HADOOP_NICENESS}" $(cat "${daemonpidfile}" 2>/dev/null) >/dev/null 2>&1
  1274. if [[ $? -gt 0 ]]; then
  1275. hadoop_error "ERROR: Cannot set priority of ${daemonname} process $(cat "${daemonpidfile}" 2>/dev/null)"
  1276. fi
  1277. fi
  1278. #shellcheck disable=SC2046
  1279. disown %+ >/dev/null 2>&1
  1280. if [[ $? -gt 0 ]]; then
  1281. hadoop_error "ERROR: Cannot disconnect ${daemonname} process $!"
  1282. fi
  1283. # capture the ulimit output
  1284. su "${HADOOP_SECURE_USER}" -c 'bash -c "ulimit -a"' >> "${jsvcoutfile}" 2>&1
  1285. #shellcheck disable=SC2086
  1286. if ! ps -p $! >/dev/null 2>&1; then
  1287. return 1
  1288. fi
  1289. return 0
  1290. }
  1291. ## @description Stop the non-privileged `command` daemon with that
  1292. ## @description that is running at `pidfile`.
  1293. ## @audience public
  1294. ## @stability stable
  1295. ## @replaceable yes
  1296. ## @param command
  1297. ## @param pidfile
  1298. function hadoop_stop_daemon
  1299. {
  1300. local cmd=$1
  1301. local pidfile=$2
  1302. shift 2
  1303. local pid
  1304. if [[ -f "${pidfile}" ]]; then
  1305. pid=$(cat "$pidfile")
  1306. kill "${pid}" >/dev/null 2>&1
  1307. sleep "${HADOOP_STOP_TIMEOUT}"
  1308. if kill -0 "${pid}" > /dev/null 2>&1; then
  1309. hadoop_error "WARNING: ${cmd} did not stop gracefully after ${HADOOP_STOP_TIMEOUT} seconds: Trying to kill with kill -9"
  1310. kill -9 "${pid}" >/dev/null 2>&1
  1311. fi
  1312. if ps -p "${pid}" > /dev/null 2>&1; then
  1313. hadoop_error "ERROR: Unable to kill ${pid}"
  1314. else
  1315. rm -f "${pidfile}" >/dev/null 2>&1
  1316. fi
  1317. fi
  1318. }
  1319. ## @description Stop the privileged `command` daemon with that
  1320. ## @description that is running at `daemonpidfile` and launched with
  1321. ## @description the wrapper at `wrapperpidfile`.
  1322. ## @audience public
  1323. ## @stability stable
  1324. ## @replaceable yes
  1325. ## @param command
  1326. ## @param daemonpidfile
  1327. ## @param wrapperpidfile
  1328. function hadoop_stop_secure_daemon
  1329. {
  1330. local command=$1
  1331. local daemonpidfile=$2
  1332. local privpidfile=$3
  1333. shift 3
  1334. local ret
  1335. hadoop_stop_daemon "${command}" "${daemonpidfile}"
  1336. ret=$?
  1337. rm -f "${daemonpidfile}" "${privpidfile}" 2>/dev/null
  1338. return ${ret}
  1339. }
## @description  Manage a non-privileged daemon.
## @audience     private
## @stability    evolving
## @replaceable  yes
## @param        [start|stop|status|default]
## @param        command
## @param        class
## @param        daemonpidfile
## @param        daemonoutfile
## @param        [options]
function hadoop_daemon_handler
{
  local daemonmode=$1
  local daemonname=$2
  local class=$3
  local daemon_pidfile=$4
  local daemon_outfile=$5
  shift 5

  case ${daemonmode} in
    status)
      # propagate the (mostly) LSB status code straight to the caller
      hadoop_status_daemon "${daemon_pidfile}"
      exit $?
    ;;

    stop)
      hadoop_stop_daemon "${daemonname}" "${daemon_pidfile}"
      exit $?
    ;;

    ##COMPAT -- older hadoops would also start daemons by default
    start|default)
      hadoop_verify_piddir
      hadoop_verify_logdir
      # refuse to start twice: status 0 means it is already running
      hadoop_status_daemon "${daemon_pidfile}"
      if [[ $? == 0 ]]; then
        hadoop_error "${daemonname} is running as process $(cat "${daemon_pidfile}"). Stop it first."
        exit 1
      else
        # stale pid file, so just remove it and continue on
        rm -f "${daemon_pidfile}" >/dev/null 2>&1
      fi

      ##COMPAT - differenticate between --daemon start and nothing
      # "nothing" shouldn't detach
      if [[ "$daemonmode" = "default" ]]; then
        hadoop_start_daemon "${daemonname}" "${class}" "${daemon_pidfile}" "$@"
      else
        hadoop_start_daemon_wrapper "${daemonname}" \
          "${class}" "${daemon_pidfile}" "${daemon_outfile}" "$@"
      fi
    ;;
  esac
}
## @description  Manage a privileged daemon.
## @audience     private
## @stability    evolving
## @replaceable  yes
## @param        [start|stop|status|default]
## @param        command
## @param        class
## @param        daemonpidfile
## @param        daemonoutfile
## @param        wrapperpidfile
## @param        wrapperoutfile
## @param        wrappererrfile
## @param        [options]
function hadoop_secure_daemon_handler
{
  local daemonmode=$1
  local daemonname=$2
  local classname=$3
  local daemon_pidfile=$4
  local daemon_outfile=$5
  local priv_pidfile=$6
  local priv_outfile=$7
  local priv_errfile=$8
  shift 8

  case ${daemonmode} in
    status)
      # status is judged by the daemon's own pid file, not the wrapper's
      hadoop_status_daemon "${daemon_pidfile}"
      exit $?
    ;;

    stop)
      hadoop_stop_secure_daemon "${daemonname}" \
        "${daemon_pidfile}" "${priv_pidfile}"
      exit $?
    ;;

    ##COMPAT -- older hadoops would also start daemons by default
    start|default)
      hadoop_verify_piddir
      hadoop_verify_logdir
      # refuse to start twice: status 0 means it is already running
      hadoop_status_daemon "${daemon_pidfile}"
      if [[ $? == 0 ]]; then
        hadoop_error "${daemonname} is running as process $(cat "${daemon_pidfile}"). Stop it first."
        exit 1
      else
        # stale pid file, so just remove it and continue on
        rm -f "${daemon_pidfile}" >/dev/null 2>&1
      fi

      ##COMPAT - differenticate between --daemon start and nothing
      # "nothing" shouldn't detach
      if [[ "${daemonmode}" = "default" ]]; then
        hadoop_start_secure_daemon "${daemonname}" "${classname}" \
          "${daemon_pidfile}" "${daemon_outfile}" \
          "${priv_errfile}" "${priv_pidfile}" "$@"
      else
        hadoop_start_secure_daemon_wrapper "${daemonname}" "${classname}" \
          "${daemon_pidfile}" "${daemon_outfile}" \
          "${priv_pidfile}" "${priv_outfile}" "${priv_errfile}" "$@"
      fi
    ;;
  esac
}
  1450. ## @description Verify that ${USER} is allowed to execute the
  1451. ## @description given subcommand.
  1452. ## @audience public
  1453. ## @stability stable
  1454. ## @replaceable yes
  1455. ## @param subcommand
  1456. ## @return will exit on failure conditions
  1457. function hadoop_verify_user
  1458. {
  1459. local command=$1
  1460. local uservar="HADOOP_${command}_USER"
  1461. if [[ -n ${!uservar} ]]; then
  1462. if [[ ${!uservar} != ${USER} ]]; then
  1463. hadoop_error "ERROR: ${command} can only be executed by ${!uservar}."
  1464. exit 1
  1465. fi
  1466. fi
  1467. }
  1468. ## @description Perform the 'hadoop classpath', etc subcommand with the given
  1469. ## @description parameters
  1470. ## @audience private
  1471. ## @stability evolving
  1472. ## @replaceable yes
  1473. ## @param [parameters]
  1474. ## @return will print & exit with no params
  1475. function hadoop_do_classpath_subcommand
  1476. {
  1477. if [[ "$#" -gt 1 ]]; then
  1478. eval "$1"=org.apache.hadoop.util.Classpath
  1479. else
  1480. hadoop_finalize
  1481. echo "${CLASSPATH}"
  1482. exit 0
  1483. fi
  1484. }