#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
function hadoop_error
{
  # NOTE: This function is not user replaceable.

  echo "$*" 1>&2
}

function hadoop_debug
{
  if [[ -n "${HADOOP_SHELL_SCRIPT_DEBUG}" ]]; then
    echo "DEBUG: $*" 1>&2
  fi
}
function hadoop_deprecate_envvar
{
  #
  # Deprecate $1 with $2
  local oldvar=$1
  local newvar=$2
  local oldval=${!oldvar}
  local newval=${!newvar}

  if [[ -n "${oldval}" ]]; then
    hadoop_error "WARNING: ${oldvar} has been replaced by ${newvar}. Using value of ${oldvar}."
    # shellcheck disable=SC2086
    eval ${newvar}=\"${oldval}\"

    # shellcheck disable=SC2086
    newval=${oldval}

    # shellcheck disable=SC2086
    eval ${newvar}=\"${newval}\"
  fi
}
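# Illustrative usage (not part of the original script), with made-up variable
# names:
#   HADOOP_OLD_SETTING="foo"
#   hadoop_deprecate_envvar HADOOP_OLD_SETTING HADOOP_NEW_SETTING
# prints a WARNING on stderr and copies "foo" into HADOOP_NEW_SETTING.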
function hadoop_bootstrap
{
  # NOTE: This function is not user replaceable.

  # the root of the Hadoop installation
  # See HADOOP-6255 for the expected directory structure layout

  # By now, HADOOP_LIBEXEC_DIR should have been defined upstream
  # We can piggyback off of that to figure out where the default
  # HADOOP_PREFIX should be. This allows us to run without
  # HADOOP_PREFIX ever being defined by a human! As a consequence
  # HADOOP_LIBEXEC_DIR now becomes perhaps the single most powerful
  # env var within Hadoop.
  if [[ -z "${HADOOP_LIBEXEC_DIR}" ]]; then
    hadoop_error "HADOOP_LIBEXEC_DIR is not defined. Exiting."
    exit 1
  fi
  HADOOP_DEFAULT_PREFIX=$(cd -P -- "${HADOOP_LIBEXEC_DIR}/.." >/dev/null && pwd -P)
  HADOOP_PREFIX=${HADOOP_PREFIX:-$HADOOP_DEFAULT_PREFIX}
  export HADOOP_PREFIX

  #
  # short-cuts. vendors may redefine these as well, preferably
  # in hadoop-layouts.sh
  #
  HADOOP_COMMON_DIR=${HADOOP_COMMON_DIR:-"share/hadoop/common"}
  HADOOP_COMMON_LIB_JARS_DIR=${HADOOP_COMMON_LIB_JARS_DIR:-"share/hadoop/common/lib"}
  HADOOP_COMMON_LIB_NATIVE_DIR=${HADOOP_COMMON_LIB_NATIVE_DIR:-"lib/native"}
  HDFS_DIR=${HDFS_DIR:-"share/hadoop/hdfs"}
  HDFS_LIB_JARS_DIR=${HDFS_LIB_JARS_DIR:-"share/hadoop/hdfs/lib"}
  YARN_DIR=${YARN_DIR:-"share/hadoop/yarn"}
  YARN_LIB_JARS_DIR=${YARN_LIB_JARS_DIR:-"share/hadoop/yarn/lib"}
  MAPRED_DIR=${MAPRED_DIR:-"share/hadoop/mapreduce"}
  MAPRED_LIB_JARS_DIR=${MAPRED_LIB_JARS_DIR:-"share/hadoop/mapreduce/lib"}

  # setup a default TOOL_PATH
  TOOL_PATH=${TOOL_PATH:-${HADOOP_PREFIX}/share/hadoop/tools/lib/*}

  export HADOOP_OS_TYPE=${HADOOP_OS_TYPE:-$(uname -s)}

  # defaults
  export HADOOP_OPTS=${HADOOP_OPTS:-"-Djava.net.preferIPv4Stack=true"}
  hadoop_debug "Initial HADOOP_OPTS=${HADOOP_OPTS}"
}
function hadoop_find_confdir
{
  # NOTE: This function is not user replaceable.

  local conf_dir

  # Look for the basic hadoop configuration area.
  #
  # An attempt at compatibility with some Hadoop 1.x
  # installs.
  if [[ -e "${HADOOP_PREFIX}/conf/hadoop-env.sh" ]]; then
    conf_dir="conf"
  else
    conf_dir="etc/hadoop"
  fi
  export HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-${HADOOP_PREFIX}/${conf_dir}}"

  hadoop_debug "HADOOP_CONF_DIR=${HADOOP_CONF_DIR}"
}
function hadoop_verify_confdir
{
  # Check only log4j.properties by default.
  # --loglevel does not work without logger settings in log4j.properties.
  if [[ ! -f "${HADOOP_CONF_DIR}/log4j.properties" ]]; then
    hadoop_error "WARNING: log4j.properties is not found. HADOOP_CONF_DIR may be incomplete."
  fi
}
function hadoop_exec_hadoopenv
{
  # NOTE: This function is not user replaceable.

  if [[ -z "${HADOOP_ENV_PROCESSED}" ]]; then
    if [[ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]]; then
      export HADOOP_ENV_PROCESSED=true
      . "${HADOOP_CONF_DIR}/hadoop-env.sh"
    fi
  fi
}

function hadoop_exec_userfuncs
{
  # NOTE: This function is not user replaceable.

  if [[ -e "${HADOOP_CONF_DIR}/hadoop-user-functions.sh" ]]; then
    . "${HADOOP_CONF_DIR}/hadoop-user-functions.sh"
  fi
}

function hadoop_exec_hadooprc
{
  # Read the user's settings. This provides for users to override
  # and/or append hadoop-env.sh. It is not meant as a complete system override.
  if [[ -f "${HOME}/.hadooprc" ]]; then
    hadoop_debug "Applying the user's .hadooprc"
    . "${HOME}/.hadooprc"
  fi
}
function hadoop_import_shellprofiles
{
  local i
  local files1
  local files2

  if [[ -d "${HADOOP_LIBEXEC_DIR}/shellprofile.d" ]]; then
    files1=(${HADOOP_LIBEXEC_DIR}/shellprofile.d/*.sh)
  else
    hadoop_error "WARNING: ${HADOOP_LIBEXEC_DIR}/shellprofile.d doesn't exist. Functionality may not work."
  fi

  if [[ -d "${HADOOP_CONF_DIR}/shellprofile.d" ]]; then
    files2=(${HADOOP_CONF_DIR}/shellprofile.d/*.sh)
  fi

  for i in "${files1[@]}" "${files2[@]}"
  do
    if [[ -n "${i}" ]]; then
      hadoop_debug "Profiles: importing ${i}"
      . "${i}"
    fi
  done
}
function hadoop_shellprofiles_init
{
  local i

  for i in ${HADOOP_SHELL_PROFILES}
  do
    if declare -F _${i}_hadoop_init >/dev/null ; then
      hadoop_debug "Profiles: ${i} init"
      # shellcheck disable=SC2086
      _${i}_hadoop_init
    fi
  done
}

function hadoop_shellprofiles_classpath
{
  local i

  for i in ${HADOOP_SHELL_PROFILES}
  do
    if declare -F _${i}_hadoop_classpath >/dev/null ; then
      hadoop_debug "Profiles: ${i} classpath"
      # shellcheck disable=SC2086
      _${i}_hadoop_classpath
    fi
  done
}

function hadoop_shellprofiles_nativelib
{
  local i

  for i in ${HADOOP_SHELL_PROFILES}
  do
    if declare -F _${i}_hadoop_nativelib >/dev/null ; then
      hadoop_debug "Profiles: ${i} nativelib"
      # shellcheck disable=SC2086
      _${i}_hadoop_nativelib
    fi
  done
}

function hadoop_shellprofiles_finalize
{
  local i

  for i in ${HADOOP_SHELL_PROFILES}
  do
    if declare -F _${i}_hadoop_finalize >/dev/null ; then
      hadoop_debug "Profiles: ${i} finalize"
      # shellcheck disable=SC2086
      _${i}_hadoop_finalize
    fi
  done
}
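#
# Illustrative sketch (not part of the original script): a shell profile
# dropped into ${HADOOP_LIBEXEC_DIR}/shellprofile.d/example.sh might look
# like the following, where "example" and its paths are hypothetical:
#
#   hadoop_add_profile example
#
#   function _example_hadoop_classpath
#   {
#     hadoop_add_classpath "/opt/example/lib/*"
#   }
#
# hadoop_shellprofiles_classpath would then call _example_hadoop_classpath
# because "example" was registered in HADOOP_SHELL_PROFILES.
#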
function hadoop_basic_init
{
  # Some of these are also set in hadoop-env.sh.
  # we still set them here just in case hadoop-env.sh is
  # broken in some way, set up defaults, etc.
  #
  # but it is important to note that if you update these
  # you also need to update hadoop-env.sh as well!!!

  CLASSPATH=""
  hadoop_debug "Initialize CLASSPATH"

  if [[ -z "${HADOOP_COMMON_HOME}" ]] &&
     [[ -d "${HADOOP_PREFIX}/${HADOOP_COMMON_DIR}" ]]; then
    export HADOOP_COMMON_HOME="${HADOOP_PREFIX}"
  fi

  # default policy file for service-level authorization
  HADOOP_POLICYFILE=${HADOOP_POLICYFILE:-"hadoop-policy.xml"}

  # define HADOOP_HDFS_HOME
  if [[ -z "${HADOOP_HDFS_HOME}" ]] &&
     [[ -d "${HADOOP_PREFIX}/${HDFS_DIR}" ]]; then
    export HADOOP_HDFS_HOME="${HADOOP_PREFIX}"
  fi

  # define HADOOP_YARN_HOME
  if [[ -z "${HADOOP_YARN_HOME}" ]] &&
     [[ -d "${HADOOP_PREFIX}/${YARN_DIR}" ]]; then
    export HADOOP_YARN_HOME="${HADOOP_PREFIX}"
  fi

  # define HADOOP_MAPRED_HOME
  if [[ -z "${HADOOP_MAPRED_HOME}" ]] &&
     [[ -d "${HADOOP_PREFIX}/${MAPRED_DIR}" ]]; then
    export HADOOP_MAPRED_HOME="${HADOOP_PREFIX}"
  fi

  HADOOP_IDENT_STRING=${HADOOP_IDENT_STRING:-$USER}
  HADOOP_LOG_DIR=${HADOOP_LOG_DIR:-"${HADOOP_PREFIX}/logs"}
  HADOOP_LOGFILE=${HADOOP_LOGFILE:-hadoop.log}
  HADOOP_LOGLEVEL=${HADOOP_LOGLEVEL:-INFO}
  HADOOP_NICENESS=${HADOOP_NICENESS:-0}
  HADOOP_STOP_TIMEOUT=${HADOOP_STOP_TIMEOUT:-5}
  HADOOP_PID_DIR=${HADOOP_PID_DIR:-/tmp}
  HADOOP_ROOT_LOGGER=${HADOOP_ROOT_LOGGER:-${HADOOP_LOGLEVEL},console}
  HADOOP_DAEMON_ROOT_LOGGER=${HADOOP_DAEMON_ROOT_LOGGER:-${HADOOP_LOGLEVEL},RFA}
  HADOOP_SECURITY_LOGGER=${HADOOP_SECURITY_LOGGER:-INFO,NullAppender}
  HADOOP_SSH_OPTS=${HADOOP_SSH_OPTS:-"-o BatchMode=yes -o StrictHostKeyChecking=no -o ConnectTimeout=10s"}
  HADOOP_SECURE_LOG_DIR=${HADOOP_SECURE_LOG_DIR:-${HADOOP_LOG_DIR}}
  HADOOP_SECURE_PID_DIR=${HADOOP_SECURE_PID_DIR:-${HADOOP_PID_DIR}}
  HADOOP_SSH_PARALLEL=${HADOOP_SSH_PARALLEL:-10}
}
function hadoop_populate_slaves_file()
{
  # NOTE: This function is not user replaceable.

  local slavesfile=$1
  shift
  if [[ -f "${slavesfile}" ]]; then
    # shellcheck disable=2034
    HADOOP_SLAVES="${slavesfile}"
  elif [[ -f "${HADOOP_CONF_DIR}/${slavesfile}" ]]; then
    # shellcheck disable=2034
    HADOOP_SLAVES="${HADOOP_CONF_DIR}/${slavesfile}"
  else
    hadoop_error "ERROR: Cannot find hosts file \"${slavesfile}\""
    hadoop_exit_with_usage 1
  fi
}
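# Illustrative usage (not part of the original script): a caller (e.g., an
# option handler for a hosts file) could pass a hypothetical file name such as
#   hadoop_populate_slaves_file "myhosts"
# which sets HADOOP_SLAVES to ./myhosts if that exists, otherwise to
# ${HADOOP_CONF_DIR}/myhosts, and errors out if neither is present.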
function hadoop_rotate_log
{
  #
  # log rotation (mainly used for .out files)
  # Users are likely to replace this one for something
  # that gzips or uses dates or who knows what.
  #
  # be aware that &1 and &2 might go through here
  # so don't do anything too crazy...
  #
  local log=$1;
  local num=${2:-5};

  if [[ -f "${log}" ]]; then # rotate logs
    while [[ ${num} -gt 1 ]]; do
      #shellcheck disable=SC2086
      let prev=${num}-1
      if [[ -f "${log}.${prev}" ]]; then
        mv "${log}.${prev}" "${log}.${num}"
      fi
      num=${prev}
    done
    mv "${log}" "${log}.${num}"
  fi
}
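# Illustrative usage (not part of the original script), with a hypothetical
# out file and 3 generations kept:
#   hadoop_rotate_log /tmp/example-daemon.out 3
# .2 is renamed to .3, .1 to .2, and the current file becomes .1, so the
# caller can start writing a fresh /tmp/example-daemon.out afterwards.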
function hadoop_actual_ssh
{
  # we are passing this function to xargs
  # should get hostname followed by rest of command line
  local slave=$1
  shift

  # shellcheck disable=SC2086
  ssh ${HADOOP_SSH_OPTS} ${slave} $"${@// /\\ }" 2>&1 | sed "s/^/$slave: /"
}
function hadoop_connect_to_hosts
{
  # shellcheck disable=SC2124
  local params="$@"

  #
  # ssh (or whatever) to a host
  #
  # User can specify hostnames or a file where the hostnames are (not both)
  if [[ -n "${HADOOP_SLAVES}" && -n "${HADOOP_SLAVE_NAMES}" ]] ; then
    hadoop_error "ERROR: Both HADOOP_SLAVES and HADOOP_SLAVE_NAMES were defined. Aborting."
    exit 1
  fi

  if [[ -n "${HADOOP_SLAVE_NAMES}" ]] ; then
    SLAVE_NAMES=${HADOOP_SLAVE_NAMES}
  else
    SLAVE_FILE=${HADOOP_SLAVES:-${HADOOP_CONF_DIR}/slaves}
  fi

  # if pdsh is available, let's use it. otherwise default
  # to a loop around ssh. (ugh)
  if [[ -e '/usr/bin/pdsh' ]]; then
    if [[ -z "${HADOOP_SLAVE_NAMES}" ]] ; then
      # if we were given a file, just let pdsh deal with it.
      # shellcheck disable=SC2086
      PDSH_SSH_ARGS_APPEND="${HADOOP_SSH_OPTS}" pdsh \
        -f "${HADOOP_SSH_PARALLEL}" -w ^"${SLAVE_FILE}" $"${@// /\\ }" 2>&1
    else
      # no spaces allowed in the pdsh arg host list
      # shellcheck disable=SC2086
      SLAVE_NAMES=$(echo ${SLAVE_NAMES} | tr -s ' ' ,)
      PDSH_SSH_ARGS_APPEND="${HADOOP_SSH_OPTS}" pdsh \
        -f "${HADOOP_SSH_PARALLEL}" -w "${SLAVE_NAMES}" $"${@// /\\ }" 2>&1
    fi
  else
    if [[ -z "${SLAVE_NAMES}" ]]; then
      SLAVE_NAMES=$(sed 's/#.*$//;/^$/d' "${SLAVE_FILE}")
    fi

    # quoting here gets tricky. it's easier to push it into a function
    # so that we don't have to deal with it. However...
    # xargs can't use a function so instead we'll export it out
    # and force it into a subshell
    # moral of the story: just use pdsh.
    export -f hadoop_actual_ssh
    export HADOOP_SSH_OPTS

    # xargs is used with option -I to replace the placeholder in the argument
    # list with each hostname read from stdin/pipe. But it considers one
    # line as one argument while reading from stdin/pipe. So place each
    # hostname on a separate line when passing via pipe.
    SLAVE_NAMES=$(echo "$SLAVE_NAMES" | tr ' ' '\n' )
    echo "${SLAVE_NAMES}" | \
      xargs -n 1 -P"${HADOOP_SSH_PARALLEL}" \
        -I {} bash -c -- "hadoop_actual_ssh {} ${params}"
    wait
  fi
}
function hadoop_common_slave_mode_execute
{
  #
  # input should be the command line as given by the user
  # in the form of an array
  #
  local argv=("$@")

  # if --slaves is still on the command line, remove it
  # to prevent loops
  argv=(${argv[@]/--slaves})
  hadoop_connect_to_hosts -- "${argv[@]}"
}
function hadoop_validate_classname
{
  local class=$1
  shift 1

  if [[ ! ${class} =~ \. ]]; then
    # assuming the arg is a typo of a command if it does not contain ".".
    # a class belonging to no package is not allowed as a result.
    hadoop_error "ERROR: ${class} is not COMMAND nor fully qualified CLASSNAME."
    return 1
  fi
  return 0
}
function hadoop_add_param
{
  #
  # general param dedupe..
  # $1 is what we are adding to
  # $2 is the name of what we want to add (key)
  # $3 is the key+value of what we're adding
  #
  # doing it this way allows us to support all sorts of
  # different syntaxes, just so long as they are space
  # delimited
  #
  if [[ ! ${!1} =~ $2 ]] ; then
    # shellcheck disable=SC2086
    eval $1="'${!1} $3'"
    hadoop_debug "$1 accepted $3"
  else
    hadoop_debug "$1 declined $3"
  fi
}
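# Illustrative usage (not part of the original script): adding a JVM system
# property to HADOOP_OPTS only if the key is not already present:
#   hadoop_add_param HADOOP_OPTS hadoop.example "-Dhadoop.example=true"
# A second identical call is "declined" because the key already matches.
# ("hadoop.example" is a made-up property used only for this example.)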
function hadoop_add_profile
{
  # shellcheck disable=SC2086
  hadoop_add_param HADOOP_SHELL_PROFILES $1 $1
}
function hadoop_add_classpath
{
  # two params:
  # $1 = directory, file, wildcard, whatever to add
  # $2 = before or after, which determines where in the
  #      classpath this object should go. default is after
  # return 0 = success (added or duplicate)
  # return 1 = failure (doesn't exist, whatever)

  # However, with classpath (& JLP), we can do dedupe
  # along with some sanity checking (e.g., missing directories)
  # since we have a better idea of what is legal
  #
  # for wildcard at end, we can
  # at least check the dir exists
  if [[ $1 =~ ^.*\*$ ]]; then
    local mp=$(dirname "$1")
    if [[ ! -d "${mp}" ]]; then
      hadoop_debug "Rejected CLASSPATH: $1 (not a dir)"
      return 1
    fi

  # no wildcard in the middle, so check existence
  # (doesn't matter *what* it is)
  elif [[ ! $1 =~ ^.*\*.*$ ]] && [[ ! -e "$1" ]]; then
    hadoop_debug "Rejected CLASSPATH: $1 (does not exist)"
    return 1
  fi

  if [[ -z "${CLASSPATH}" ]]; then
    CLASSPATH=$1
    hadoop_debug "Initial CLASSPATH=$1"
  elif [[ ":${CLASSPATH}:" != *":$1:"* ]]; then
    if [[ "$2" = "before" ]]; then
      CLASSPATH="$1:${CLASSPATH}"
      hadoop_debug "Prepend CLASSPATH: $1"
    else
      CLASSPATH+=:$1
      hadoop_debug "Append CLASSPATH: $1"
    fi
  else
    hadoop_debug "Dupe CLASSPATH: $1"
  fi
  return 0
}
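# Illustrative usage (not part of the original script), with hypothetical paths:
#   hadoop_add_classpath "${HADOOP_CONF_DIR}" before   # prepend the conf dir
#   hadoop_add_classpath "/opt/example/lib/*"          # append a jar wildcard
# The wildcard form only requires that /opt/example/lib exist; entries already
# present in CLASSPATH are silently skipped as duplicates.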
function hadoop_add_colonpath
{
  # three params:
  # $1 = name of the variable to modify (e.g., LD_LIBRARY_PATH)
  # $2 = directory, file, wildcard, whatever to add
  # $3 = before or after, which determines where in the
  #      path this object should go
  # return 0 = success
  # return 1 = failure (duplicate or nonexistent directory)

  # this is CLASSPATH, JLP, etc but with dedupe but no
  # other checking
  if [[ -d "${2}" ]] && [[ ":${!1}:" != *":$2:"* ]]; then
    if [[ -z "${!1}" ]]; then
      # shellcheck disable=SC2086
      eval $1="'$2'"
      hadoop_debug "Initial colonpath($1): $2"
    elif [[ "$3" = "before" ]]; then
      # shellcheck disable=SC2086
      eval $1="'$2:${!1}'"
      hadoop_debug "Prepend colonpath($1): $2"
    else
      # shellcheck disable=SC2086
      eval $1+="'$2'"
      hadoop_debug "Append colonpath($1): $2"
    fi
    return 0
  fi
  hadoop_debug "Rejected colonpath($1): $2"
  return 1
}

function hadoop_add_javalibpath
{
  # specialized function for a common use case
  hadoop_add_colonpath JAVA_LIBRARY_PATH "$1" "$2"
}

function hadoop_add_ldlibpath
{
  # specialized function for a common use case
  hadoop_add_colonpath LD_LIBRARY_PATH "$1" "$2"

  # note that we export this
  export LD_LIBRARY_PATH
}
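# Illustrative usage (not part of the original script), with a hypothetical
# native-library directory:
#   hadoop_add_ldlibpath "/opt/example/native" before
# This prepends /opt/example/native to LD_LIBRARY_PATH (creating it if empty,
# skipping it if already present) and re-exports the variable.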
function hadoop_add_common_to_classpath
{
  #
  # get all of the common jars+config in the path
  #

  # developers
  if [[ -n "${HADOOP_ENABLE_BUILD_PATHS}" ]]; then
    hadoop_add_classpath "${HADOOP_COMMON_HOME}/hadoop-common/target/classes"
  fi

  if [[ -d "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_DIR}/webapps" ]]; then
    hadoop_add_classpath "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_DIR}"
  fi

  hadoop_add_classpath "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_LIB_JARS_DIR}"'/*'
  hadoop_add_classpath "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_DIR}"'/*'
}

function hadoop_add_to_classpath_userpath
{
  # Add the user-specified HADOOP_CLASSPATH to the
  # official CLASSPATH env var if HADOOP_USE_CLIENT_CLASSLOADER
  # is not set.
  # Add it first or last depending on if user has
  # set env-var HADOOP_USER_CLASSPATH_FIRST
  # we'll also dedupe it, because we're cool like that.
  #
  local c
  local array
  local i
  local j
  let c=0

  if [[ -n "${HADOOP_CLASSPATH}" ]]; then
    # I wonder if Java runs on VMS.
    for i in $(echo "${HADOOP_CLASSPATH}" | tr : '\n'); do
      array[$c]=$i
      let c+=1
    done
    let j=c-1

    if [[ -z "${HADOOP_USE_CLIENT_CLASSLOADER}" ]]; then
      if [[ -z "${HADOOP_USER_CLASSPATH_FIRST}" ]]; then
        for ((i=0; i<=j; i++)); do
          hadoop_add_classpath "${array[$i]}" after
        done
      else
        for ((i=j; i>=0; i--)); do
          hadoop_add_classpath "${array[$i]}" before
        done
      fi
    fi
  fi
}
function hadoop_os_tricks
{
  local bindv6only

  # Some OSes have special needs. Here are some out-of-the-box examples for
  # OS X, Linux and Windows on Cygwin.
  # Vendors, replace this with your special sauce.
  HADOOP_IS_CYGWIN=false
  case ${HADOOP_OS_TYPE} in
    Darwin)
      if [[ -z "${JAVA_HOME}" ]]; then
        if [[ -x /usr/libexec/java_home ]]; then
          export JAVA_HOME="$(/usr/libexec/java_home)"
        else
          export JAVA_HOME=/Library/Java/Home
        fi
      fi
    ;;
    Linux)
      bindv6only=$(/sbin/sysctl -n net.ipv6.bindv6only 2> /dev/null)

      # NOTE! HADOOP_ALLOW_IPV6 is a developer hook. We leave it
      # undocumented in hadoop-env.sh because we don't want users to
      # shoot themselves in the foot while devs make IPv6 work.
      if [[ -n "${bindv6only}" ]] &&
         [[ "${bindv6only}" -eq "1" ]] &&
         [[ "${HADOOP_ALLOW_IPV6}" != "yes" ]]; then
        hadoop_error "ERROR: \"net.ipv6.bindv6only\" is set to 1 "
        hadoop_error "ERROR: Hadoop networking could be broken. Aborting."
        hadoop_error "ERROR: For more info: http://wiki.apache.org/hadoop/HadoopIPv6"
        exit 1
      fi

      # Newer versions of glibc use an arena memory allocator that
      # causes virtual memory usage to explode. This interacts badly
      # with the many threads that we use in Hadoop. Tune the variable
      # down to prevent vmem explosion.
      export MALLOC_ARENA_MAX=${MALLOC_ARENA_MAX:-4}
    ;;
    CYGWIN*)
      # Flag that we're running on Cygwin to trigger path translation later.
      HADOOP_IS_CYGWIN=true
    ;;
  esac
}
function hadoop_java_setup
{
  # Bail if we did not detect it
  if [[ -z "${JAVA_HOME}" ]]; then
    hadoop_error "ERROR: JAVA_HOME is not set and could not be found."
    exit 1
  fi

  if [[ ! -d "${JAVA_HOME}" ]]; then
    hadoop_error "ERROR: JAVA_HOME ${JAVA_HOME} does not exist."
    exit 1
  fi

  JAVA="${JAVA_HOME}/bin/java"

  if [[ ! -x "$JAVA" ]]; then
    hadoop_error "ERROR: $JAVA is not executable."
    exit 1
  fi
}

function hadoop_finalize_libpaths
{
  if [[ -n "${JAVA_LIBRARY_PATH}" ]]; then
    hadoop_translate_cygwin_path JAVA_LIBRARY_PATH
    hadoop_add_param HADOOP_OPTS java.library.path \
      "-Djava.library.path=${JAVA_LIBRARY_PATH}"
    export LD_LIBRARY_PATH
  fi
}
function hadoop_finalize_hadoop_heap
{
  if [[ -n "${HADOOP_HEAPSIZE_MAX}" ]]; then
    if [[ "${HADOOP_HEAPSIZE_MAX}" =~ ^[0-9]+$ ]]; then
      HADOOP_HEAPSIZE_MAX="${HADOOP_HEAPSIZE_MAX}m"
    fi
    hadoop_add_param HADOOP_OPTS Xmx "-Xmx${HADOOP_HEAPSIZE_MAX}"
  fi

  # backwards compatibility
  if [[ -n "${HADOOP_HEAPSIZE}" ]]; then
    if [[ "${HADOOP_HEAPSIZE}" =~ ^[0-9]+$ ]]; then
      HADOOP_HEAPSIZE="${HADOOP_HEAPSIZE}m"
    fi
    hadoop_add_param HADOOP_OPTS Xmx "-Xmx${HADOOP_HEAPSIZE}"
  fi

  if [[ -n "${HADOOP_HEAPSIZE_MIN}" ]]; then
    if [[ "${HADOOP_HEAPSIZE_MIN}" =~ ^[0-9]+$ ]]; then
      HADOOP_HEAPSIZE_MIN="${HADOOP_HEAPSIZE_MIN}m"
    fi
    hadoop_add_param HADOOP_OPTS Xms "-Xms${HADOOP_HEAPSIZE_MIN}"
  fi
}
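# Illustrative behaviour (not part of the original script): a bare number is
# treated as megabytes, while a value carrying a unit is passed through, e.g.
#   HADOOP_HEAPSIZE_MAX=4096  ->  -Xmx4096m added to HADOOP_OPTS
#   HADOOP_HEAPSIZE_MAX=4g    ->  -Xmx4g    added to HADOOP_OPTS
# Since hadoop_add_param dedupes on the Xmx/Xms key, the first setting added
# wins and the legacy HADOOP_HEAPSIZE value is ignored if both are present.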
# Accepts a variable name. If running on Cygwin, sets the variable value to the
# equivalent translated Windows path by running the cygpath utility. If the
# second argument is true, then the variable is treated as a path list.
function hadoop_translate_cygwin_path
{
  if [[ "${HADOOP_IS_CYGWIN}" = "true" ]]; then
    if [[ "$2" = "true" ]]; then
      #shellcheck disable=SC2016
      eval "$1"='$(cygpath -p -w "${!1}" 2>/dev/null)'
    else
      #shellcheck disable=SC2016
      eval "$1"='$(cygpath -w "${!1}" 2>/dev/null)'
    fi
  fi
}
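# Illustrative usage (not part of the original script): on Cygwin,
#   hadoop_translate_cygwin_path HADOOP_LOG_DIR     # single path
#   hadoop_translate_cygwin_path CLASSPATH true     # colon-separated path list
# rewrites the named variable in place via cygpath; on any other platform
# both calls are no-ops.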
#
# fill in any last minute options that might not have been defined yet
#
function hadoop_finalize_hadoop_opts
{
  hadoop_translate_cygwin_path HADOOP_LOG_DIR
  hadoop_add_param HADOOP_OPTS hadoop.log.dir "-Dhadoop.log.dir=${HADOOP_LOG_DIR}"
  hadoop_add_param HADOOP_OPTS hadoop.log.file "-Dhadoop.log.file=${HADOOP_LOGFILE}"
  HADOOP_HOME=${HADOOP_PREFIX}
  hadoop_translate_cygwin_path HADOOP_HOME
  export HADOOP_HOME
  hadoop_add_param HADOOP_OPTS hadoop.home.dir "-Dhadoop.home.dir=${HADOOP_HOME}"
  hadoop_add_param HADOOP_OPTS hadoop.id.str "-Dhadoop.id.str=${HADOOP_IDENT_STRING}"
  hadoop_add_param HADOOP_OPTS hadoop.root.logger "-Dhadoop.root.logger=${HADOOP_ROOT_LOGGER}"
  hadoop_add_param HADOOP_OPTS hadoop.policy.file "-Dhadoop.policy.file=${HADOOP_POLICYFILE}"
  hadoop_add_param HADOOP_OPTS hadoop.security.logger "-Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER}"
}

function hadoop_finalize_classpath
{
  hadoop_add_classpath "${HADOOP_CONF_DIR}" before

  # user classpath gets added at the last minute. this allows
  # override of CONF dirs and more
  hadoop_add_to_classpath_userpath
  hadoop_translate_cygwin_path CLASSPATH true
}

function hadoop_finalize_catalina_opts
{
  local prefix=${HADOOP_CATALINA_PREFIX}

  hadoop_add_param CATALINA_OPTS hadoop.home.dir "-Dhadoop.home.dir=${HADOOP_PREFIX}"
  if [[ -n "${JAVA_LIBRARY_PATH}" ]]; then
    hadoop_add_param CATALINA_OPTS java.library.path "-Djava.library.path=${JAVA_LIBRARY_PATH}"
  fi
  hadoop_add_param CATALINA_OPTS "${prefix}.home.dir" "-D${prefix}.home.dir=${HADOOP_PREFIX}"
  hadoop_add_param CATALINA_OPTS "${prefix}.config.dir" "-D${prefix}.config.dir=${HADOOP_CATALINA_CONFIG}"
  hadoop_add_param CATALINA_OPTS "${prefix}.log.dir" "-D${prefix}.log.dir=${HADOOP_CATALINA_LOG}"
  hadoop_add_param CATALINA_OPTS "${prefix}.temp.dir" "-D${prefix}.temp.dir=${HADOOP_CATALINA_TEMP}"
  hadoop_add_param CATALINA_OPTS "${prefix}.admin.port" "-D${prefix}.admin.port=${HADOOP_CATALINA_ADMIN_PORT}"
  hadoop_add_param CATALINA_OPTS "${prefix}.http.port" "-D${prefix}.http.port=${HADOOP_CATALINA_HTTP_PORT}"
  hadoop_add_param CATALINA_OPTS "${prefix}.max.threads" "-D${prefix}.max.threads=${HADOOP_CATALINA_MAX_THREADS}"
  hadoop_add_param CATALINA_OPTS "${prefix}.ssl.keystore.file" "-D${prefix}.ssl.keystore.file=${HADOOP_CATALINA_SSL_KEYSTORE_FILE}"
}
function hadoop_finalize
{
  hadoop_shellprofiles_finalize

  hadoop_finalize_classpath
  hadoop_finalize_libpaths
  hadoop_finalize_hadoop_heap
  hadoop_finalize_hadoop_opts

  hadoop_translate_cygwin_path HADOOP_PREFIX
  hadoop_translate_cygwin_path HADOOP_CONF_DIR
  hadoop_translate_cygwin_path HADOOP_COMMON_HOME
  hadoop_translate_cygwin_path HADOOP_HDFS_HOME
  hadoop_translate_cygwin_path HADOOP_YARN_HOME
  hadoop_translate_cygwin_path HADOOP_MAPRED_HOME
}

function hadoop_exit_with_usage
{
  # NOTE: This function is not user replaceable.

  local exitcode=$1
  if [[ -z $exitcode ]]; then
    exitcode=1
  fi
  if declare -F hadoop_usage >/dev/null ; then
    hadoop_usage
  elif [[ -x /usr/bin/cowsay ]]; then
    /usr/bin/cowsay -f elephant "Sorry, no help available."
  else
    hadoop_error "Sorry, no help available."
  fi
  exit $exitcode
}
function hadoop_verify_secure_prereq
{
  # if you are on an OS like Illumos that has functional roles
  # and you are using pfexec, you'll probably want to change
  # this.

  # ${EUID} comes from the shell itself!
  if [[ "${EUID}" -ne 0 ]] && [[ -z "${HADOOP_SECURE_COMMAND}" ]]; then
    hadoop_error "ERROR: You must be a privileged user in order to run a secure service."
    exit 1
  else
    return 0
  fi
}

function hadoop_setup_secure_service
{
  # need a more complicated setup? replace me!
  HADOOP_PID_DIR=${HADOOP_SECURE_PID_DIR}
  HADOOP_LOG_DIR=${HADOOP_SECURE_LOG_DIR}
}
function hadoop_verify_piddir
{
  if [[ -z "${HADOOP_PID_DIR}" ]]; then
    hadoop_error "No pid directory defined."
    exit 1
  fi
  if [[ ! -w "${HADOOP_PID_DIR}" ]] && [[ ! -d "${HADOOP_PID_DIR}" ]]; then
    hadoop_error "WARNING: ${HADOOP_PID_DIR} does not exist. Creating."
    mkdir -p "${HADOOP_PID_DIR}" > /dev/null 2>&1
    if [[ $? -gt 0 ]]; then
      hadoop_error "ERROR: Unable to create ${HADOOP_PID_DIR}. Aborting."
      exit 1
    fi
  fi
  touch "${HADOOP_PID_DIR}/$$" >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Unable to write in ${HADOOP_PID_DIR}. Aborting."
    exit 1
  fi
  rm "${HADOOP_PID_DIR}/$$" >/dev/null 2>&1
}

function hadoop_verify_logdir
{
  if [[ -z "${HADOOP_LOG_DIR}" ]]; then
    hadoop_error "No log directory defined."
    exit 1
  fi
  if [[ ! -w "${HADOOP_LOG_DIR}" ]] && [[ ! -d "${HADOOP_LOG_DIR}" ]]; then
    hadoop_error "WARNING: ${HADOOP_LOG_DIR} does not exist. Creating."
    mkdir -p "${HADOOP_LOG_DIR}" > /dev/null 2>&1
    if [[ $? -gt 0 ]]; then
      hadoop_error "ERROR: Unable to create ${HADOOP_LOG_DIR}. Aborting."
      exit 1
    fi
  fi
  touch "${HADOOP_LOG_DIR}/$$" >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Unable to write in ${HADOOP_LOG_DIR}. Aborting."
    exit 1
  fi
  rm "${HADOOP_LOG_DIR}/$$" >/dev/null 2>&1
}
function hadoop_status_daemon()
{
  #
  # LSB 4.1.0 compatible status command (1)
  #
  # 0 = program is running
  # 1 = dead, but still a pid (2)
  # 2 = (not used by us)
  # 3 = not running
  #
  # 1 - this is not an endorsement of the LSB
  #
  # 2 - technically, the specification says /var/run/pid, so
  #     we should never return this value, but we're giving
  #     them the benefit of a doubt and returning 1 even if
  #     our pid is not in /var/run .
  #
  local pidfile=$1
  shift

  local pid

  if [[ -f "${pidfile}" ]]; then
    pid=$(cat "${pidfile}")
    if ps -p "${pid}" > /dev/null 2>&1; then
      return 0
    fi
    return 1
  fi
  return 3
}
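# Illustrative usage (not part of the original script), with a hypothetical
# pid file:
#   hadoop_status_daemon "/tmp/hadoop-example-namenode.pid"
#   case $? in
#     0) echo "running" ;;
#     1) echo "dead, but pid file remains" ;;
#     3) echo "not running" ;;
#   esac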
function hadoop_java_exec
{
  # run a java command. this is used for
  # non-daemons

  local command=$1
  local class=$2
  shift 2

  hadoop_debug "Final CLASSPATH: ${CLASSPATH}"
  hadoop_debug "Final HADOOP_OPTS: ${HADOOP_OPTS}"

  export CLASSPATH
  #shellcheck disable=SC2086
  exec "${JAVA}" "-Dproc_${command}" ${HADOOP_OPTS} "${class}" "$@"
}

function hadoop_start_daemon
{
  # this is our non-privileged daemon starter
  # that fires up a daemon in the *foreground*
  # so complex! so wow! much java!
  local command=$1
  local class=$2
  local pidfile=$3
  shift 3

  hadoop_debug "Final CLASSPATH: ${CLASSPATH}"
  hadoop_debug "Final HADOOP_OPTS: ${HADOOP_OPTS}"

  # this is for the non-daemon pid creation
  #shellcheck disable=SC2086
  echo $$ > "${pidfile}" 2>/dev/null
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot write ${command} pid ${pidfile}."
  fi

  export CLASSPATH
  #shellcheck disable=SC2086
  exec "${JAVA}" "-Dproc_${command}" ${HADOOP_OPTS} "${class}" "$@"
}
function hadoop_start_daemon_wrapper
{
  # this is our non-privileged daemon start
  # that fires up a daemon in the *background*
  local daemonname=$1
  local class=$2
  local pidfile=$3
  local outfile=$4
  shift 4

  local counter

  hadoop_rotate_log "${outfile}"

  hadoop_start_daemon "${daemonname}" \
    "$class" \
    "${pidfile}" \
    "$@" >> "${outfile}" 2>&1 < /dev/null &

  # we need to avoid a race condition here
  # so let's wait for the fork to finish
  # before overriding with the daemonized pid
  (( counter=0 ))
  while [[ ! -f ${pidfile} && ${counter} -le 5 ]]; do
    sleep 1
    (( counter++ ))
  done

  # this is for daemon pid creation
  #shellcheck disable=SC2086
  echo $! > "${pidfile}" 2>/dev/null
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot write ${daemonname} pid ${pidfile}."
  fi

  # shellcheck disable=SC2086
  renice "${HADOOP_NICENESS}" $! >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot set priority of ${daemonname} process $!"
  fi

  # shellcheck disable=SC2086
  disown %+ >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot disconnect ${daemonname} process $!"
  fi
  sleep 1

  # capture the ulimit output
  ulimit -a >> "${outfile}" 2>&1

  # shellcheck disable=SC2086
  if ! ps -p $! >/dev/null 2>&1; then
    return 1
  fi
  return 0
}
function hadoop_start_secure_daemon
{
  # this is used to launch a secure daemon in the *foreground*
  #
  local daemonname=$1
  local class=$2

  # pid file to create for our daemon
  local daemonpidfile=$3

  # where to send stdout. jsvc has bad habits so this *may* be &1
  # which means you send it to stdout!
  local daemonoutfile=$4

  # where to send stderr. same thing, except &2 = stderr
  local daemonerrfile=$5
  local privpidfile=$6
  shift 6

  hadoop_rotate_log "${daemonoutfile}"
  hadoop_rotate_log "${daemonerrfile}"

  jsvc="${JSVC_HOME}/jsvc"
  if [[ ! -f "${jsvc}" ]]; then
    hadoop_error "JSVC_HOME is not set or set incorrectly. jsvc is required to run secure"
    hadoop_error "or privileged daemons. Please download and install jsvc from "
    hadoop_error "http://archive.apache.org/dist/commons/daemon/binaries/ "
    hadoop_error "and set JSVC_HOME to the directory containing the jsvc binary."
    exit 1
  fi

  # note that shellcheck will throw a
  # bogus for-our-use-case 2086 here.
  # it doesn't properly support multi-line situations

  hadoop_debug "Final CLASSPATH: ${CLASSPATH}"
  hadoop_debug "Final HADOOP_OPTS: ${HADOOP_OPTS}"

  #shellcheck disable=SC2086
  echo $$ > "${privpidfile}" 2>/dev/null
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot write ${daemonname} pid ${privpidfile}."
  fi

  exec "${jsvc}" \
    "-Dproc_${daemonname}" \
    -outfile "${daemonoutfile}" \
    -errfile "${daemonerrfile}" \
    -pidfile "${daemonpidfile}" \
    -nodetach \
    -user "${HADOOP_SECURE_USER}" \
    -cp "${CLASSPATH}" \
    ${HADOOP_OPTS} \
    "${class}" "$@"
}
function hadoop_start_secure_daemon_wrapper
{
  # this wraps hadoop_start_secure_daemon to take care
  # of the dirty work to launch a daemon in the background!
  local daemonname=$1
  local class=$2

  # same rules as hadoop_start_secure_daemon except we
  # have some additional parameters
  local daemonpidfile=$3
  local daemonoutfile=$4

  # the pid file of the subprocess that spawned our
  # secure launcher
  local jsvcpidfile=$5

  # the output of the subprocess that spawned our secure
  # launcher
  local jsvcoutfile=$6
  local daemonerrfile=$7
  shift 7

  local counter

  hadoop_rotate_log "${jsvcoutfile}"

  hadoop_start_secure_daemon \
    "${daemonname}" \
    "${class}" \
    "${daemonpidfile}" \
    "${daemonoutfile}" \
    "${daemonerrfile}" \
    "${jsvcpidfile}" "$@" >> "${jsvcoutfile}" 2>&1 < /dev/null &

  # we need to avoid a race condition here
  # so let's wait for the fork to finish
  # before overriding with the daemonized pid
  (( counter=0 ))
  while [[ ! -f ${daemonpidfile} && ${counter} -le 5 ]]; do
    sleep 1
    (( counter++ ))
  done

  # this is for the daemon pid creation
  #shellcheck disable=SC2086
  echo $! > "${jsvcpidfile}" 2>/dev/null
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot write ${daemonname} pid ${daemonpidfile}."
  fi

  sleep 1
  #shellcheck disable=SC2086
  renice "${HADOOP_NICENESS}" $! >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot set priority of ${daemonname} process $!"
  fi
  if [[ -f "${daemonpidfile}" ]]; then
    #shellcheck disable=SC2046
    renice "${HADOOP_NICENESS}" $(cat "${daemonpidfile}" 2>/dev/null) >/dev/null 2>&1
    if [[ $? -gt 0 ]]; then
      hadoop_error "ERROR: Cannot set priority of ${daemonname} process $(cat "${daemonpidfile}" 2>/dev/null)"
    fi
  fi
  #shellcheck disable=SC2046
  disown %+ >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot disconnect ${daemonname} process $!"
  fi

  # capture the ulimit output
  su "${HADOOP_SECURE_USER}" -c 'bash -c "ulimit -a"' >> "${jsvcoutfile}" 2>&1
  #shellcheck disable=SC2086
  if ! ps -p $! >/dev/null 2>&1; then
    return 1
  fi
  return 0
}
function hadoop_stop_daemon
{
  local cmd=$1
  local pidfile=$2
  shift 2

  local pid

  if [[ -f "${pidfile}" ]]; then
    pid=$(cat "$pidfile")
    kill "${pid}" >/dev/null 2>&1
    sleep "${HADOOP_STOP_TIMEOUT}"
    if kill -0 "${pid}" > /dev/null 2>&1; then
      hadoop_error "WARNING: ${cmd} did not stop gracefully after ${HADOOP_STOP_TIMEOUT} seconds: Trying to kill with kill -9"
      kill -9 "${pid}" >/dev/null 2>&1
    fi
    if ps -p "${pid}" > /dev/null 2>&1; then
      hadoop_error "ERROR: Unable to kill ${pid}"
    else
      rm -f "${pidfile}" >/dev/null 2>&1
    fi
  fi
}

function hadoop_stop_secure_daemon
{
  local command=$1
  local daemonpidfile=$2
  local privpidfile=$3
  shift 3

  local ret

  hadoop_stop_daemon "${command}" "${daemonpidfile}"
  ret=$?
  rm -f "${daemonpidfile}" "${privpidfile}" 2>/dev/null
  return ${ret}
}
function hadoop_daemon_handler
{
  local daemonmode=$1
  local daemonname=$2
  local class=$3
  local daemon_pidfile=$4
  local daemon_outfile=$5
  shift 5

  case ${daemonmode} in
    status)
      hadoop_status_daemon "${daemon_pidfile}"
      exit $?
    ;;

    stop)
      hadoop_stop_daemon "${daemonname}" "${daemon_pidfile}"
      exit $?
    ;;

    ##COMPAT -- older hadoops would also start daemons by default
    start|default)
      hadoop_verify_piddir
      hadoop_verify_logdir
      hadoop_status_daemon "${daemon_pidfile}"
      if [[ $? == 0 ]]; then
        hadoop_error "${daemonname} is running as process $(cat "${daemon_pidfile}"). Stop it first."
        exit 1
      else
        # stale pid file, so just remove it and continue on
        rm -f "${daemon_pidfile}" >/dev/null 2>&1
      fi

      ##COMPAT - differentiate between --daemon start and nothing
      # "nothing" shouldn't detach
      if [[ "$daemonmode" = "default" ]]; then
        hadoop_start_daemon "${daemonname}" "${class}" "${daemon_pidfile}" "$@"
      else
        hadoop_start_daemon_wrapper "${daemonname}" \
          "${class}" "${daemon_pidfile}" "${daemon_outfile}" "$@"
      fi
    ;;
  esac
}
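# Illustrative usage (not part of the original script): a daemon launch script
# might dispatch with hypothetical pid/out file names such as
#   hadoop_daemon_handler "${HADOOP_DAEMON_MODE}" namenode \
#     org.apache.hadoop.hdfs.server.namenode.NameNode \
#     "${HADOOP_PID_DIR}/hadoop-${HADOOP_IDENT_STRING}-namenode.pid" \
#     "${HADOOP_LOG_DIR}/hadoop-${HADOOP_IDENT_STRING}-namenode.out" "$@"
# where the mode is one of "start", "stop", "status", or "default".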
function hadoop_secure_daemon_handler
{
  local daemonmode=$1
  local daemonname=$2
  local classname=$3
  local daemon_pidfile=$4
  local daemon_outfile=$5
  local priv_pidfile=$6
  local priv_outfile=$7
  local priv_errfile=$8
  shift 8

  case ${daemonmode} in
    status)
      hadoop_status_daemon "${daemon_pidfile}"
      exit $?
    ;;

    stop)
      hadoop_stop_secure_daemon "${daemonname}" \
        "${daemon_pidfile}" "${priv_pidfile}"
      exit $?
    ;;

    ##COMPAT -- older hadoops would also start daemons by default
    start|default)
      hadoop_verify_piddir
      hadoop_verify_logdir
      hadoop_status_daemon "${daemon_pidfile}"
      if [[ $? == 0 ]]; then
        hadoop_error "${daemonname} is running as process $(cat "${daemon_pidfile}"). Stop it first."
        exit 1
      else
        # stale pid file, so just remove it and continue on
        rm -f "${daemon_pidfile}" >/dev/null 2>&1
      fi

      ##COMPAT - differentiate between --daemon start and nothing
      # "nothing" shouldn't detach
      if [[ "${daemonmode}" = "default" ]]; then
        hadoop_start_secure_daemon "${daemonname}" "${classname}" \
          "${daemon_pidfile}" "${daemon_outfile}" \
          "${priv_errfile}" "${priv_pidfile}" "$@"
      else
        hadoop_start_secure_daemon_wrapper "${daemonname}" "${classname}" \
          "${daemon_pidfile}" "${daemon_outfile}" \
          "${priv_pidfile}" "${priv_outfile}" "${priv_errfile}" "$@"
      fi
    ;;
  esac
}
function hadoop_verify_user
{
  local command=$1
  local uservar="HADOOP_${command}_USER"

  if [[ -n ${!uservar} ]]; then
    if [[ ${!uservar} != ${USER} ]]; then
      hadoop_error "ERROR: ${command} can only be executed by ${!uservar}."
      exit 1
    fi
  fi
}
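# Illustrative usage (not part of the original script): the command string is
# used verbatim in the variable name, so with a hypothetical setting of
#   export HADOOP_namenode_USER=hdfs
# a call to
#   hadoop_verify_user namenode
# aborts with an error whenever the invoking ${USER} is not "hdfs".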
function hadoop_do_classpath_subcommand
{
  if [[ "$#" -gt 0 ]]; then
    CLASS=org.apache.hadoop.util.Classpath
  else
    hadoop_finalize
    echo "${CLASSPATH}"
    exit 0
  fi
}