#!/bin/bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

function hadoop_error
{
  # NOTE: This function is not user replaceable.

  echo "$*" 1>&2
}

function hadoop_debug
{
  if [[ -n "${HADOOP_SHELL_SCRIPT_DEBUG}" ]]; then
    echo "DEBUG: $*" 1>&2
  fi
}

function hadoop_bootstrap_init
{
  # NOTE: This function is not user replaceable.

  # the root of the Hadoop installation
  # See HADOOP-6255 for the expected directory structure layout

  # By now, HADOOP_LIBEXEC_DIR should have been defined upstream
  # We can piggyback off of that to figure out where the default
  # HADOOP_PREFIX should be. This allows us to run without
  # HADOOP_PREFIX ever being defined by a human! As a consequence,
  # HADOOP_LIBEXEC_DIR now becomes perhaps the single most powerful
  # env var within Hadoop.
  if [[ -z "${HADOOP_LIBEXEC_DIR}" ]]; then
    hadoop_error "HADOOP_LIBEXEC_DIR is not defined. Exiting."
    exit 1
  fi
  HADOOP_DEFAULT_PREFIX=$(cd -P -- "${HADOOP_LIBEXEC_DIR}/.." >/dev/null && pwd -P)
  HADOOP_PREFIX=${HADOOP_PREFIX:-$HADOOP_DEFAULT_PREFIX}
  export HADOOP_PREFIX

  #
  # short-cuts. vendors may redefine these as well, preferably
  # in hadoop-layouts.sh
  #
  HADOOP_COMMON_DIR=${HADOOP_COMMON_DIR:-"share/hadoop/common"}
  HADOOP_COMMON_LIB_JARS_DIR=${HADOOP_COMMON_LIB_JARS_DIR:-"share/hadoop/common/lib"}
  HADOOP_COMMON_LIB_NATIVE_DIR=${HADOOP_COMMON_LIB_NATIVE_DIR:-"lib/native"}
  HDFS_DIR=${HDFS_DIR:-"share/hadoop/hdfs"}
  HDFS_LIB_JARS_DIR=${HDFS_LIB_JARS_DIR:-"share/hadoop/hdfs/lib"}
  YARN_DIR=${YARN_DIR:-"share/hadoop/yarn"}
  YARN_LIB_JARS_DIR=${YARN_LIB_JARS_DIR:-"share/hadoop/yarn/lib"}
  MAPRED_DIR=${MAPRED_DIR:-"share/hadoop/mapreduce"}
  MAPRED_LIB_JARS_DIR=${MAPRED_LIB_JARS_DIR:-"share/hadoop/mapreduce/lib"}

  # setup a default TOOL_PATH
  TOOL_PATH=${TOOL_PATH:-${HADOOP_PREFIX}/share/hadoop/tools/lib/*}

  export HADOOP_OS_TYPE=${HADOOP_OS_TYPE:-$(uname -s)}

  # defaults
  export HADOOP_OPTS=${HADOOP_OPTS:-"-Djava.net.preferIPv4Stack=true"}
  hadoop_debug "Initial HADOOP_OPTS=${HADOOP_OPTS}"
}

function hadoop_find_confdir
{
  # NOTE: This function is not user replaceable.

  local conf_dir

  # Look for the basic hadoop configuration area.
  #
  # An attempt at compatibility with some Hadoop 1.x
  # installs.
  if [[ -e "${HADOOP_PREFIX}/conf/hadoop-env.sh" ]]; then
    conf_dir="conf"
  else
    conf_dir="etc/hadoop"
  fi
  export HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-${HADOOP_PREFIX}/${conf_dir}}"

  hadoop_debug "HADOOP_CONF_DIR=${HADOOP_CONF_DIR}"
}

function hadoop_exec_hadoopenv
{
  # NOTE: This function is not user replaceable.

  if [[ -z "${HADOOP_ENV_PROCESSED}" ]]; then
    if [[ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]]; then
      export HADOOP_ENV_PROCESSED=true
      . "${HADOOP_CONF_DIR}/hadoop-env.sh"
    fi
  fi
}

function hadoop_exec_userfuncs
{
  # NOTE: This function is not user replaceable.

  if [[ -e "${HADOOP_CONF_DIR}/hadoop-user-functions.sh" ]]; then
    . "${HADOOP_CONF_DIR}/hadoop-user-functions.sh"
  fi
}

function hadoop_exec_hadooprc
{
  # Read the user's settings. This provides for users to override
  # and/or append hadoop-env.sh. It is not meant as a complete system override.
  if [[ -f "${HOME}/.hadooprc" ]]; then
    hadoop_debug "Applying the user's .hadooprc"
    . "${HOME}/.hadooprc"
  fi
}
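
# Illustrative example (not part of the original script): a per-user
# ~/.hadooprc sourced by hadoop_exec_hadooprc might simply export a few
# environment tweaks, e.g.:
#
#   # ~/.hadooprc (hypothetical)
#   export HADOOP_SHELL_SCRIPT_DEBUG=true   # turn on hadoop_debug output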

function hadoop_basic_init
{
  # Some of these are also set in hadoop-env.sh.
  # we still set them here just in case hadoop-env.sh is
  # broken in some way, set up defaults, etc.
  #
  # but it is important to note that if you update these
  # you also need to update hadoop-env.sh as well!!!

  # CLASSPATH initially contains $HADOOP_CONF_DIR
  CLASSPATH="${HADOOP_CONF_DIR}"
  hadoop_debug "Initial CLASSPATH=${HADOOP_CONF_DIR}"

  if [[ -z "${HADOOP_COMMON_HOME}" ]] &&
     [[ -d "${HADOOP_PREFIX}/${HADOOP_COMMON_DIR}" ]]; then
    export HADOOP_COMMON_HOME="${HADOOP_PREFIX}"
  fi

  # default policy file for service-level authorization
  HADOOP_POLICYFILE=${HADOOP_POLICYFILE:-"hadoop-policy.xml"}

  # define HADOOP_HDFS_HOME
  if [[ -z "${HADOOP_HDFS_HOME}" ]] &&
     [[ -d "${HADOOP_PREFIX}/${HDFS_DIR}" ]]; then
    export HADOOP_HDFS_HOME="${HADOOP_PREFIX}"
  fi

  # define HADOOP_YARN_HOME
  if [[ -z "${HADOOP_YARN_HOME}" ]] &&
     [[ -d "${HADOOP_PREFIX}/${YARN_DIR}" ]]; then
    export HADOOP_YARN_HOME="${HADOOP_PREFIX}"
  fi

  # define HADOOP_MAPRED_HOME
  if [[ -z "${HADOOP_MAPRED_HOME}" ]] &&
     [[ -d "${HADOOP_PREFIX}/${MAPRED_DIR}" ]]; then
    export HADOOP_MAPRED_HOME="${HADOOP_PREFIX}"
  fi

  HADOOP_IDENT_STRING=${HADOOP_IDENT_STRING:-$USER}
  HADOOP_LOG_DIR=${HADOOP_LOG_DIR:-"${HADOOP_PREFIX}/logs"}
  HADOOP_LOGFILE=${HADOOP_LOGFILE:-hadoop.log}
  HADOOP_LOGLEVEL=${HADOOP_LOGLEVEL:-INFO}
  HADOOP_NICENESS=${HADOOP_NICENESS:-0}
  HADOOP_STOP_TIMEOUT=${HADOOP_STOP_TIMEOUT:-5}
  HADOOP_PID_DIR=${HADOOP_PID_DIR:-/tmp}
  HADOOP_ROOT_LOGGER=${HADOOP_ROOT_LOGGER:-${HADOOP_LOGLEVEL},console}
  HADOOP_DAEMON_ROOT_LOGGER=${HADOOP_DAEMON_ROOT_LOGGER:-${HADOOP_LOGLEVEL},RFA}
  HADOOP_SECURITY_LOGGER=${HADOOP_SECURITY_LOGGER:-INFO,NullAppender}
  HADOOP_SSH_OPTS=${HADOOP_SSH_OPTS:-"-o BatchMode=yes -o StrictHostKeyChecking=no -o ConnectTimeout=10s"}
  HADOOP_SECURE_LOG_DIR=${HADOOP_SECURE_LOG_DIR:-${HADOOP_LOG_DIR}}
  HADOOP_SECURE_PID_DIR=${HADOOP_SECURE_PID_DIR:-${HADOOP_PID_DIR}}
  HADOOP_SSH_PARALLEL=${HADOOP_SSH_PARALLEL:-10}
}

function hadoop_populate_slaves_file()
{
  # NOTE: This function is not user replaceable.

  local slavesfile=$1
  shift
  if [[ -f "${slavesfile}" ]]; then
    # shellcheck disable=2034
    HADOOP_SLAVES="${slavesfile}"
  elif [[ -f "${HADOOP_CONF_DIR}/${slavesfile}" ]]; then
    # shellcheck disable=2034
    HADOOP_SLAVES="${HADOOP_CONF_DIR}/${slavesfile}"
    # shellcheck disable=2034
    YARN_SLAVES="${HADOOP_CONF_DIR}/${slavesfile}"
  else
    hadoop_error "ERROR: Cannot find hosts file \"${slavesfile}\""
    hadoop_exit_with_usage 1
  fi
}

function hadoop_rotate_log
{
  #
  # log rotation (mainly used for .out files)
  # Users are likely to replace this one for something
  # that gzips or uses dates or who knows what.
  #
  # be aware that &1 and &2 might go through here
  # so don't do anything too crazy...
  #
  local log=$1;
  local num=${2:-5};

  if [[ -f "${log}" ]]; then # rotate logs
    while [[ ${num} -gt 1 ]]; do
      #shellcheck disable=SC2086
      let prev=${num}-1
      if [[ -f "${log}.${prev}" ]]; then
        mv "${log}.${prev}" "${log}.${num}"
      fi
      num=${prev}
    done
    mv "${log}" "${log}.${num}"
  fi
}
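
# Illustrative example (not part of the original script): with the default
# num=5 and an existing daemon .out file, a call such as
#
#   hadoop_rotate_log "${HADOOP_LOG_DIR}/hadoop-hdfs-namenode-host1.out"
#
# shifts .out.4 -> .out.5, ..., .out.1 -> .out.2, then .out -> .out.1
# before a new .out is written. The path shown is hypothetical.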

function hadoop_actual_ssh
{
  # we are passing this function to xargs
  # should get hostname followed by rest of command line
  local slave=$1
  shift

  # shellcheck disable=SC2086
  ssh ${HADOOP_SSH_OPTS} ${slave} $"${@// /\\ }" 2>&1 | sed "s/^/$slave: /"
}

function hadoop_connect_to_hosts
{
  # shellcheck disable=SC2124
  local params="$@"

  #
  # ssh (or whatever) to a host
  #
  # User can specify hostnames or a file where the hostnames are (not both)
  if [[ -n "${HADOOP_SLAVES}" && -n "${HADOOP_SLAVE_NAMES}" ]] ; then
    hadoop_error "ERROR: Both HADOOP_SLAVES and HADOOP_SLAVE_NAMES were defined. Aborting."
    exit 1
  fi

  if [[ -n "${HADOOP_SLAVE_NAMES}" ]] ; then
    SLAVE_NAMES=${HADOOP_SLAVE_NAMES}
  else
    SLAVE_FILE=${HADOOP_SLAVES:-${HADOOP_CONF_DIR}/slaves}
  fi

  # if pdsh is available, let's use it. otherwise default
  # to a loop around ssh. (ugh)
  if [[ -e '/usr/bin/pdsh' ]]; then
    if [[ -z "${HADOOP_SLAVE_NAMES}" ]] ; then
      # if we were given a file, just let pdsh deal with it.
      # shellcheck disable=SC2086
      PDSH_SSH_ARGS_APPEND="${HADOOP_SSH_OPTS}" pdsh \
        -f "${HADOOP_SSH_PARALLEL}" -w ^"${SLAVE_FILE}" $"${@// /\\ }" 2>&1
    else
      # no spaces allowed in the pdsh arg host list
      # shellcheck disable=SC2086
      SLAVE_NAMES=$(echo ${SLAVE_NAMES} | tr -s ' ' ,)
      PDSH_SSH_ARGS_APPEND="${HADOOP_SSH_OPTS}" pdsh \
        -f "${HADOOP_SSH_PARALLEL}" -w "${SLAVE_NAMES}" $"${@// /\\ }" 2>&1
    fi
  else
    if [[ -z "${SLAVE_NAMES}" ]]; then
      SLAVE_NAMES=$(sed 's/#.*$//;/^$/d' "${SLAVE_FILE}")
    fi

    # quoting here gets tricky. it's easier to push it into a function
    # so that we don't have to deal with it. However...
    # xargs can't use a function so instead we'll export it out
    # and force it into a subshell
    # moral of the story: just use pdsh.
    export -f hadoop_actual_ssh
    export HADOOP_SSH_OPTS

    # xargs is used with option -I to replace the placeholder in the argument
    # list with each hostname read from stdin/pipe. But it considers one
    # line as one argument while reading from stdin/pipe. So place each
    # hostname on a separate line while passing via pipe.
    SLAVE_NAMES=$(echo "$SLAVE_NAMES" | tr ' ' '\n' )
    echo "${SLAVE_NAMES}" | \
      xargs -n 1 -P"${HADOOP_SSH_PARALLEL}" \
        -I {} bash -c -- "hadoop_actual_ssh {} ${params}"
    wait
  fi
}
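
# Illustrative example (not part of the original script): running a command
# on an explicit list of hosts instead of the slaves file. Host names and
# the command are hypothetical.
#
#   HADOOP_SLAVE_NAMES="worker1 worker2"
#   hadoop_connect_to_hosts uptime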

function hadoop_validate_classname
{
  local class=$1
  shift 1

  if [[ ! ${class} =~ \. ]]; then
    # assuming the arg is a typo of a command if it does not contain ".".
    # a class belonging to no package is not allowed as a result.
    hadoop_error "ERROR: ${class} is not COMMAND nor fully qualified CLASSNAME."
    return 1
  fi
  return 0
}
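
# Illustrative example (not part of the original script):
#
#   hadoop_validate_classname org.apache.hadoop.util.VersionInfo  # returns 0
#   hadoop_validate_classname versioninfo                         # returns 1 (no ".")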

function hadoop_add_param
{
  #
  # general param dedupe..
  # $1 is what we are adding to
  # $2 is the name of what we want to add (key)
  # $3 is the key+value of what we're adding
  #
  # doing it this way allows us to support all sorts of
  # different syntaxes, just so long as they are space
  # delimited
  #
  if [[ ! ${!1} =~ $2 ]] ; then
    # shellcheck disable=SC2086
    eval $1="'${!1} $3'"
    hadoop_debug "$1 accepted $3"
  else
    hadoop_debug "$1 declined $3"
  fi
}
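
# Illustrative example (not part of the original script): appending a system
# property to HADOOP_OPTS only if no java.library.path setting is present yet.
# The directory is hypothetical.
#
#   hadoop_add_param HADOOP_OPTS java.library.path \
#     "-Djava.library.path=/opt/hadoop/lib/native"
#
# A second call with the same key is declined, since HADOOP_OPTS already
# matches "java.library.path".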

function hadoop_add_classpath
{
  # two params:
  # $1 = directory, file, wildcard, whatever to add
  # $2 = before or after, which determines where in the
  #      classpath this object should go. default is after
  # return 0 = success (added or duplicate)
  # return 1 = failure (doesn't exist, whatever)

  # However, with classpath (& JLP), we can do dedupe
  # along with some sanity checking (e.g., missing directories)
  # since we have a better idea of what is legal
  #
  # for wildcard at end, we can
  # at least check the dir exists
  if [[ $1 =~ ^.*\*$ ]]; then
    local mp=$(dirname "$1")
    if [[ ! -d "${mp}" ]]; then
      hadoop_debug "Rejected CLASSPATH: $1 (not a dir)"
      return 1
    fi

  # no wildcard in the middle, so check existence
  # (doesn't matter *what* it is)
  elif [[ ! $1 =~ ^.*\*.*$ ]] && [[ ! -e "$1" ]]; then
    hadoop_debug "Rejected CLASSPATH: $1 (does not exist)"
    return 1
  fi

  if [[ -z "${CLASSPATH}" ]]; then
    CLASSPATH=$1
    hadoop_debug "Initial CLASSPATH=$1"
  elif [[ ":${CLASSPATH}:" != *":$1:"* ]]; then
    if [[ "$2" = "before" ]]; then
      CLASSPATH="$1:${CLASSPATH}"
      hadoop_debug "Prepend CLASSPATH: $1"
    else
      CLASSPATH+=:$1
      hadoop_debug "Append CLASSPATH: $1"
    fi
  else
    hadoop_debug "Dupe CLASSPATH: $1"
  fi
  return 0
}
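
# Illustrative example (not part of the original script), with a hypothetical
# jar directory:
#
#   hadoop_add_classpath "/opt/extra/lib/*"           # appended if /opt/extra/lib exists
#   hadoop_add_classpath "${HADOOP_CONF_DIR}" before  # prepended, deduplicated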

function hadoop_add_colonpath
{
  # three params:
  # $1 = name of the variable to modify (e.g., LD_LIBRARY_PATH)
  # $2 = directory, file, wildcard, whatever to add
  # $3 = before or after, which determines where in the
  #      path this object should go
  # return 0 = success
  # return 1 = failure (duplicate or not a directory)

  # this is CLASSPATH, JLP, etc but with dedupe but no
  # other checking
  if [[ -d "${2}" ]] && [[ ":${!1}:" != *":$2:"* ]]; then
    if [[ -z "${!1}" ]]; then
      # shellcheck disable=SC2086
      eval $1="'$2'"
      hadoop_debug "Initial colonpath($1): $2"
    elif [[ "$3" = "before" ]]; then
      # shellcheck disable=SC2086
      eval $1="'$2:${!1}'"
      hadoop_debug "Prepend colonpath($1): $2"
    else
      # shellcheck disable=SC2086
      eval $1+=":'$2'"
      hadoop_debug "Append colonpath($1): $2"
    fi
    return 0
  fi
  hadoop_debug "Rejected colonpath($1): $2"
  return 1
}

function hadoop_add_javalibpath
{
  # specialized function for a common use case
  hadoop_add_colonpath JAVA_LIBRARY_PATH "$1" "$2"
}

function hadoop_add_ldlibpath
{
  # specialized function for a common use case
  hadoop_add_colonpath LD_LIBRARY_PATH "$1" "$2"

  # note that we export this
  export LD_LIBRARY_PATH
}
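
# Illustrative example (not part of the original script): adding the native
# library directory, if it exists, to the end of LD_LIBRARY_PATH.
#
#   hadoop_add_ldlibpath "${HADOOP_PREFIX}/lib/native" after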

function hadoop_add_to_classpath_common
{
  #
  # get all of the common jars+config in the path
  #

  # developers
  if [[ -n "${HADOOP_ENABLE_BUILD_PATHS}" ]]; then
    hadoop_add_classpath "${HADOOP_COMMON_HOME}/hadoop-common/target/classes"
  fi

  if [[ -d "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_DIR}/webapps" ]]; then
    hadoop_add_classpath "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_DIR}"
  fi

  hadoop_add_classpath "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_LIB_JARS_DIR}"'/*'
  hadoop_add_classpath "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_DIR}"'/*'
}

function hadoop_add_to_classpath_hdfs
{
  #
  # get all of the hdfs jars+config in the path
  #

  # developers
  if [[ -n "${HADOOP_ENABLE_BUILD_PATHS}" ]]; then
    hadoop_add_classpath "${HADOOP_HDFS_HOME}/hadoop-hdfs/target/classes"
  fi

  # put hdfs in classpath if present
  if [[ -d "${HADOOP_HDFS_HOME}/${HDFS_DIR}/webapps" ]]; then
    hadoop_add_classpath "${HADOOP_HDFS_HOME}/${HDFS_DIR}"
  fi

  hadoop_add_classpath "${HADOOP_HDFS_HOME}/${HDFS_LIB_JARS_DIR}"'/*'
  hadoop_add_classpath "${HADOOP_HDFS_HOME}/${HDFS_DIR}"'/*'
}

function hadoop_add_to_classpath_yarn
{
  local i
  #
  # get all of the yarn jars+config in the path
  #

  # developers
  if [[ -n "${HADOOP_ENABLE_BUILD_PATHS}" ]]; then
    for i in yarn-api yarn-common yarn-mapreduce yarn-master-worker \
             yarn-server/yarn-server-nodemanager \
             yarn-server/yarn-server-common \
             yarn-server/yarn-server-resourcemanager; do
      hadoop_add_classpath "${HADOOP_YARN_HOME}/$i/target/classes"
    done

    hadoop_add_classpath "${HADOOP_YARN_HOME}/build/test/classes"
    hadoop_add_classpath "${HADOOP_YARN_HOME}/build/tools"
  fi

  if [[ -d "${HADOOP_YARN_HOME}/${YARN_DIR}/webapps" ]]; then
    hadoop_add_classpath "${HADOOP_YARN_HOME}/${YARN_DIR}"
  fi

  hadoop_add_classpath "${HADOOP_YARN_HOME}/${YARN_LIB_JARS_DIR}"'/*'
  hadoop_add_classpath "${HADOOP_YARN_HOME}/${YARN_DIR}"'/*'
}

function hadoop_add_to_classpath_mapred
{
  #
  # get all of the mapreduce jars+config in the path
  #

  # developers
  if [[ -n "${HADOOP_ENABLE_BUILD_PATHS}" ]]; then
    hadoop_add_classpath "${HADOOP_MAPRED_HOME}/hadoop-mapreduce-client-shuffle/target/classes"
    hadoop_add_classpath "${HADOOP_MAPRED_HOME}/hadoop-mapreduce-client-common/target/classes"
    hadoop_add_classpath "${HADOOP_MAPRED_HOME}/hadoop-mapreduce-client-hs/target/classes"
    hadoop_add_classpath "${HADOOP_MAPRED_HOME}/hadoop-mapreduce-client-hs-plugins/target/classes"
    hadoop_add_classpath "${HADOOP_MAPRED_HOME}/hadoop-mapreduce-client-app/target/classes"
    hadoop_add_classpath "${HADOOP_MAPRED_HOME}/hadoop-mapreduce-client-jobclient/target/classes"
    hadoop_add_classpath "${HADOOP_MAPRED_HOME}/hadoop-mapreduce-client-core/target/classes"
  fi

  if [[ -d "${HADOOP_MAPRED_HOME}/${MAPRED_DIR}/webapps" ]]; then
    hadoop_add_classpath "${HADOOP_MAPRED_HOME}/${MAPRED_DIR}"
  fi

  hadoop_add_classpath "${HADOOP_MAPRED_HOME}/${MAPRED_LIB_JARS_DIR}"'/*'
  hadoop_add_classpath "${HADOOP_MAPRED_HOME}/${MAPRED_DIR}"'/*'
}

function hadoop_add_to_classpath_userpath
{
  # Add the user-specified HADOOP_CLASSPATH to the
  # official CLASSPATH env var if HADOOP_USE_CLIENT_CLASSLOADER
  # is not set.
  # Add it first or last depending on if user has
  # set env-var HADOOP_USER_CLASSPATH_FIRST
  # we'll also dedupe it, because we're cool like that.
  #
  local c
  local array
  local i
  local j
  let c=0

  if [[ -n "${HADOOP_CLASSPATH}" ]]; then
    # I wonder if Java runs on VMS.
    for i in $(echo "${HADOOP_CLASSPATH}" | tr : '\n'); do
      array[$c]=$i
      let c+=1
    done
    let j=c-1

    if [[ -z "${HADOOP_USE_CLIENT_CLASSLOADER}" ]]; then
      if [[ -z "${HADOOP_USER_CLASSPATH_FIRST}" ]]; then
        for ((i=0; i<=j; i++)); do
          hadoop_add_classpath "${array[$i]}" after
        done
      else
        for ((i=j; i>=0; i--)); do
          hadoop_add_classpath "${array[$i]}" before
        done
      fi
    fi
  fi
}

function hadoop_os_tricks
{
  local bindv6only

  # some OSes have special needs. here's some out of the box
  # examples for OS X and Linux. Vendors, replace this with your special sauce.
  case ${HADOOP_OS_TYPE} in
    Darwin)
      if [[ -z "${JAVA_HOME}" ]]; then
        if [[ -x /usr/libexec/java_home ]]; then
          export JAVA_HOME="$(/usr/libexec/java_home)"
        else
          export JAVA_HOME=/Library/Java/Home
        fi
      fi
    ;;
    Linux)
      bindv6only=$(/sbin/sysctl -n net.ipv6.bindv6only 2> /dev/null)

      # NOTE! HADOOP_ALLOW_IPV6 is a developer hook. We leave it
      # undocumented in hadoop-env.sh because we don't want users to
      # shoot themselves in the foot while devs make IPv6 work.
      if [[ -n "${bindv6only}" ]] &&
         [[ "${bindv6only}" -eq "1" ]] &&
         [[ "${HADOOP_ALLOW_IPV6}" != "yes" ]]; then
        hadoop_error "ERROR: \"net.ipv6.bindv6only\" is set to 1 "
        hadoop_error "ERROR: Hadoop networking could be broken. Aborting."
        hadoop_error "ERROR: For more info: http://wiki.apache.org/hadoop/HadoopIPv6"
        exit 1
      fi

      # Newer versions of glibc use an arena memory allocator that
      # causes virtual memory usage to explode. This interacts badly
      # with the many threads that we use in Hadoop. Tune the variable
      # down to prevent vmem explosion.
      export MALLOC_ARENA_MAX=${MALLOC_ARENA_MAX:-4}
    ;;
  esac
}

function hadoop_java_setup
{
  # Bail if we did not detect it
  if [[ -z "${JAVA_HOME}" ]]; then
    hadoop_error "ERROR: JAVA_HOME is not set and could not be found."
    exit 1
  fi

  if [[ ! -d "${JAVA_HOME}" ]]; then
    hadoop_error "ERROR: JAVA_HOME ${JAVA_HOME} does not exist."
    exit 1
  fi

  JAVA="${JAVA_HOME}/bin/java"

  if [[ ! -x "$JAVA" ]]; then
    hadoop_error "ERROR: $JAVA is not executable."
    exit 1
  fi
}

function hadoop_finalize_libpaths
{
  if [[ -n "${JAVA_LIBRARY_PATH}" ]]; then
    hadoop_add_param HADOOP_OPTS java.library.path \
      "-Djava.library.path=${JAVA_LIBRARY_PATH}"
    export LD_LIBRARY_PATH
  fi
}

function hadoop_finalize_hadoop_heap
{
  if [[ -n "${HADOOP_HEAPSIZE_MAX}" ]]; then
    if [[ "${HADOOP_HEAPSIZE_MAX}" =~ ^[0-9]+$ ]]; then
      HADOOP_HEAPSIZE_MAX="${HADOOP_HEAPSIZE_MAX}m"
    fi
    hadoop_add_param HADOOP_OPTS Xmx "-Xmx${HADOOP_HEAPSIZE_MAX}"
  fi

  # backwards compatibility
  if [[ -n "${HADOOP_HEAPSIZE}" ]]; then
    if [[ "${HADOOP_HEAPSIZE}" =~ ^[0-9]+$ ]]; then
      HADOOP_HEAPSIZE="${HADOOP_HEAPSIZE}m"
    fi
    hadoop_add_param HADOOP_OPTS Xmx "-Xmx${HADOOP_HEAPSIZE}"
  fi

  if [[ -n "${HADOOP_HEAPSIZE_MIN}" ]]; then
    if [[ "${HADOOP_HEAPSIZE_MIN}" =~ ^[0-9]+$ ]]; then
      HADOOP_HEAPSIZE_MIN="${HADOOP_HEAPSIZE_MIN}m"
    fi
    hadoop_add_param HADOOP_OPTS Xms "-Xms${HADOOP_HEAPSIZE_MIN}"
  fi
}
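
# Illustrative example (not part of the original script): a bare number is
# treated as megabytes, while a value with a unit suffix passes through
# unchanged.
#
#   HADOOP_HEAPSIZE_MAX=4096   # becomes -Xmx4096m
#   HADOOP_HEAPSIZE_MAX=4g     # becomes -Xmx4g
#   HADOOP_HEAPSIZE_MIN=1g     # becomes -Xms1g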

#
# fill in any last minute options that might not have been defined yet
#
function hadoop_finalize_hadoop_opts
{
  hadoop_add_param HADOOP_OPTS hadoop.log.dir "-Dhadoop.log.dir=${HADOOP_LOG_DIR}"
  hadoop_add_param HADOOP_OPTS hadoop.log.file "-Dhadoop.log.file=${HADOOP_LOGFILE}"
  hadoop_add_param HADOOP_OPTS hadoop.home.dir "-Dhadoop.home.dir=${HADOOP_PREFIX}"
  hadoop_add_param HADOOP_OPTS hadoop.id.str "-Dhadoop.id.str=${HADOOP_IDENT_STRING}"
  hadoop_add_param HADOOP_OPTS hadoop.root.logger "-Dhadoop.root.logger=${HADOOP_ROOT_LOGGER}"
  hadoop_add_param HADOOP_OPTS hadoop.policy.file "-Dhadoop.policy.file=${HADOOP_POLICYFILE}"
  hadoop_add_param HADOOP_OPTS hadoop.security.logger "-Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER}"
}

function hadoop_finalize_classpath
{
  hadoop_add_classpath "${HADOOP_CONF_DIR}" before

  # user classpath gets added at the last minute. this allows
  # override of CONF dirs and more
  hadoop_add_to_classpath_userpath
}

function hadoop_finalize_catalina_opts
{
  local prefix=${HADOOP_CATALINA_PREFIX}

  hadoop_add_param CATALINA_OPTS hadoop.home.dir "-Dhadoop.home.dir=${HADOOP_PREFIX}"
  if [[ -n "${JAVA_LIBRARY_PATH}" ]]; then
    hadoop_add_param CATALINA_OPTS java.library.path "-Djava.library.path=${JAVA_LIBRARY_PATH}"
  fi
  hadoop_add_param CATALINA_OPTS "${prefix}.home.dir" "-D${prefix}.home.dir=${HADOOP_PREFIX}"
  hadoop_add_param CATALINA_OPTS "${prefix}.config.dir" "-D${prefix}.config.dir=${HADOOP_CATALINA_CONFIG}"
  hadoop_add_param CATALINA_OPTS "${prefix}.log.dir" "-D${prefix}.log.dir=${HADOOP_CATALINA_LOG}"
  hadoop_add_param CATALINA_OPTS "${prefix}.temp.dir" "-D${prefix}.temp.dir=${HADOOP_CATALINA_TEMP}"
  hadoop_add_param CATALINA_OPTS "${prefix}.admin.port" "-D${prefix}.admin.port=${HADOOP_CATALINA_ADMIN_PORT}"
  hadoop_add_param CATALINA_OPTS "${prefix}.http.port" "-D${prefix}.http.port=${HADOOP_CATALINA_HTTP_PORT}"
  hadoop_add_param CATALINA_OPTS "${prefix}.max.threads" "-D${prefix}.max.threads=${HADOOP_CATALINA_MAX_THREADS}"
  hadoop_add_param CATALINA_OPTS "${prefix}.ssl.keystore.file" "-D${prefix}.ssl.keystore.file=${HADOOP_CATALINA_SSL_KEYSTORE_FILE}"
}

function hadoop_finalize
{
  # user classpath gets added at the last minute. this allows
  # override of CONF dirs and more
  hadoop_finalize_classpath
  hadoop_finalize_libpaths
  hadoop_finalize_hadoop_heap
  hadoop_finalize_hadoop_opts
}

function hadoop_exit_with_usage
{
  # NOTE: This function is not user replaceable.

  local exitcode=$1
  if [[ -z $exitcode ]]; then
    exitcode=1
  fi
  if declare -F hadoop_usage >/dev/null ; then
    hadoop_usage
  elif [[ -x /usr/bin/cowsay ]]; then
    /usr/bin/cowsay -f elephant "Sorry, no help available."
  else
    hadoop_error "Sorry, no help available."
  fi
  exit $exitcode
}

function hadoop_verify_secure_prereq
{
  # if you are on an OS like Illumos that has functional roles
  # and you are using pfexec, you'll probably want to change
  # this.

  # ${EUID} comes from the shell itself!
  if [[ "${EUID}" -ne 0 ]] && [[ -z "${HADOOP_SECURE_COMMAND}" ]]; then
    hadoop_error "ERROR: You must be a privileged user in order to run a secure service."
    exit 1
  else
    return 0
  fi
}

function hadoop_setup_secure_service
{
  # need a more complicated setup? replace me!

  HADOOP_PID_DIR=${HADOOP_SECURE_PID_DIR}
  HADOOP_LOG_DIR=${HADOOP_SECURE_LOG_DIR}
}

function hadoop_verify_piddir
{
  if [[ -z "${HADOOP_PID_DIR}" ]]; then
    hadoop_error "No pid directory defined."
    exit 1
  fi
  if [[ ! -w "${HADOOP_PID_DIR}" ]] && [[ ! -d "${HADOOP_PID_DIR}" ]]; then
    hadoop_error "WARNING: ${HADOOP_PID_DIR} does not exist. Creating."
    mkdir -p "${HADOOP_PID_DIR}" > /dev/null 2>&1
    if [[ $? -gt 0 ]]; then
      hadoop_error "ERROR: Unable to create ${HADOOP_PID_DIR}. Aborting."
      exit 1
    fi
  fi
  touch "${HADOOP_PID_DIR}/$$" >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Unable to write in ${HADOOP_PID_DIR}. Aborting."
    exit 1
  fi
  rm "${HADOOP_PID_DIR}/$$" >/dev/null 2>&1
}

function hadoop_verify_logdir
{
  if [[ -z "${HADOOP_LOG_DIR}" ]]; then
    hadoop_error "No log directory defined."
    exit 1
  fi
  if [[ ! -w "${HADOOP_LOG_DIR}" ]] && [[ ! -d "${HADOOP_LOG_DIR}" ]]; then
    hadoop_error "WARNING: ${HADOOP_LOG_DIR} does not exist. Creating."
    mkdir -p "${HADOOP_LOG_DIR}" > /dev/null 2>&1
    if [[ $? -gt 0 ]]; then
      hadoop_error "ERROR: Unable to create ${HADOOP_LOG_DIR}. Aborting."
      exit 1
    fi
  fi
  touch "${HADOOP_LOG_DIR}/$$" >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Unable to write in ${HADOOP_LOG_DIR}. Aborting."
    exit 1
  fi
  rm "${HADOOP_LOG_DIR}/$$" >/dev/null 2>&1
}

function hadoop_status_daemon()
{
  #
  # LSB 4.1.0 compatible status command (1)
  #
  # 0 = program is running
  # 1 = dead, but still a pid (2)
  # 2 = (not used by us)
  # 3 = not running
  #
  # 1 - this is not an endorsement of the LSB
  #
  # 2 - technically, the specification says /var/run/pid, so
  #     we should never return this value, but we're giving
  #     them the benefit of a doubt and returning 1 even if
  #     our pid is not in /var/run .
  #
  local pidfile=$1
  shift

  local pid

  if [[ -f "${pidfile}" ]]; then
    pid=$(cat "${pidfile}")
    if ps -p "${pid}" > /dev/null 2>&1; then
      return 0
    fi
    return 1
  fi
  return 3
}
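
# Illustrative example (not part of the original script), with a hypothetical
# pid file path:
#
#   hadoop_status_daemon "/tmp/hadoop-hdfs-namenode.pid"
#   case $? in
#     0) echo "running" ;;
#     1) echo "dead, but pid file remains" ;;
#     3) echo "not running" ;;
#   esac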

function hadoop_java_exec
{
  # run a java command. this is used for
  # non-daemons

  local command=$1
  local class=$2
  shift 2

  hadoop_debug "Final CLASSPATH: ${CLASSPATH}"
  hadoop_debug "Final HADOOP_OPTS: ${HADOOP_OPTS}"

  export CLASSPATH
  #shellcheck disable=SC2086
  exec "${JAVA}" "-Dproc_${command}" ${HADOOP_OPTS} "${class}" "$@"
}

function hadoop_start_daemon
{
  # this is our non-privileged daemon starter
  # that fires up a daemon in the *foreground*
  # so complex! so wow! much java!
  local command=$1
  local class=$2
  local pidfile=$3
  shift 3

  hadoop_debug "Final CLASSPATH: ${CLASSPATH}"
  hadoop_debug "Final HADOOP_OPTS: ${HADOOP_OPTS}"

  # this is for the non-daemon pid creation
  #shellcheck disable=SC2086
  echo $$ > "${pidfile}" 2>/dev/null
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot write ${command} pid ${pidfile}."
  fi

  export CLASSPATH
  #shellcheck disable=SC2086
  exec "${JAVA}" "-Dproc_${command}" ${HADOOP_OPTS} "${class}" "$@"
}

function hadoop_start_daemon_wrapper
{
  # this is our non-privileged daemon start
  # that fires up a daemon in the *background*
  local daemonname=$1
  local class=$2
  local pidfile=$3
  local outfile=$4
  shift 4

  local counter

  hadoop_rotate_log "${outfile}"

  hadoop_start_daemon "${daemonname}" \
    "$class" \
    "${pidfile}" \
    "$@" >> "${outfile}" 2>&1 < /dev/null &

  # we need to avoid a race condition here
  # so let's wait for the fork to finish
  # before overriding with the daemonized pid
  (( counter=0 ))
  while [[ ! -f ${pidfile} && ${counter} -le 5 ]]; do
    sleep 1
    (( counter++ ))
  done

  # this is for daemon pid creation
  #shellcheck disable=SC2086
  echo $! > "${pidfile}" 2>/dev/null
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot write ${daemonname} pid ${pidfile}."
  fi

  # shellcheck disable=SC2086
  renice "${HADOOP_NICENESS}" $! >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot set priority of ${daemonname} process $!"
  fi

  # shellcheck disable=SC2086
  disown %+ >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot disconnect ${daemonname} process $!"
  fi
  sleep 1

  # capture the ulimit output
  ulimit -a >> "${outfile}" 2>&1

  # shellcheck disable=SC2086
  if ! ps -p $! >/dev/null 2>&1; then
    return 1
  fi
  return 0
}

function hadoop_start_secure_daemon
{
  # this is used to launch a secure daemon in the *foreground*
  #
  local daemonname=$1
  local class=$2

  # pid file to create for our daemon
  local daemonpidfile=$3

  # where to send stdout. jsvc has bad habits so this *may* be &1
  # which means you send it to stdout!
  local daemonoutfile=$4

  # where to send stderr. same thing, except &2 = stderr
  local daemonerrfile=$5
  local privpidfile=$6
  shift 6

  hadoop_rotate_log "${daemonoutfile}"
  hadoop_rotate_log "${daemonerrfile}"

  jsvc="${JSVC_HOME}/jsvc"
  if [[ ! -f "${jsvc}" ]]; then
    hadoop_error "JSVC_HOME is not set or set incorrectly. jsvc is required to run secure"
    hadoop_error "or privileged daemons. Please download and install jsvc from "
    hadoop_error "http://archive.apache.org/dist/commons/daemon/binaries/ "
    hadoop_error "and set JSVC_HOME to the directory containing the jsvc binary."
    exit 1
  fi

  # note that shellcheck will throw a
  # bogus for-our-use-case 2086 here.
  # it doesn't properly support multi-line situations

  hadoop_debug "Final CLASSPATH: ${CLASSPATH}"
  hadoop_debug "Final HADOOP_OPTS: ${HADOOP_OPTS}"

  #shellcheck disable=SC2086
  echo $$ > "${privpidfile}" 2>/dev/null
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot write ${daemonname} pid ${privpidfile}."
  fi

  exec "${jsvc}" \
    "-Dproc_${daemonname}" \
    -outfile "${daemonoutfile}" \
    -errfile "${daemonerrfile}" \
    -pidfile "${daemonpidfile}" \
    -nodetach \
    -user "${HADOOP_SECURE_USER}" \
    -cp "${CLASSPATH}" \
    ${HADOOP_OPTS} \
    "${class}" "$@"
}

function hadoop_start_secure_daemon_wrapper
{
  # this wraps hadoop_start_secure_daemon to take care
  # of the dirty work to launch a daemon in the background!
  local daemonname=$1
  local class=$2

  # same rules as hadoop_start_secure_daemon except we
  # have some additional parameters

  local daemonpidfile=$3
  local daemonoutfile=$4

  # the pid file of the subprocess that spawned our
  # secure launcher
  local jsvcpidfile=$5

  # the output of the subprocess that spawned our secure
  # launcher
  local jsvcoutfile=$6

  local daemonerrfile=$7
  shift 7

  local counter

  hadoop_rotate_log "${jsvcoutfile}"

  hadoop_start_secure_daemon \
    "${daemonname}" \
    "${class}" \
    "${daemonpidfile}" \
    "${daemonoutfile}" \
    "${daemonerrfile}" \
    "${jsvcpidfile}" "$@" >> "${jsvcoutfile}" 2>&1 < /dev/null &

  # we need to avoid a race condition here
  # so let's wait for the fork to finish
  # before overriding with the daemonized pid
  (( counter=0 ))
  while [[ ! -f ${daemonpidfile} && ${counter} -le 5 ]]; do
    sleep 1
    (( counter++ ))
  done

  # this is for the daemon pid creation
  #shellcheck disable=SC2086
  echo $! > "${jsvcpidfile}" 2>/dev/null
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot write ${daemonname} pid ${jsvcpidfile}."
  fi

  sleep 1
  #shellcheck disable=SC2086
  renice "${HADOOP_NICENESS}" $! >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot set priority of ${daemonname} process $!"
  fi
  if [[ -f "${daemonpidfile}" ]]; then
    #shellcheck disable=SC2046
    renice "${HADOOP_NICENESS}" $(cat "${daemonpidfile}" 2>/dev/null) >/dev/null 2>&1
    if [[ $? -gt 0 ]]; then
      hadoop_error "ERROR: Cannot set priority of ${daemonname} process $(cat "${daemonpidfile}" 2>/dev/null)"
    fi
  fi
  #shellcheck disable=SC2046
  disown %+ >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot disconnect ${daemonname} process $!"
  fi

  # capture the ulimit output
  su "${HADOOP_SECURE_USER}" -c 'bash -c "ulimit -a"' >> "${jsvcoutfile}" 2>&1
  #shellcheck disable=SC2086
  if ! ps -p $! >/dev/null 2>&1; then
    return 1
  fi
  return 0
}

function hadoop_stop_daemon
{
  local cmd=$1
  local pidfile=$2
  shift 2

  local pid

  if [[ -f "${pidfile}" ]]; then
    pid=$(cat "$pidfile")

    kill "${pid}" >/dev/null 2>&1
    sleep "${HADOOP_STOP_TIMEOUT}"
    if kill -0 "${pid}" > /dev/null 2>&1; then
      hadoop_error "WARNING: ${cmd} did not stop gracefully after ${HADOOP_STOP_TIMEOUT} seconds: Trying to kill with kill -9"
      kill -9 "${pid}" >/dev/null 2>&1
    fi
    if ps -p "${pid}" > /dev/null 2>&1; then
      hadoop_error "ERROR: Unable to kill ${pid}"
    else
      rm -f "${pidfile}" >/dev/null 2>&1
    fi
  fi
}

function hadoop_stop_secure_daemon
{
  local command=$1
  local daemonpidfile=$2
  local privpidfile=$3
  shift 3

  local ret

  hadoop_stop_daemon "${command}" "${daemonpidfile}"
  ret=$?
  rm -f "${daemonpidfile}" "${privpidfile}" 2>/dev/null
  return ${ret}
}

function hadoop_daemon_handler
{
  local daemonmode=$1
  local daemonname=$2
  local class=$3
  local daemon_pidfile=$4
  local daemon_outfile=$5
  shift 5

  case ${daemonmode} in
    status)
      hadoop_status_daemon "${daemon_pidfile}"
      exit $?
    ;;

    stop)
      hadoop_stop_daemon "${daemonname}" "${daemon_pidfile}"
      exit $?
    ;;

    ##COMPAT -- older hadoops would also start daemons by default
    start|default)
      hadoop_verify_piddir
      hadoop_verify_logdir
      hadoop_status_daemon "${daemon_pidfile}"
      if [[ $? == 0 ]]; then
        hadoop_error "${daemonname} is running as process $(cat "${daemon_pidfile}"). Stop it first."
        exit 1
      else
        # stale pid file, so just remove it and continue on
        rm -f "${daemon_pidfile}" >/dev/null 2>&1
      fi

      ##COMPAT - differentiate between --daemon start and nothing
      # "nothing" shouldn't detach
      if [[ "$daemonmode" = "default" ]]; then
        hadoop_start_daemon "${daemonname}" "${class}" "${daemon_pidfile}" "$@"
      else
        hadoop_start_daemon_wrapper "${daemonname}" \
          "${class}" "${daemon_pidfile}" "${daemon_outfile}" "$@"
      fi
    ;;
  esac
}

function hadoop_secure_daemon_handler
{
  local daemonmode=$1
  local daemonname=$2
  local classname=$3
  local daemon_pidfile=$4
  local daemon_outfile=$5
  local priv_pidfile=$6
  local priv_outfile=$7
  local priv_errfile=$8
  shift 8

  case ${daemonmode} in
    status)
      hadoop_status_daemon "${daemon_pidfile}"
      exit $?
    ;;

    stop)
      hadoop_stop_secure_daemon "${daemonname}" \
        "${daemon_pidfile}" "${priv_pidfile}"
      exit $?
    ;;

    ##COMPAT -- older hadoops would also start daemons by default
    start|default)
      hadoop_verify_piddir
      hadoop_verify_logdir
      hadoop_status_daemon "${daemon_pidfile}"
      if [[ $? == 0 ]]; then
        hadoop_error "${daemonname} is running as process $(cat "${daemon_pidfile}"). Stop it first."
        exit 1
      else
        # stale pid file, so just remove it and continue on
        rm -f "${daemon_pidfile}" >/dev/null 2>&1
      fi

      ##COMPAT - differentiate between --daemon start and nothing
      # "nothing" shouldn't detach
      if [[ "${daemonmode}" = "default" ]]; then
        hadoop_start_secure_daemon "${daemonname}" "${classname}" \
          "${daemon_pidfile}" "${daemon_outfile}" \
          "${priv_errfile}" "${priv_pidfile}" "$@"
      else
        hadoop_start_secure_daemon_wrapper "${daemonname}" "${classname}" \
          "${daemon_pidfile}" "${daemon_outfile}" \
          "${priv_pidfile}" "${priv_outfile}" "${priv_errfile}" "$@"
      fi
    ;;
  esac
}

function hadoop_verify_user
{
  local command=$1
  local uservar="HADOOP_${command}_USER"

  if [[ -n ${!uservar} ]]; then
    if [[ ${!uservar} != ${USER} ]]; then
      hadoop_error "ERROR: ${command} can only be executed by ${!uservar}."
      exit 1
    fi
  fi
}
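
# Illustrative example (not part of the original script): restricting who may
# run a (hypothetical) "datanode" subcommand. With this set, invoking the
# command as any other user fails with an error.
#
#   export HADOOP_datanode_USER=hdfs
#   hadoop_verify_user datanode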