#!/bin/bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

function hadoop_error
{
  # NOTE: This function is not user replaceable.

  echo "$*" 1>&2
}

function hadoop_debug
{
  if [[ -n "${HADOOP_SHELL_SCRIPT_DEBUG}" ]]; then
    echo "DEBUG: $*" 1>&2
  fi
}

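# Illustrative usage sketch (not part of the library): with script debugging
# enabled, hadoop_debug writes to stderr; hadoop_error always writes to stderr.
#
#   HADOOP_SHELL_SCRIPT_DEBUG=true
#   hadoop_debug "classpath expanded"    # prints "DEBUG: classpath expanded" to stderr
#   hadoop_error "something went wrong"  # prints "something went wrong" to stderr
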
function hadoop_bootstrap_init
{
  # NOTE: This function is not user replaceable.

  # the root of the Hadoop installation
  # See HADOOP-6255 for the expected directory structure layout

  # By now, HADOOP_LIBEXEC_DIR should have been defined upstream
  # We can piggyback off of that to figure out where the default
  # HADOOP_PREFIX should be.  This allows us to run without
  # HADOOP_PREFIX ever being defined by a human!  As a consequence,
  # HADOOP_LIBEXEC_DIR now becomes perhaps the single most powerful
  # env var within Hadoop.
  if [[ -z "${HADOOP_LIBEXEC_DIR}" ]]; then
    hadoop_error "HADOOP_LIBEXEC_DIR is not defined. Exiting."
    exit 1
  fi
  HADOOP_DEFAULT_PREFIX=$(cd -P -- "${HADOOP_LIBEXEC_DIR}/.." >/dev/null && pwd -P)
  HADOOP_PREFIX=${HADOOP_PREFIX:-$HADOOP_DEFAULT_PREFIX}
  export HADOOP_PREFIX

  #
  # short-cuts. vendors may redefine these as well, preferably
  # in hadoop-layouts.sh
  #
  HADOOP_COMMON_DIR=${HADOOP_COMMON_DIR:-"share/hadoop/common"}
  HADOOP_COMMON_LIB_JARS_DIR=${HADOOP_COMMON_LIB_JARS_DIR:-"share/hadoop/common/lib"}
  HADOOP_COMMON_LIB_NATIVE_DIR=${HADOOP_COMMON_LIB_NATIVE_DIR:-"lib/native"}
  HDFS_DIR=${HDFS_DIR:-"share/hadoop/hdfs"}
  HDFS_LIB_JARS_DIR=${HDFS_LIB_JARS_DIR:-"share/hadoop/hdfs/lib"}
  YARN_DIR=${YARN_DIR:-"share/hadoop/yarn"}
  YARN_LIB_JARS_DIR=${YARN_LIB_JARS_DIR:-"share/hadoop/yarn/lib"}
  MAPRED_DIR=${MAPRED_DIR:-"share/hadoop/mapreduce"}
  MAPRED_LIB_JARS_DIR=${MAPRED_LIB_JARS_DIR:-"share/hadoop/mapreduce/lib"}

  # setup a default TOOL_PATH
  TOOL_PATH=${TOOL_PATH:-${HADOOP_PREFIX}/share/hadoop/tools/lib/*}

  export HADOOP_OS_TYPE=${HADOOP_OS_TYPE:-$(uname -s)}

  # defaults
  export HADOOP_OPTS=${HADOOP_OPTS:-"-Djava.net.preferIPv4Stack=true"}
  hadoop_debug "Initial HADOOP_OPTS=${HADOOP_OPTS}"
}

function hadoop_find_confdir
{
  # NOTE: This function is not user replaceable.

  local conf_dir

  # Look for the basic hadoop configuration area.
  #
  # An attempt at compatibility with some Hadoop 1.x
  # installs.
  if [[ -e "${HADOOP_PREFIX}/conf/hadoop-env.sh" ]]; then
    conf_dir="conf"
  else
    conf_dir="etc/hadoop"
  fi
  export HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-${HADOOP_PREFIX}/${conf_dir}}"

  hadoop_debug "HADOOP_CONF_DIR=${HADOOP_CONF_DIR}"
}

function hadoop_exec_hadoopenv
{
  # NOTE: This function is not user replaceable.

  if [[ -z "${HADOOP_ENV_PROCESSED}" ]]; then
    if [[ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]]; then
      export HADOOP_ENV_PROCESSED=true
      . "${HADOOP_CONF_DIR}/hadoop-env.sh"
    fi
  fi
}

function hadoop_exec_userfuncs
{
  # NOTE: This function is not user replaceable.

  if [[ -e "${HADOOP_CONF_DIR}/hadoop-user-functions.sh" ]]; then
    . "${HADOOP_CONF_DIR}/hadoop-user-functions.sh"
  fi
}

function hadoop_basic_init
{
  # Some of these are also set in hadoop-env.sh.
  # we still set them here just in case hadoop-env.sh is
  # broken in some way, set up defaults, etc.
  #
  # but it is important to note that if you update these
  # you also need to update hadoop-env.sh as well!!!

  # CLASSPATH initially contains $HADOOP_CONF_DIR
  CLASSPATH="${HADOOP_CONF_DIR}"
  hadoop_debug "Initial CLASSPATH=${HADOOP_CONF_DIR}"

  if [[ -z "${HADOOP_COMMON_HOME}" ]] &&
     [[ -d "${HADOOP_PREFIX}/${HADOOP_COMMON_DIR}" ]]; then
    export HADOOP_COMMON_HOME="${HADOOP_PREFIX}"
  fi

  # default policy file for service-level authorization
  HADOOP_POLICYFILE=${HADOOP_POLICYFILE:-"hadoop-policy.xml"}

  # define HADOOP_HDFS_HOME
  if [[ -z "${HADOOP_HDFS_HOME}" ]] &&
     [[ -d "${HADOOP_PREFIX}/${HDFS_DIR}" ]]; then
    export HADOOP_HDFS_HOME="${HADOOP_PREFIX}"
  fi

  # define HADOOP_YARN_HOME
  if [[ -z "${HADOOP_YARN_HOME}" ]] &&
     [[ -d "${HADOOP_PREFIX}/${YARN_DIR}" ]]; then
    export HADOOP_YARN_HOME="${HADOOP_PREFIX}"
  fi

  # define HADOOP_MAPRED_HOME
  if [[ -z "${HADOOP_MAPRED_HOME}" ]] &&
     [[ -d "${HADOOP_PREFIX}/${MAPRED_DIR}" ]]; then
    export HADOOP_MAPRED_HOME="${HADOOP_PREFIX}"
  fi

  HADOOP_IDENT_STRING=${HADOOP_IDENT_STRING:-$USER}
  HADOOP_LOG_DIR=${HADOOP_LOG_DIR:-"${HADOOP_PREFIX}/logs"}
  HADOOP_LOGFILE=${HADOOP_LOGFILE:-hadoop.log}
  HADOOP_LOGLEVEL=${HADOOP_LOGLEVEL:-INFO}
  HADOOP_NICENESS=${HADOOP_NICENESS:-0}
  HADOOP_STOP_TIMEOUT=${HADOOP_STOP_TIMEOUT:-5}
  HADOOP_PID_DIR=${HADOOP_PID_DIR:-/tmp}
  HADOOP_ROOT_LOGGER=${HADOOP_ROOT_LOGGER:-${HADOOP_LOGLEVEL},console}
  HADOOP_DAEMON_ROOT_LOGGER=${HADOOP_DAEMON_ROOT_LOGGER:-${HADOOP_LOGLEVEL},RFA}
  HADOOP_SECURITY_LOGGER=${HADOOP_SECURITY_LOGGER:-INFO,NullAppender}
  HADOOP_SSH_OPTS=${HADOOP_SSH_OPTS:-"-o BatchMode=yes -o StrictHostKeyChecking=no -o ConnectTimeout=10s"}
  HADOOP_SECURE_LOG_DIR=${HADOOP_SECURE_LOG_DIR:-${HADOOP_LOG_DIR}}
  HADOOP_SECURE_PID_DIR=${HADOOP_SECURE_PID_DIR:-${HADOOP_PID_DIR}}
  HADOOP_SSH_PARALLEL=${HADOOP_SSH_PARALLEL:-10}
}

function hadoop_populate_slaves_file()
{
  # NOTE: This function is not user replaceable.

  local slavesfile=$1
  shift
  if [[ -f "${slavesfile}" ]]; then
    # shellcheck disable=2034
    HADOOP_SLAVES="${slavesfile}"
  elif [[ -f "${HADOOP_CONF_DIR}/${slavesfile}" ]]; then
    # shellcheck disable=2034
    HADOOP_SLAVES="${HADOOP_CONF_DIR}/${slavesfile}"
    # shellcheck disable=2034
    YARN_SLAVES="${HADOOP_CONF_DIR}/${slavesfile}"
  else
    hadoop_error "ERROR: Cannot find hosts file \"${slavesfile}\""
    hadoop_exit_with_usage 1
  fi
}

function hadoop_rotate_log
{
  #
  # log rotation (mainly used for .out files)
  # Users are likely to replace this one for something
  # that gzips or uses dates or who knows what.
  #
  # be aware that &1 and &2 might go through here
  # so don't do anything too crazy...
  #
  local log=$1;
  local num=${2:-5};

  if [[ -f "${log}" ]]; then # rotate logs
    while [[ ${num} -gt 1 ]]; do
      #shellcheck disable=SC2086
      let prev=${num}-1
      if [[ -f "${log}.${prev}" ]]; then
        mv "${log}.${prev}" "${log}.${num}"
      fi
      num=${prev}
    done
    mv "${log}" "${log}.${num}"
  fi
}

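# Illustrative usage sketch (not part of the library): rotating a daemon's
# .out file with a depth of 5 keeps namenode.out.1 .. namenode.out.5, where
# .1 is always the most recent rotation. The path below is hypothetical.
#
#   hadoop_rotate_log "${HADOOP_LOG_DIR}/hadoop-hdfs-namenode.out" 5
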
function hadoop_actual_ssh
{
  # we are passing this function to xargs
  # should get hostname followed by rest of command line
  local slave=$1
  shift

  # shellcheck disable=SC2086
  ssh ${HADOOP_SSH_OPTS} ${slave} $"${@// /\\ }" 2>&1 | sed "s/^/$slave: /"
}

function hadoop_connect_to_hosts
{
  # shellcheck disable=SC2124
  local params="$@"

  #
  # ssh (or whatever) to a host
  #
  # User can specify hostnames or a file where the hostnames are (not both)
  if [[ -n "${HADOOP_SLAVES}" && -n "${HADOOP_SLAVE_NAMES}" ]] ; then
    hadoop_error "ERROR: Both HADOOP_SLAVES and HADOOP_SLAVE_NAMES were defined. Aborting."
    exit 1
  fi

  if [[ -n "${HADOOP_SLAVE_NAMES}" ]] ; then
    SLAVE_NAMES=${HADOOP_SLAVE_NAMES}
  else
    SLAVE_FILE=${HADOOP_SLAVES:-${HADOOP_CONF_DIR}/slaves}
  fi

  # if pdsh is available, let's use it.  otherwise default
  # to a loop around ssh.  (ugh)
  if [[ -e '/usr/bin/pdsh' ]]; then
    if [[ -z "${HADOOP_SLAVE_NAMES}" ]] ; then
      # if we were given a file, just let pdsh deal with it.
      # shellcheck disable=SC2086
      PDSH_SSH_ARGS_APPEND="${HADOOP_SSH_OPTS}" pdsh \
        -f "${HADOOP_SSH_PARALLEL}" -w ^"${SLAVE_FILE}" $"${@// /\\ }" 2>&1
    else
      # no spaces allowed in the pdsh arg host list
      # shellcheck disable=SC2086
      SLAVE_NAMES=$(echo ${SLAVE_NAMES} | tr -s ' ' ,)
      PDSH_SSH_ARGS_APPEND="${HADOOP_SSH_OPTS}" pdsh \
        -f "${HADOOP_SSH_PARALLEL}" -w "${SLAVE_NAMES}" $"${@// /\\ }" 2>&1
    fi
  else
    if [[ -z "${SLAVE_NAMES}" ]]; then
      SLAVE_NAMES=$(sed 's/#.*$//;/^$/d' "${SLAVE_FILE}")
    fi

    # quoting here gets tricky. it's easier to push it into a function
    # so that we don't have to deal with it. However...
    # xargs can't use a function so instead we'll export it out
    # and force it into a subshell
    # moral of the story: just use pdsh.
    export -f hadoop_actual_ssh
    export HADOOP_SSH_OPTS

    # xargs is used with option -I to replace the placeholder in the argument
    # list with each hostname read from stdin/pipe. But it considers each
    # line as one argument while reading from stdin/pipe. So place each
    # hostname on its own line when passing via the pipe.
    SLAVE_NAMES=$(echo "$SLAVE_NAMES" | tr ' ' '\n' )
    echo "${SLAVE_NAMES}" | \
      xargs -n 1 -P"${HADOOP_SSH_PARALLEL}" \
        -I {} bash -c -- "hadoop_actual_ssh {} ${params}"
    wait
  fi
}

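# Illustrative usage sketch (not part of the library): running a command on
# every host listed in a slaves file, assuming either pdsh or ssh is
# available. The file path is hypothetical.
#
#   HADOOP_SLAVES="${HADOOP_CONF_DIR}/slaves"
#   hadoop_connect_to_hosts "uptime"
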
function hadoop_validate_classname
{
  local class=$1
  shift 1

  if [[ ! ${class} =~ \. ]]; then
    # assuming the arg is a typo of a command if it does not contain ".".
    # a class belonging to no package is not allowed as a result.
    hadoop_error "ERROR: ${class} is not COMMAND nor fully qualified CLASSNAME."
    return 1
  fi

  return 0
}

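# Illustrative usage sketch (not part of the library):
#
#   hadoop_validate_classname org.apache.hadoop.util.VersionInfo  # returns 0
#   hadoop_validate_classname fsck                                # returns 1, prints an error
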
function hadoop_add_param
{
  #
  # general param dedupe..
  # $1 is what we are adding to
  # $2 is the name of what we want to add (key)
  # $3 is the key+value of what we're adding
  #
  # doing it this way allows us to support all sorts of
  # different syntaxes, just so long as they are space
  # delimited
  #
  if [[ ! ${!1} =~ $2 ]] ; then
    # shellcheck disable=SC2086
    eval $1="'${!1} $3'"
    hadoop_debug "$1 accepted $3"
  else
    hadoop_debug "$1 declined $3"
  fi
}

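# Illustrative usage sketch (not part of the library): the key ($2) is what
# gets matched for dedupe, so re-adding an option with the same key is a
# no-op. The value below is hypothetical.
#
#   hadoop_add_param HADOOP_OPTS hadoop.log.dir "-Dhadoop.log.dir=/tmp/logs"
#   hadoop_add_param HADOOP_OPTS hadoop.log.dir "-Dhadoop.log.dir=/tmp/logs"  # declined (dupe)
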
function hadoop_add_classpath
{
  # two params:
  # $1 = directory, file, wildcard, whatever to add
  # $2 = before or after, which determines where in the
  #      classpath this object should go. default is after
  # return 0 = success (added or duplicate)
  # return 1 = failure (doesn't exist, whatever)

  # However, with classpath (& JLP), we can do dedupe
  # along with some sanity checking (e.g., missing directories)
  # since we have a better idea of what is legal
  #
  # for wildcard at end, we can
  # at least check the dir exists
  if [[ $1 =~ ^.*\*$ ]]; then
    local mp=$(dirname "$1")
    if [[ ! -d "${mp}" ]]; then
      hadoop_debug "Rejected CLASSPATH: $1 (not a dir)"
      return 1
    fi

  # no wildcard in the middle, so check existence
  # (doesn't matter *what* it is)
  elif [[ ! $1 =~ ^.*\*.*$ ]] && [[ ! -e "$1" ]]; then
    hadoop_debug "Rejected CLASSPATH: $1 (does not exist)"
    return 1
  fi

  if [[ -z "${CLASSPATH}" ]]; then
    CLASSPATH=$1
    hadoop_debug "Initial CLASSPATH=$1"
  elif [[ ":${CLASSPATH}:" != *":$1:"* ]]; then
    if [[ "$2" = "before" ]]; then
      CLASSPATH="$1:${CLASSPATH}"
      hadoop_debug "Prepend CLASSPATH: $1"
    else
      CLASSPATH+=:$1
      hadoop_debug "Append CLASSPATH: $1"
    fi
  else
    hadoop_debug "Dupe CLASSPATH: $1"
  fi
  return 0
}

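# Illustrative usage sketch (not part of the library): the directory paths
# below are hypothetical.
#
#   hadoop_add_classpath "${HADOOP_CONF_DIR}" before   # prepended
#   hadoop_add_classpath "/opt/extra/lib/*"            # appended if /opt/extra/lib exists
#   hadoop_add_classpath "/does/not/exist"             # rejected, returns 1
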
function hadoop_add_colonpath
{
  # two params:
  # $1 = directory, file, wildcard, whatever to add
  # $2 = before or after, which determines where in the
  #      classpath this object should go
  # return 0 = success
  # return 1 = failure (duplicate)

  # this is CLASSPATH, JLP, etc but with dedupe but no
  # other checking
  if [[ -d "${2}" ]] && [[ ":${!1}:" != *":$2:"* ]]; then
    if [[ -z "${!1}" ]]; then
      # shellcheck disable=SC2086
      eval $1="'$2'"
      hadoop_debug "Initial colonpath($1): $2"
    elif [[ "$3" = "before" ]]; then
      # shellcheck disable=SC2086
      eval $1="'$2:${!1}'"
      hadoop_debug "Prepend colonpath($1): $2"
    else
      # shellcheck disable=SC2086
      eval $1+=":'$2'"
      hadoop_debug "Append colonpath($1): $2"
    fi
    return 0
  fi
  hadoop_debug "Rejected colonpath($1): $2"
  return 1
}

function hadoop_add_javalibpath
{
  # specialized function for a common use case
  hadoop_add_colonpath JAVA_LIBRARY_PATH "$1" "$2"
}

function hadoop_add_ldlibpath
{
  # specialized function for a common use case
  hadoop_add_colonpath LD_LIBRARY_PATH "$1" "$2"

  # note that we export this
  export LD_LIBRARY_PATH
}

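# Illustrative usage sketch (not part of the library): the directory below is
# hypothetical and is only added if it actually exists.
#
#   hadoop_add_ldlibpath "${HADOOP_PREFIX}/lib/native" after
#   echo "${LD_LIBRARY_PATH}"
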
function hadoop_add_to_classpath_common
{
  #
  # get all of the common jars+config in the path
  #

  # developers
  if [[ -n "${HADOOP_ENABLE_BUILD_PATHS}" ]]; then
    hadoop_add_classpath "${HADOOP_COMMON_HOME}/hadoop-common/target/classes"
  fi

  if [[ -d "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_DIR}/webapps" ]]; then
    hadoop_add_classpath "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_DIR}"
  fi

  hadoop_add_classpath "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_LIB_JARS_DIR}"'/*'
  hadoop_add_classpath "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_DIR}"'/*'
}

function hadoop_add_to_classpath_hdfs
{
  #
  # get all of the hdfs jars+config in the path
  #

  # developers
  if [[ -n "${HADOOP_ENABLE_BUILD_PATHS}" ]]; then
    hadoop_add_classpath "${HADOOP_HDFS_HOME}/hadoop-hdfs/target/classes"
  fi

  # put hdfs in classpath if present
  if [[ -d "${HADOOP_HDFS_HOME}/${HDFS_DIR}/webapps" ]]; then
    hadoop_add_classpath "${HADOOP_HDFS_HOME}/${HDFS_DIR}"
  fi

  hadoop_add_classpath "${HADOOP_HDFS_HOME}/${HDFS_LIB_JARS_DIR}"'/*'
  hadoop_add_classpath "${HADOOP_HDFS_HOME}/${HDFS_DIR}"'/*'
}

function hadoop_add_to_classpath_yarn
{
  local i
  #
  # get all of the yarn jars+config in the path
  #

  # developers
  if [[ -n "${HADOOP_ENABLE_BUILD_PATHS}" ]]; then
    for i in yarn-api yarn-common yarn-mapreduce yarn-master-worker \
             yarn-server/yarn-server-nodemanager \
             yarn-server/yarn-server-common \
             yarn-server/yarn-server-resourcemanager; do
      hadoop_add_classpath "${HADOOP_YARN_HOME}/$i/target/classes"
    done

    hadoop_add_classpath "${HADOOP_YARN_HOME}/build/test/classes"
    hadoop_add_classpath "${HADOOP_YARN_HOME}/build/tools"
  fi

  if [[ -d "${HADOOP_YARN_HOME}/${YARN_DIR}/webapps" ]]; then
    hadoop_add_classpath "${HADOOP_YARN_HOME}/${YARN_DIR}"
  fi

  hadoop_add_classpath "${HADOOP_YARN_HOME}/${YARN_LIB_JARS_DIR}"'/*'
  hadoop_add_classpath "${HADOOP_YARN_HOME}/${YARN_DIR}"'/*'
}

function hadoop_add_to_classpath_mapred
{
  #
  # get all of the mapreduce jars+config in the path
  #

  # developers
  if [[ -n "${HADOOP_ENABLE_BUILD_PATHS}" ]]; then
    hadoop_add_classpath "${HADOOP_MAPRED_HOME}/hadoop-mapreduce-client-shuffle/target/classes"
    hadoop_add_classpath "${HADOOP_MAPRED_HOME}/hadoop-mapreduce-client-common/target/classes"
    hadoop_add_classpath "${HADOOP_MAPRED_HOME}/hadoop-mapreduce-client-hs/target/classes"
    hadoop_add_classpath "${HADOOP_MAPRED_HOME}/hadoop-mapreduce-client-hs-plugins/target/classes"
    hadoop_add_classpath "${HADOOP_MAPRED_HOME}/hadoop-mapreduce-client-app/target/classes"
    hadoop_add_classpath "${HADOOP_MAPRED_HOME}/hadoop-mapreduce-client-jobclient/target/classes"
    hadoop_add_classpath "${HADOOP_MAPRED_HOME}/hadoop-mapreduce-client-core/target/classes"
  fi

  if [[ -d "${HADOOP_MAPRED_HOME}/${MAPRED_DIR}/webapps" ]]; then
    hadoop_add_classpath "${HADOOP_MAPRED_HOME}/${MAPRED_DIR}"
  fi

  hadoop_add_classpath "${HADOOP_MAPRED_HOME}/${MAPRED_LIB_JARS_DIR}"'/*'
  hadoop_add_classpath "${HADOOP_MAPRED_HOME}/${MAPRED_DIR}"'/*'
}

function hadoop_add_to_classpath_userpath
{
  # Add the user-specified HADOOP_CLASSPATH to the
  # official CLASSPATH env var if HADOOP_USE_CLIENT_CLASSLOADER
  # is not set.
  # Add it first or last depending on if user has
  # set env-var HADOOP_USER_CLASSPATH_FIRST
  # we'll also dedupe it, because we're cool like that.
  #
  local c
  local array
  local i
  local j
  let c=0

  if [[ -n "${HADOOP_CLASSPATH}" ]]; then
    # I wonder if Java runs on VMS.
    for i in $(echo "${HADOOP_CLASSPATH}" | tr : '\n'); do
      array[$c]=$i
      let c+=1
    done
    let j=c-1

    if [[ -z "${HADOOP_USE_CLIENT_CLASSLOADER}" ]]; then
      if [[ -z "${HADOOP_USER_CLASSPATH_FIRST}" ]]; then
        for ((i=0; i<=j; i++)); do
          hadoop_add_classpath "${array[$i]}" after
        done
      else
        for ((i=j; i>=0; i--)); do
          hadoop_add_classpath "${array[$i]}" before
        done
      fi
    fi
  fi
}

function hadoop_os_tricks
{
  local bindv6only

  # some OSes have special needs.  here's some out of the box
  # examples for OS X and Linux. Vendors, replace this with your special sauce.
  case ${HADOOP_OS_TYPE} in
    Darwin)
      if [[ -z "${JAVA_HOME}" ]]; then
        if [[ -x /usr/libexec/java_home ]]; then
          export JAVA_HOME="$(/usr/libexec/java_home)"
        else
          export JAVA_HOME=/Library/Java/Home
        fi
      fi
    ;;
    Linux)
      bindv6only=$(/sbin/sysctl -n net.ipv6.bindv6only 2> /dev/null)

      # NOTE! HADOOP_ALLOW_IPV6 is a developer hook.  We leave it
      # undocumented in hadoop-env.sh because we don't want users to
      # shoot themselves in the foot while devs make IPv6 work.
      if [[ -n "${bindv6only}" ]] &&
         [[ "${bindv6only}" -eq "1" ]] &&
         [[ "${HADOOP_ALLOW_IPV6}" != "yes" ]]; then
        hadoop_error "ERROR: \"net.ipv6.bindv6only\" is set to 1 "
        hadoop_error "ERROR: Hadoop networking could be broken. Aborting."
        hadoop_error "ERROR: For more info: http://wiki.apache.org/hadoop/HadoopIPv6"
        exit 1
      fi

      # Newer versions of glibc use an arena memory allocator that
      # causes virtual memory usage to explode. This interacts badly
      # with the many threads that we use in Hadoop. Tune the variable
      # down to prevent vmem explosion.
      export MALLOC_ARENA_MAX=${MALLOC_ARENA_MAX:-4}
    ;;
  esac
}

function hadoop_java_setup
{
  # Bail if we did not detect it
  if [[ -z "${JAVA_HOME}" ]]; then
    hadoop_error "ERROR: JAVA_HOME is not set and could not be found."
    exit 1
  fi

  if [[ ! -d "${JAVA_HOME}" ]]; then
    hadoop_error "ERROR: JAVA_HOME ${JAVA_HOME} does not exist."
    exit 1
  fi

  JAVA="${JAVA_HOME}/bin/java"

  if [[ ! -x "$JAVA" ]]; then
    hadoop_error "ERROR: $JAVA is not executable."
    exit 1
  fi
}

function hadoop_finalize_libpaths
{
  if [[ -n "${JAVA_LIBRARY_PATH}" ]]; then
    hadoop_add_param HADOOP_OPTS java.library.path \
      "-Djava.library.path=${JAVA_LIBRARY_PATH}"
    export LD_LIBRARY_PATH
  fi
}

function hadoop_finalize_hadoop_heap
{
  if [[ -n "${HADOOP_HEAPSIZE_MAX}" ]]; then
    if [[ "${HADOOP_HEAPSIZE_MAX}" =~ ^[0-9]+$ ]]; then
      HADOOP_HEAPSIZE_MAX="${HADOOP_HEAPSIZE_MAX}m"
    fi
    hadoop_add_param HADOOP_OPTS Xmx "-Xmx${HADOOP_HEAPSIZE_MAX}"
  fi

  # backwards compatibility
  if [[ -n "${HADOOP_HEAPSIZE}" ]]; then
    if [[ "${HADOOP_HEAPSIZE}" =~ ^[0-9]+$ ]]; then
      HADOOP_HEAPSIZE="${HADOOP_HEAPSIZE}m"
    fi
    hadoop_add_param HADOOP_OPTS Xmx "-Xmx${HADOOP_HEAPSIZE}"
  fi

  if [[ -n "${HADOOP_HEAPSIZE_MIN}" ]]; then
    if [[ "${HADOOP_HEAPSIZE_MIN}" =~ ^[0-9]+$ ]]; then
      HADOOP_HEAPSIZE_MIN="${HADOOP_HEAPSIZE_MIN}m"
    fi
    hadoop_add_param HADOOP_OPTS Xms "-Xms${HADOOP_HEAPSIZE_MIN}"
  fi
}

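# Illustrative usage sketch (not part of the library): a bare number is
# treated as megabytes, while a value with a JVM unit suffix passes through
# unchanged.
#
#   HADOOP_HEAPSIZE_MAX=4096   # becomes -Xmx4096m
#   HADOOP_HEAPSIZE_MAX=4g     # becomes -Xmx4g
#   hadoop_finalize_hadoop_heap
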
#
# fill in any last minute options that might not have been defined yet
#
function hadoop_finalize_hadoop_opts
{
  hadoop_add_param HADOOP_OPTS hadoop.log.dir "-Dhadoop.log.dir=${HADOOP_LOG_DIR}"
  hadoop_add_param HADOOP_OPTS hadoop.log.file "-Dhadoop.log.file=${HADOOP_LOGFILE}"
  hadoop_add_param HADOOP_OPTS hadoop.home.dir "-Dhadoop.home.dir=${HADOOP_PREFIX}"
  hadoop_add_param HADOOP_OPTS hadoop.id.str "-Dhadoop.id.str=${HADOOP_IDENT_STRING}"
  hadoop_add_param HADOOP_OPTS hadoop.root.logger "-Dhadoop.root.logger=${HADOOP_ROOT_LOGGER}"
  hadoop_add_param HADOOP_OPTS hadoop.policy.file "-Dhadoop.policy.file=${HADOOP_POLICYFILE}"
  hadoop_add_param HADOOP_OPTS hadoop.security.logger "-Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER}"
}

function hadoop_finalize_classpath
{
  hadoop_add_classpath "${HADOOP_CONF_DIR}" before

  # user classpath gets added at the last minute. this allows
  # override of CONF dirs and more
  hadoop_add_to_classpath_userpath
}

function hadoop_finalize
{
  # user classpath gets added at the last minute. this allows
  # override of CONF dirs and more
  hadoop_finalize_classpath
  hadoop_finalize_libpaths
  hadoop_finalize_hadoop_heap
  hadoop_finalize_hadoop_opts
}

function hadoop_exit_with_usage
{
  # NOTE: This function is not user replaceable.

  local exitcode=$1
  if [[ -z $exitcode ]]; then
    exitcode=1
  fi
  if declare -F hadoop_usage >/dev/null ; then
    hadoop_usage
  elif [[ -x /usr/bin/cowsay ]]; then
    /usr/bin/cowsay -f elephant "Sorry, no help available."
  else
    hadoop_error "Sorry, no help available."
  fi
  exit $exitcode
}

function hadoop_verify_secure_prereq
{
  # if you are on an OS like Illumos that has functional roles
  # and you are using pfexec, you'll probably want to change
  # this.

  # ${EUID} comes from the shell itself!
  if [[ "${EUID}" -ne 0 ]] && [[ -z "${HADOOP_SECURE_COMMAND}" ]]; then
    hadoop_error "ERROR: You must be a privileged user in order to run a secure service."
    exit 1
  else
    return 0
  fi
}

function hadoop_setup_secure_service
{
  # need a more complicated setup? replace me!

  HADOOP_PID_DIR=${HADOOP_SECURE_PID_DIR}
  HADOOP_LOG_DIR=${HADOOP_SECURE_LOG_DIR}
}

function hadoop_verify_piddir
{
  if [[ -z "${HADOOP_PID_DIR}" ]]; then
    hadoop_error "No pid directory defined."
    exit 1
  fi
  if [[ ! -w "${HADOOP_PID_DIR}" ]] && [[ ! -d "${HADOOP_PID_DIR}" ]]; then
    hadoop_error "WARNING: ${HADOOP_PID_DIR} does not exist. Creating."
    mkdir -p "${HADOOP_PID_DIR}" > /dev/null 2>&1
    if [[ $? -gt 0 ]]; then
      hadoop_error "ERROR: Unable to create ${HADOOP_PID_DIR}. Aborting."
      exit 1
    fi
  fi
  touch "${HADOOP_PID_DIR}/$$" >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Unable to write in ${HADOOP_PID_DIR}. Aborting."
    exit 1
  fi
  rm "${HADOOP_PID_DIR}/$$" >/dev/null 2>&1
}

function hadoop_verify_logdir
{
  if [[ -z "${HADOOP_LOG_DIR}" ]]; then
    hadoop_error "No log directory defined."
    exit 1
  fi
  if [[ ! -w "${HADOOP_LOG_DIR}" ]] && [[ ! -d "${HADOOP_LOG_DIR}" ]]; then
    hadoop_error "WARNING: ${HADOOP_LOG_DIR} does not exist. Creating."
    mkdir -p "${HADOOP_LOG_DIR}" > /dev/null 2>&1
    if [[ $? -gt 0 ]]; then
      hadoop_error "ERROR: Unable to create ${HADOOP_LOG_DIR}. Aborting."
      exit 1
    fi
  fi
  touch "${HADOOP_LOG_DIR}/$$" >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Unable to write in ${HADOOP_LOG_DIR}. Aborting."
    exit 1
  fi
  rm "${HADOOP_LOG_DIR}/$$" >/dev/null 2>&1
}

function hadoop_status_daemon()
{
  #
  # LSB 4.1.0 compatible status command (1)
  #
  # 0 = program is running
  # 1 = dead, but still a pid (2)
  # 2 = (not used by us)
  # 3 = not running
  #
  # 1 - this is not an endorsement of the LSB
  #
  # 2 - technically, the specification says /var/run/pid, so
  #     we should never return this value, but we're giving
  #     them the benefit of a doubt and returning 1 even if
  #     our pid is not in /var/run .
  #

  local pidfile=$1
  shift

  local pid

  if [[ -f "${pidfile}" ]]; then
    pid=$(cat "${pidfile}")
    if ps -p "${pid}" > /dev/null 2>&1; then
      return 0
    fi
    return 1
  fi
  return 3
}

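# Illustrative usage sketch (not part of the library): mapping the LSB-style
# return code to a human-readable message. The pid file path is hypothetical.
#
#   hadoop_status_daemon "${HADOOP_PID_DIR}/hadoop-hdfs-namenode.pid"
#   case $? in
#     0) echo "running" ;;
#     1) echo "dead, but pid file exists" ;;
#     3) echo "not running" ;;
#   esac
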
function hadoop_java_exec
{
  # run a java command.  this is used for
  # non-daemons

  local command=$1
  local class=$2
  shift 2

  hadoop_debug "Final CLASSPATH: ${CLASSPATH}"
  hadoop_debug "Final HADOOP_OPTS: ${HADOOP_OPTS}"

  export CLASSPATH
  #shellcheck disable=SC2086
  exec "${JAVA}" "-Dproc_${command}" ${HADOOP_OPTS} "${class}" "$@"
}

function hadoop_start_daemon
{
  # this is our non-privileged daemon starter
  # that fires up a daemon in the *foreground*
  # so complex! so wow! much java!

  local command=$1
  local class=$2
  local pidfile=$3
  shift 3

  hadoop_debug "Final CLASSPATH: ${CLASSPATH}"
  hadoop_debug "Final HADOOP_OPTS: ${HADOOP_OPTS}"

  # this is for the non-daemon pid creation
  #shellcheck disable=SC2086
  echo $$ > "${pidfile}" 2>/dev/null
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot write ${command} pid ${pidfile}."
  fi

  export CLASSPATH
  #shellcheck disable=SC2086
  exec "${JAVA}" "-Dproc_${command}" ${HADOOP_OPTS} "${class}" "$@"
}

function hadoop_start_daemon_wrapper
{
  # this is our non-privileged daemon start
  # that fires up a daemon in the *background*
  local daemonname=$1
  local class=$2
  local pidfile=$3
  local outfile=$4
  shift 4

  local counter

  hadoop_rotate_log "${outfile}"

  hadoop_start_daemon "${daemonname}" \
    "$class" \
    "${pidfile}" \
    "$@" >> "${outfile}" 2>&1 < /dev/null &

  # we need to avoid a race condition here
  # so let's wait for the fork to finish
  # before overriding with the daemonized pid
  (( counter=0 ))
  while [[ ! -f ${pidfile} && ${counter} -le 5 ]]; do
    sleep 1
    (( counter++ ))
  done

  # this is for daemon pid creation
  #shellcheck disable=SC2086
  echo $! > "${pidfile}" 2>/dev/null
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot write ${daemonname} pid ${pidfile}."
  fi

  # shellcheck disable=SC2086
  renice "${HADOOP_NICENESS}" $! >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot set priority of ${daemonname} process $!"
  fi

  # shellcheck disable=SC2086
  disown %+ >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot disconnect ${daemonname} process $!"
  fi
  sleep 1

  # capture the ulimit output
  ulimit -a >> "${outfile}" 2>&1

  # shellcheck disable=SC2086
  if ! ps -p $! >/dev/null 2>&1; then
    return 1
  fi
  return 0
}

function hadoop_start_secure_daemon
{
  # this is used to launch a secure daemon in the *foreground*
  #
  local daemonname=$1
  local class=$2

  # pid file to create for our daemon
  local daemonpidfile=$3

  # where to send stdout. jsvc has bad habits so this *may* be &1
  # which means you send it to stdout!
  local daemonoutfile=$4

  # where to send stderr.  same thing, except &2 = stderr
  local daemonerrfile=$5
  local privpidfile=$6
  shift 6

  hadoop_rotate_log "${daemonoutfile}"
  hadoop_rotate_log "${daemonerrfile}"

  jsvc="${JSVC_HOME}/jsvc"
  if [[ ! -f "${jsvc}" ]]; then
    hadoop_error "JSVC_HOME is not set or set incorrectly. jsvc is required to run secure"
    hadoop_error "or privileged daemons. Please download and install jsvc from "
    hadoop_error "http://archive.apache.org/dist/commons/daemon/binaries/ "
    hadoop_error "and set JSVC_HOME to the directory containing the jsvc binary."
    exit 1
  fi

  # note that shellcheck will throw a
  # bogus for-our-use-case 2086 here.
  # it doesn't properly support multi-line situations

  hadoop_debug "Final CLASSPATH: ${CLASSPATH}"
  hadoop_debug "Final HADOOP_OPTS: ${HADOOP_OPTS}"

  #shellcheck disable=SC2086
  echo $$ > "${privpidfile}" 2>/dev/null
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot write ${daemonname} pid ${privpidfile}."
  fi

  exec "${jsvc}" \
    "-Dproc_${daemonname}" \
    -outfile "${daemonoutfile}" \
    -errfile "${daemonerrfile}" \
    -pidfile "${daemonpidfile}" \
    -nodetach \
    -user "${HADOOP_SECURE_USER}" \
    -cp "${CLASSPATH}" \
    ${HADOOP_OPTS} \
    "${class}" "$@"
}

function hadoop_start_secure_daemon_wrapper
{
  # this wraps hadoop_start_secure_daemon to take care
  # of the dirty work to launch a daemon in the background!
  local daemonname=$1
  local class=$2

  # same rules as hadoop_start_secure_daemon except we
  # have some additional parameters

  local daemonpidfile=$3

  local daemonoutfile=$4

  # the pid file of the subprocess that spawned our
  # secure launcher
  local jsvcpidfile=$5

  # the output of the subprocess that spawned our secure
  # launcher
  local jsvcoutfile=$6

  local daemonerrfile=$7
  shift 7

  local counter

  hadoop_rotate_log "${jsvcoutfile}"

  hadoop_start_secure_daemon \
    "${daemonname}" \
    "${class}" \
    "${daemonpidfile}" \
    "${daemonoutfile}" \
    "${daemonerrfile}" \
    "${jsvcpidfile}" "$@" >> "${jsvcoutfile}" 2>&1 < /dev/null &

  # we need to avoid a race condition here
  # so let's wait for the fork to finish
  # before overriding with the daemonized pid
  (( counter=0 ))
  while [[ ! -f ${daemonpidfile} && ${counter} -le 5 ]]; do
    sleep 1
    (( counter++ ))
  done

  # this is for the daemon pid creation
  #shellcheck disable=SC2086
  echo $! > "${jsvcpidfile}" 2>/dev/null
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot write ${daemonname} pid ${jsvcpidfile}."
  fi

  sleep 1
  #shellcheck disable=SC2086
  renice "${HADOOP_NICENESS}" $! >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot set priority of ${daemonname} process $!"
  fi
  if [[ -f "${daemonpidfile}" ]]; then
    #shellcheck disable=SC2046
    renice "${HADOOP_NICENESS}" $(cat "${daemonpidfile}" 2>/dev/null) >/dev/null 2>&1
    if [[ $? -gt 0 ]]; then
      hadoop_error "ERROR: Cannot set priority of ${daemonname} process $(cat "${daemonpidfile}" 2>/dev/null)"
    fi
  fi
  #shellcheck disable=SC2046
  disown %+ >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot disconnect ${daemonname} process $!"
  fi

  # capture the ulimit output
  su "${HADOOP_SECURE_USER}" -c 'bash -c "ulimit -a"' >> "${jsvcoutfile}" 2>&1
  #shellcheck disable=SC2086
  if ! ps -p $! >/dev/null 2>&1; then
    return 1
  fi
  return 0
}

function hadoop_stop_daemon
{
  local cmd=$1
  local pidfile=$2
  shift 2

  local pid

  if [[ -f "${pidfile}" ]]; then
    pid=$(cat "$pidfile")

    kill "${pid}" >/dev/null 2>&1
    sleep "${HADOOP_STOP_TIMEOUT}"
    if kill -0 "${pid}" > /dev/null 2>&1; then
      hadoop_error "WARNING: ${cmd} did not stop gracefully after ${HADOOP_STOP_TIMEOUT} seconds: Trying to kill with kill -9"
      kill -9 "${pid}" >/dev/null 2>&1
    fi
    if ps -p "${pid}" > /dev/null 2>&1; then
      hadoop_error "ERROR: Unable to kill ${pid}"
    else
      rm -f "${pidfile}" >/dev/null 2>&1
    fi
  fi
}

function hadoop_stop_secure_daemon
{
  local command=$1
  local daemonpidfile=$2
  local privpidfile=$3
  shift 3

  local ret

  hadoop_stop_daemon "${command}" "${daemonpidfile}"
  ret=$?
  rm -f "${daemonpidfile}" "${privpidfile}" 2>/dev/null
  return ${ret}
}

function hadoop_daemon_handler
{
  local daemonmode=$1
  local daemonname=$2
  local class=$3
  local daemon_pidfile=$4
  local daemon_outfile=$5
  shift 5

  case ${daemonmode} in
    status)
      hadoop_status_daemon "${daemon_pidfile}"
      exit $?
    ;;

    stop)
      hadoop_stop_daemon "${daemonname}" "${daemon_pidfile}"
      exit $?
    ;;

    ##COMPAT -- older hadoops would also start daemons by default
    start|default)
      hadoop_verify_piddir
      hadoop_verify_logdir
      hadoop_status_daemon "${daemon_pidfile}"
      if [[ $? == 0 ]]; then
        hadoop_error "${daemonname} is running as process $(cat "${daemon_pidfile}"). Stop it first."
        exit 1
      else
        # stale pid file, so just remove it and continue on
        rm -f "${daemon_pidfile}" >/dev/null 2>&1
      fi

      ##COMPAT - differentiate between --daemon start and nothing
      # "nothing" shouldn't detach
      if [[ "$daemonmode" = "default" ]]; then
        hadoop_start_daemon "${daemonname}" "${class}" "${daemon_pidfile}" "$@"
      else
        hadoop_start_daemon_wrapper "${daemonname}" \
          "${class}" "${daemon_pidfile}" "${daemon_outfile}" "$@"
      fi
    ;;
  esac
}

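# Illustrative usage sketch (not part of the library): how a launcher script
# might dispatch into the handler. HADOOP_DAEMON_MODE and the file names are
# assumed/hypothetical here.
#
#   hadoop_daemon_handler "${HADOOP_DAEMON_MODE}" namenode \
#     org.apache.hadoop.hdfs.server.namenode.NameNode \
#     "${HADOOP_PID_DIR}/hadoop-hdfs-namenode.pid" \
#     "${HADOOP_LOG_DIR}/hadoop-hdfs-namenode.out"
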
function hadoop_secure_daemon_handler
{
  local daemonmode=$1
  local daemonname=$2
  local classname=$3
  local daemon_pidfile=$4
  local daemon_outfile=$5
  local priv_pidfile=$6
  local priv_outfile=$7
  local priv_errfile=$8
  shift 8

  case ${daemonmode} in
    status)
      hadoop_status_daemon "${daemon_pidfile}"
      exit $?
    ;;

    stop)
      hadoop_stop_secure_daemon "${daemonname}" \
        "${daemon_pidfile}" "${priv_pidfile}"
      exit $?
    ;;

    ##COMPAT -- older hadoops would also start daemons by default
    start|default)
      hadoop_verify_piddir
      hadoop_verify_logdir
      hadoop_status_daemon "${daemon_pidfile}"
      if [[ $? == 0 ]]; then
        hadoop_error "${daemonname} is running as process $(cat "${daemon_pidfile}"). Stop it first."
        exit 1
      else
        # stale pid file, so just remove it and continue on
        rm -f "${daemon_pidfile}" >/dev/null 2>&1
      fi

      ##COMPAT - differentiate between --daemon start and nothing
      # "nothing" shouldn't detach
      if [[ "${daemonmode}" = "default" ]]; then
        hadoop_start_secure_daemon "${daemonname}" "${classname}" \
          "${daemon_pidfile}" "${daemon_outfile}" \
          "${priv_errfile}" "${priv_pidfile}" "$@"
      else
        hadoop_start_secure_daemon_wrapper "${daemonname}" "${classname}" \
          "${daemon_pidfile}" "${daemon_outfile}" \
          "${priv_pidfile}" "${priv_outfile}" "${priv_errfile}" "$@"
      fi
    ;;
  esac
}

function hadoop_verify_user
{
  local command=$1
  local uservar="HADOOP_${command}_USER"

  if [[ -n ${!uservar} ]]; then
    if [[ ${!uservar} != ${USER} ]]; then
      hadoop_error "ERROR: ${command} can only be executed by ${!uservar}."
      exit 1
    fi
  fi
}

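# Illustrative usage sketch (not part of the library): restricting who may run
# a subcommand by exporting HADOOP_<COMMAND>_USER before the check runs. The
# command name below is illustrative.
#
#   export HADOOP_NAMENODE_USER=hdfs
#   hadoop_verify_user NAMENODE   # exits 1 unless $USER is "hdfs"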