#!/bin/bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
function hadoop_error
{
  # NOTE: This function is not user replaceable.
  echo "$*" 1>&2
}
function hadoop_debug
{
  if [[ -n "${HADOOP_SHELL_SCRIPT_DEBUG}" ]]; then
    echo "DEBUG: $*" 1>&2
  fi
}
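
# Usage sketch (illustrative, not part of the upstream file): setting
# HADOOP_SHELL_SCRIPT_DEBUG to any non-empty value before invoking a
# hadoop command surfaces these messages on stderr, e.g.:
#
#   HADOOP_SHELL_SCRIPT_DEBUG=true bin/hadoop classpath
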
function hadoop_bootstrap_init
{
  # NOTE: This function is not user replaceable.

  # the root of the Hadoop installation
  # See HADOOP-6255 for the expected directory structure layout

  # By now, HADOOP_LIBEXEC_DIR should have been defined upstream
  # We can piggyback off of that to figure out where the default
  # HADOOP_PREFIX should be. This allows us to run without
  # HADOOP_PREFIX ever being defined by a human! As a consequence,
  # HADOOP_LIBEXEC_DIR now becomes perhaps the single most powerful
  # env var within Hadoop.
  if [[ -z "${HADOOP_LIBEXEC_DIR}" ]]; then
    hadoop_error "HADOOP_LIBEXEC_DIR is not defined. Exiting."
    exit 1
  fi
  HADOOP_DEFAULT_PREFIX=$(cd -P -- "${HADOOP_LIBEXEC_DIR}/.." >/dev/null && pwd -P)
  HADOOP_PREFIX=${HADOOP_PREFIX:-$HADOOP_DEFAULT_PREFIX}
  export HADOOP_PREFIX

  #
  # short-cuts. vendors may redefine these as well, preferably
  # in hadoop-layouts.sh
  #
  HADOOP_COMMON_DIR=${HADOOP_COMMON_DIR:-"share/hadoop/common"}
  HADOOP_COMMON_LIB_JARS_DIR=${HADOOP_COMMON_LIB_JARS_DIR:-"share/hadoop/common/lib"}
  HADOOP_COMMON_LIB_NATIVE_DIR=${HADOOP_COMMON_LIB_NATIVE_DIR:-"lib/native"}
  HDFS_DIR=${HDFS_DIR:-"share/hadoop/hdfs"}
  HDFS_LIB_JARS_DIR=${HDFS_LIB_JARS_DIR:-"share/hadoop/hdfs/lib"}
  YARN_DIR=${YARN_DIR:-"share/hadoop/yarn"}
  YARN_LIB_JARS_DIR=${YARN_LIB_JARS_DIR:-"share/hadoop/yarn/lib"}
  MAPRED_DIR=${MAPRED_DIR:-"share/hadoop/mapreduce"}
  MAPRED_LIB_JARS_DIR=${MAPRED_LIB_JARS_DIR:-"share/hadoop/mapreduce/lib"}

  # setup a default TOOL_PATH
  TOOL_PATH=${TOOL_PATH:-${HADOOP_PREFIX}/share/hadoop/tools/lib/*}

  export HADOOP_OS_TYPE=${HADOOP_OS_TYPE:-$(uname -s)}

  # defaults
  export HADOOP_OPTS=${HADOOP_OPTS:-"-Djava.net.preferIPv4Stack=true"}
  hadoop_debug "Initial HADOOP_OPTS=${HADOOP_OPTS}"
}
function hadoop_find_confdir
{
  # NOTE: This function is not user replaceable.

  local conf_dir

  # Look for the basic hadoop configuration area.
  #
  # An attempt at compatibility with some Hadoop 1.x
  # installs.
  if [[ -e "${HADOOP_PREFIX}/conf/hadoop-env.sh" ]]; then
    conf_dir="conf"
  else
    conf_dir="etc/hadoop"
  fi
  export HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-${HADOOP_PREFIX}/${conf_dir}}"

  hadoop_debug "HADOOP_CONF_DIR=${HADOOP_CONF_DIR}"
}
function hadoop_exec_hadoopenv
{
  # NOTE: This function is not user replaceable.

  if [[ -z "${HADOOP_ENV_PROCESSED}" ]]; then
    if [[ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]]; then
      export HADOOP_ENV_PROCESSED=true
      . "${HADOOP_CONF_DIR}/hadoop-env.sh"
    fi
  fi
}
function hadoop_exec_userfuncs
{
  # NOTE: This function is not user replaceable.

  if [[ -e "${HADOOP_CONF_DIR}/hadoop-user-functions.sh" ]]; then
    . "${HADOOP_CONF_DIR}/hadoop-user-functions.sh"
  fi
}
function hadoop_exec_hadooprc
{
  # Read the user's settings. This provides for users to override
  # and/or append hadoop-env.sh. It is not meant as a complete system override.
  if [[ -f "${HOME}/.hadooprc" ]]; then
    hadoop_debug "Applying the user's .hadooprc"
    . "${HOME}/.hadooprc"
  fi
}
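
# A minimal ~/.hadooprc sketch (illustrative): since the file is sourced the
# same way hadoop-env.sh is, plain variable overrides are the natural
# content, e.g.:
#
#   export HADOOP_SHELL_SCRIPT_DEBUG=true
#   export HADOOP_LOGLEVEL=DEBUG
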
function hadoop_basic_init
{
  # Some of these are also set in hadoop-env.sh.
  # we still set them here just in case hadoop-env.sh is
  # broken in some way, set up defaults, etc.
  #
  # but it is important to note that if you update these
  # you also need to update hadoop-env.sh as well!!!

  # CLASSPATH initially contains $HADOOP_CONF_DIR
  CLASSPATH="${HADOOP_CONF_DIR}"
  hadoop_debug "Initial CLASSPATH=${HADOOP_CONF_DIR}"

  if [[ -z "${HADOOP_COMMON_HOME}" ]] &&
     [[ -d "${HADOOP_PREFIX}/${HADOOP_COMMON_DIR}" ]]; then
    export HADOOP_COMMON_HOME="${HADOOP_PREFIX}"
  fi

  # default policy file for service-level authorization
  HADOOP_POLICYFILE=${HADOOP_POLICYFILE:-"hadoop-policy.xml"}

  # define HADOOP_HDFS_HOME
  if [[ -z "${HADOOP_HDFS_HOME}" ]] &&
     [[ -d "${HADOOP_PREFIX}/${HDFS_DIR}" ]]; then
    export HADOOP_HDFS_HOME="${HADOOP_PREFIX}"
  fi

  # define HADOOP_YARN_HOME
  if [[ -z "${HADOOP_YARN_HOME}" ]] &&
     [[ -d "${HADOOP_PREFIX}/${YARN_DIR}" ]]; then
    export HADOOP_YARN_HOME="${HADOOP_PREFIX}"
  fi

  # define HADOOP_MAPRED_HOME
  if [[ -z "${HADOOP_MAPRED_HOME}" ]] &&
     [[ -d "${HADOOP_PREFIX}/${MAPRED_DIR}" ]]; then
    export HADOOP_MAPRED_HOME="${HADOOP_PREFIX}"
  fi

  HADOOP_IDENT_STRING=${HADOOP_IDENT_STRING:-$USER}
  HADOOP_LOG_DIR=${HADOOP_LOG_DIR:-"${HADOOP_PREFIX}/logs"}
  HADOOP_LOGFILE=${HADOOP_LOGFILE:-hadoop.log}
  HADOOP_LOGLEVEL=${HADOOP_LOGLEVEL:-INFO}
  HADOOP_NICENESS=${HADOOP_NICENESS:-0}
  HADOOP_STOP_TIMEOUT=${HADOOP_STOP_TIMEOUT:-5}
  HADOOP_PID_DIR=${HADOOP_PID_DIR:-/tmp}
  HADOOP_ROOT_LOGGER=${HADOOP_ROOT_LOGGER:-${HADOOP_LOGLEVEL},console}
  HADOOP_DAEMON_ROOT_LOGGER=${HADOOP_DAEMON_ROOT_LOGGER:-${HADOOP_LOGLEVEL},RFA}
  HADOOP_SECURITY_LOGGER=${HADOOP_SECURITY_LOGGER:-INFO,NullAppender}
  HADOOP_SSH_OPTS=${HADOOP_SSH_OPTS:-"-o BatchMode=yes -o StrictHostKeyChecking=no -o ConnectTimeout=10s"}
  HADOOP_SECURE_LOG_DIR=${HADOOP_SECURE_LOG_DIR:-${HADOOP_LOG_DIR}}
  HADOOP_SECURE_PID_DIR=${HADOOP_SECURE_PID_DIR:-${HADOOP_PID_DIR}}
  HADOOP_SSH_PARALLEL=${HADOOP_SSH_PARALLEL:-10}
}
function hadoop_populate_slaves_file
{
  # NOTE: This function is not user replaceable.

  local slavesfile=$1
  shift
  if [[ -f "${slavesfile}" ]]; then
    # shellcheck disable=2034
    HADOOP_SLAVES="${slavesfile}"
  elif [[ -f "${HADOOP_CONF_DIR}/${slavesfile}" ]]; then
    # shellcheck disable=2034
    HADOOP_SLAVES="${HADOOP_CONF_DIR}/${slavesfile}"
    # shellcheck disable=2034
    YARN_SLAVES="${HADOOP_CONF_DIR}/${slavesfile}"
  else
    hadoop_error "ERROR: Cannot find hosts file \"${slavesfile}\""
    hadoop_exit_with_usage 1
  fi
}
function hadoop_rotate_log
{
  #
  # log rotation (mainly used for .out files)
  # Users are likely to replace this one for something
  # that gzips or uses dates or who knows what.
  #
  # be aware that &1 and &2 might go through here
  # so don't do anything too crazy...
  #
  local log=$1
  local num=${2:-5}

  if [[ -f "${log}" ]]; then # rotate logs
    while [[ ${num} -gt 1 ]]; do
      #shellcheck disable=SC2086
      let prev=${num}-1
      if [[ -f "${log}.${prev}" ]]; then
        mv "${log}.${prev}" "${log}.${num}"
      fi
      num=${prev}
    done
    mv "${log}" "${log}.${num}"
  fi
}
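
# Rotation sketch (illustrative): with the default depth of 5, a call like
#
#   hadoop_rotate_log /var/log/hadoop/namenode.out
#
# shifts namenode.out.4 -> namenode.out.5, ..., namenode.out.1 ->
# namenode.out.2, and finally namenode.out -> namenode.out.1; whatever was
# already at .5 is overwritten by the shift.
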
function hadoop_actual_ssh
{
  # we are passing this function to xargs
  # should get hostname followed by rest of command line
  local slave=$1
  shift

  # shellcheck disable=SC2086
  ssh ${HADOOP_SSH_OPTS} ${slave} $"${@// /\\ }" 2>&1 | sed "s/^/$slave: /"
}
function hadoop_connect_to_hosts
{
  # shellcheck disable=SC2124
  local params="$@"

  #
  # ssh (or whatever) to a host
  #

  # User can specify hostnames or a file where the hostnames are (not both)
  if [[ -n "${HADOOP_SLAVES}" && -n "${HADOOP_SLAVE_NAMES}" ]] ; then
    hadoop_error "ERROR: Both HADOOP_SLAVES and HADOOP_SLAVE_NAMES were defined. Aborting."
    exit 1
  fi

  if [[ -n "${HADOOP_SLAVE_NAMES}" ]] ; then
    SLAVE_NAMES=${HADOOP_SLAVE_NAMES}
  else
    SLAVE_FILE=${HADOOP_SLAVES:-${HADOOP_CONF_DIR}/slaves}
  fi

  # if pdsh is available, let's use it. otherwise default
  # to a loop around ssh. (ugh)
  if [[ -e '/usr/bin/pdsh' ]]; then
    if [[ -z "${HADOOP_SLAVE_NAMES}" ]] ; then
      # if we were given a file, just let pdsh deal with it.
      # shellcheck disable=SC2086
      PDSH_SSH_ARGS_APPEND="${HADOOP_SSH_OPTS}" pdsh \
        -f "${HADOOP_SSH_PARALLEL}" -w ^"${SLAVE_FILE}" $"${@// /\\ }" 2>&1
    else
      # no spaces allowed in the pdsh arg host list
      # shellcheck disable=SC2086
      SLAVE_NAMES=$(echo ${SLAVE_NAMES} | tr -s ' ' ,)
      PDSH_SSH_ARGS_APPEND="${HADOOP_SSH_OPTS}" pdsh \
        -f "${HADOOP_SSH_PARALLEL}" -w "${SLAVE_NAMES}" $"${@// /\\ }" 2>&1
    fi
  else
    if [[ -z "${SLAVE_NAMES}" ]]; then
      SLAVE_NAMES=$(sed 's/#.*$//;/^$/d' "${SLAVE_FILE}")
    fi

    # quoting here gets tricky. it's easier to push it into a function
    # so that we don't have to deal with it. However...
    # xargs can't use a function so instead we'll export it out
    # and force it into a subshell
    # moral of the story: just use pdsh.
    export -f hadoop_actual_ssh
    export HADOOP_SSH_OPTS

    # xargs is used with option -I to replace the placeholder in the
    # argument list with each hostname read from stdin/pipe. But it
    # considers one line as one argument while reading from stdin/pipe.
    # So place each hostname on a separate line when passing via pipe.
    SLAVE_NAMES=$(echo "$SLAVE_NAMES" | tr ' ' '\n' )
    echo "${SLAVE_NAMES}" | \
      xargs -n 1 -P"${HADOOP_SSH_PARALLEL}" \
      -I {} bash -c -- "hadoop_actual_ssh {} ${params}"
    wait
  fi
}
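
# Call sketch (illustrative): the slaves scripts funnel through here, so
# something like
#
#   HADOOP_SLAVE_NAMES="host1 host2" hadoop_connect_to_hosts uptime
#
# would run "uptime" on host1 and host2, via pdsh when it is installed and
# via the ssh/xargs fallback otherwise.
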
function hadoop_validate_classname
{
  local class=$1
  shift 1

  if [[ ! ${class} =~ \. ]]; then
    # assuming the arg is a typo of a command if it does not contain ".".
    # a class belonging to no package is not allowed as a result.
    hadoop_error "ERROR: ${class} is not COMMAND nor fully qualified CLASSNAME."
    return 1
  fi
  return 0
}
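
# Behavior sketch (illustrative): "org.example.MyTool" passes because it
# contains a dot, while a bare "MyTool" (or a mistyped subcommand such as
# "fsck1") is rejected with return code 1.
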
function hadoop_add_param
{
  #
  # general param dedupe..
  # $1 is what we are adding to
  # $2 is the name of what we want to add (key)
  # $3 is the key+value of what we're adding
  #
  # doing it this way allows us to support all sorts of
  # different syntaxes, just so long as they are space
  # delimited
  #
  if [[ ! ${!1} =~ $2 ]] ; then
    # shellcheck disable=SC2086
    eval $1="'${!1} $3'"
    hadoop_debug "$1 accepted $3"
  else
    hadoop_debug "$1 declined $3"
  fi
}
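
# Dedupe sketch (illustrative): $2 is matched as a regex against the current
# value of the variable named in $1, so
#
#   hadoop_add_param HADOOP_OPTS java.net.preferIPv4Stack \
#     "-Djava.net.preferIPv4Stack=true"
#
# appends the flag only when HADOOP_OPTS does not already mention it.
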
function hadoop_add_classpath
{
  # two params:
  # $1 = directory, file, wildcard, whatever to add
  # $2 = before or after, which determines where in the
  #      classpath this object should go. default is after
  # return 0 = success (added or duplicate)
  # return 1 = failure (doesn't exist, whatever)

  # However, with classpath (& JLP), we can do dedupe
  # along with some sanity checking (e.g., missing directories)
  # since we have a better idea of what is legal
  #
  # for wildcard at end, we can
  # at least check the dir exists
  if [[ $1 =~ ^.*\*$ ]]; then
    local mp
    mp=$(dirname "$1")
    if [[ ! -d "${mp}" ]]; then
      hadoop_debug "Rejected CLASSPATH: $1 (not a dir)"
      return 1
    fi

  # no wildcard in the middle, so check existence
  # (doesn't matter *what* it is)
  elif [[ ! $1 =~ ^.*\*.*$ ]] && [[ ! -e "$1" ]]; then
    hadoop_debug "Rejected CLASSPATH: $1 (does not exist)"
    return 1
  fi

  if [[ -z "${CLASSPATH}" ]]; then
    CLASSPATH=$1
    hadoop_debug "Initial CLASSPATH=$1"
  elif [[ ":${CLASSPATH}:" != *":$1:"* ]]; then
    if [[ "$2" = "before" ]]; then
      CLASSPATH="$1:${CLASSPATH}"
      hadoop_debug "Prepend CLASSPATH: $1"
    else
      CLASSPATH+=:$1
      hadoop_debug "Append CLASSPATH: $1"
    fi
  else
    hadoop_debug "Dupe CLASSPATH: $1"
  fi
  return 0
}
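
# Ordering sketch (illustrative): given CLASSPATH=/a:/b and an existing
# directory /c,
#
#   hadoop_add_classpath /c before   # CLASSPATH becomes /c:/a:/b
#   hadoop_add_classpath /b          # duplicate, CLASSPATH unchanged
#
# and a trailing-wildcard entry such as /c/lib/'*' is accepted as long as
# /c/lib exists.
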
function hadoop_add_colonpath
{
  # three params:
  # $1 = the name of the path variable being modified
  # $2 = directory, file, wildcard, whatever to add
  # $3 = before or after, which determines where in the
  #      path this object should go
  # return 0 = success
  # return 1 = failure (duplicate or not a directory)

  # this is CLASSPATH, JLP, etc but with dedupe but no
  # other checking
  if [[ -d "${2}" ]] && [[ ":${!1}:" != *":$2:"* ]]; then
    if [[ -z "${!1}" ]]; then
      # shellcheck disable=SC2086
      eval $1="'$2'"
      hadoop_debug "Initial colonpath($1): $2"
    elif [[ "$3" = "before" ]]; then
      # shellcheck disable=SC2086
      eval $1="'$2:${!1}'"
      hadoop_debug "Prepend colonpath($1): $2"
    else
      # shellcheck disable=SC2086
      eval $1+=":'$2'"
      hadoop_debug "Append colonpath($1): $2"
    fi
    return 0
  fi
  hadoop_debug "Rejected colonpath($1): $2"
  return 1
}
function hadoop_add_javalibpath
{
  # specialized function for a common use case
  hadoop_add_colonpath JAVA_LIBRARY_PATH "$1" "$2"
}

function hadoop_add_ldlibpath
{
  # specialized function for a common use case
  hadoop_add_colonpath LD_LIBRARY_PATH "$1" "$2"

  # note that we export this
  export LD_LIBRARY_PATH
}
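
# Wrapper sketch (illustrative):
#
#   hadoop_add_ldlibpath "${HADOOP_PREFIX}/lib/native"
#
# appends the directory to LD_LIBRARY_PATH if it exists and is not already
# present, then exports the result for the JVM and any child processes.
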
function hadoop_add_to_classpath_common
{
  #
  # get all of the common jars+config in the path
  #

  # developers
  if [[ -n "${HADOOP_ENABLE_BUILD_PATHS}" ]]; then
    hadoop_add_classpath "${HADOOP_COMMON_HOME}/hadoop-common/target/classes"
  fi

  if [[ -d "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_DIR}/webapps" ]]; then
    hadoop_add_classpath "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_DIR}"
  fi

  hadoop_add_classpath "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_LIB_JARS_DIR}"'/*'
  hadoop_add_classpath "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_DIR}"'/*'
}

function hadoop_add_to_classpath_hdfs
{
  #
  # get all of the hdfs jars+config in the path
  #

  # developers
  if [[ -n "${HADOOP_ENABLE_BUILD_PATHS}" ]]; then
    hadoop_add_classpath "${HADOOP_HDFS_HOME}/hadoop-hdfs/target/classes"
  fi

  # put hdfs in classpath if present
  if [[ -d "${HADOOP_HDFS_HOME}/${HDFS_DIR}/webapps" ]]; then
    hadoop_add_classpath "${HADOOP_HDFS_HOME}/${HDFS_DIR}"
  fi

  hadoop_add_classpath "${HADOOP_HDFS_HOME}/${HDFS_LIB_JARS_DIR}"'/*'
  hadoop_add_classpath "${HADOOP_HDFS_HOME}/${HDFS_DIR}"'/*'
}

function hadoop_add_to_classpath_yarn
{
  local i
  #
  # get all of the yarn jars+config in the path
  #

  # developers
  if [[ -n "${HADOOP_ENABLE_BUILD_PATHS}" ]]; then
    for i in yarn-api yarn-common yarn-mapreduce yarn-master-worker \
             yarn-server/yarn-server-nodemanager \
             yarn-server/yarn-server-common \
             yarn-server/yarn-server-resourcemanager; do
      hadoop_add_classpath "${HADOOP_YARN_HOME}/$i/target/classes"
    done

    hadoop_add_classpath "${HADOOP_YARN_HOME}/build/test/classes"
    hadoop_add_classpath "${HADOOP_YARN_HOME}/build/tools"
  fi

  if [[ -d "${HADOOP_YARN_HOME}/${YARN_DIR}/webapps" ]]; then
    hadoop_add_classpath "${HADOOP_YARN_HOME}/${YARN_DIR}"
  fi

  hadoop_add_classpath "${HADOOP_YARN_HOME}/${YARN_LIB_JARS_DIR}"'/*'
  hadoop_add_classpath "${HADOOP_YARN_HOME}/${YARN_DIR}"'/*'
}

function hadoop_add_to_classpath_mapred
{
  #
  # get all of the mapreduce jars+config in the path
  #

  # developers
  if [[ -n "${HADOOP_ENABLE_BUILD_PATHS}" ]]; then
    hadoop_add_classpath "${HADOOP_MAPRED_HOME}/hadoop-mapreduce-client-shuffle/target/classes"
    hadoop_add_classpath "${HADOOP_MAPRED_HOME}/hadoop-mapreduce-client-common/target/classes"
    hadoop_add_classpath "${HADOOP_MAPRED_HOME}/hadoop-mapreduce-client-hs/target/classes"
    hadoop_add_classpath "${HADOOP_MAPRED_HOME}/hadoop-mapreduce-client-hs-plugins/target/classes"
    hadoop_add_classpath "${HADOOP_MAPRED_HOME}/hadoop-mapreduce-client-app/target/classes"
    hadoop_add_classpath "${HADOOP_MAPRED_HOME}/hadoop-mapreduce-client-jobclient/target/classes"
    hadoop_add_classpath "${HADOOP_MAPRED_HOME}/hadoop-mapreduce-client-core/target/classes"
  fi

  if [[ -d "${HADOOP_MAPRED_HOME}/${MAPRED_DIR}/webapps" ]]; then
    hadoop_add_classpath "${HADOOP_MAPRED_HOME}/${MAPRED_DIR}"
  fi

  hadoop_add_classpath "${HADOOP_MAPRED_HOME}/${MAPRED_LIB_JARS_DIR}"'/*'
  hadoop_add_classpath "${HADOOP_MAPRED_HOME}/${MAPRED_DIR}"'/*'
}
function hadoop_add_to_classpath_userpath
{
  # Add the user-specified HADOOP_CLASSPATH to the
  # official CLASSPATH env var if HADOOP_USE_CLIENT_CLASSLOADER
  # is not set.
  # Add it first or last depending on if user has
  # set env-var HADOOP_USER_CLASSPATH_FIRST
  # we'll also dedupe it, because we're cool like that.
  #
  local c
  local array
  local i
  local j
  let c=0

  if [[ -n "${HADOOP_CLASSPATH}" ]]; then
    # I wonder if Java runs on VMS.
    for i in $(echo "${HADOOP_CLASSPATH}" | tr : '\n'); do
      array[$c]=$i
      let c+=1
    done
    let j=c-1

    if [[ -z "${HADOOP_USE_CLIENT_CLASSLOADER}" ]]; then
      if [[ -z "${HADOOP_USER_CLASSPATH_FIRST}" ]]; then
        for ((i=0; i<=j; i++)); do
          hadoop_add_classpath "${array[$i]}" after
        done
      else
        for ((i=j; i>=0; i--)); do
          hadoop_add_classpath "${array[$i]}" before
        done
      fi
    fi
  fi
}
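
# Ordering sketch (illustrative): with HADOOP_CLASSPATH=/x:/y the default
# result is CLASSPATH="...:/x:/y" (appended in order). Setting
# HADOOP_USER_CLASSPATH_FIRST=yes yields CLASSPATH="/x:/y:...": the entries
# are walked in reverse so that repeated prepends preserve the user's
# original ordering.
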
function hadoop_os_tricks
{
  local bindv6only

  # some OSes have special needs. here's some out of the box
  # examples for OS X and Linux. Vendors, replace this with your special sauce.
  case ${HADOOP_OS_TYPE} in
    Darwin)
      if [[ -z "${JAVA_HOME}" ]]; then
        if [[ -x /usr/libexec/java_home ]]; then
          export JAVA_HOME="$(/usr/libexec/java_home)"
        else
          export JAVA_HOME=/Library/Java/Home
        fi
      fi
    ;;
    Linux)
      bindv6only=$(/sbin/sysctl -n net.ipv6.bindv6only 2> /dev/null)

      # NOTE! HADOOP_ALLOW_IPV6 is a developer hook. We leave it
      # undocumented in hadoop-env.sh because we don't want users to
      # shoot themselves in the foot while devs make IPv6 work.
      if [[ -n "${bindv6only}" ]] &&
         [[ "${bindv6only}" -eq "1" ]] &&
         [[ "${HADOOP_ALLOW_IPV6}" != "yes" ]]; then
        hadoop_error "ERROR: \"net.ipv6.bindv6only\" is set to 1"
        hadoop_error "ERROR: Hadoop networking could be broken. Aborting."
        hadoop_error "ERROR: For more info: http://wiki.apache.org/hadoop/HadoopIPv6"
        exit 1
      fi

      # Newer versions of glibc use an arena memory allocator that
      # causes virtual memory usage to explode. This interacts badly
      # with the many threads that we use in Hadoop. Tune the variable
      # down to prevent vmem explosion.
      export MALLOC_ARENA_MAX=${MALLOC_ARENA_MAX:-4}
    ;;
  esac
}
function hadoop_java_setup
{
  # Bail if we did not detect it
  if [[ -z "${JAVA_HOME}" ]]; then
    hadoop_error "ERROR: JAVA_HOME is not set and could not be found."
    exit 1
  fi

  if [[ ! -d "${JAVA_HOME}" ]]; then
    hadoop_error "ERROR: JAVA_HOME ${JAVA_HOME} does not exist."
    exit 1
  fi

  JAVA="${JAVA_HOME}/bin/java"

  if [[ ! -x "$JAVA" ]]; then
    hadoop_error "ERROR: $JAVA is not executable."
    exit 1
  fi
}
function hadoop_finalize_libpaths
{
  if [[ -n "${JAVA_LIBRARY_PATH}" ]]; then
    hadoop_add_param HADOOP_OPTS java.library.path \
      "-Djava.library.path=${JAVA_LIBRARY_PATH}"
    export LD_LIBRARY_PATH
  fi
}
function hadoop_finalize_hadoop_heap
{
  if [[ -n "${HADOOP_HEAPSIZE_MAX}" ]]; then
    if [[ "${HADOOP_HEAPSIZE_MAX}" =~ ^[0-9]+$ ]]; then
      HADOOP_HEAPSIZE_MAX="${HADOOP_HEAPSIZE_MAX}m"
    fi
    hadoop_add_param HADOOP_OPTS Xmx "-Xmx${HADOOP_HEAPSIZE_MAX}"
  fi

  # backwards compatibility
  if [[ -n "${HADOOP_HEAPSIZE}" ]]; then
    if [[ "${HADOOP_HEAPSIZE}" =~ ^[0-9]+$ ]]; then
      HADOOP_HEAPSIZE="${HADOOP_HEAPSIZE}m"
    fi
    hadoop_add_param HADOOP_OPTS Xmx "-Xmx${HADOOP_HEAPSIZE}"
  fi

  if [[ -n "${HADOOP_HEAPSIZE_MIN}" ]]; then
    if [[ "${HADOOP_HEAPSIZE_MIN}" =~ ^[0-9]+$ ]]; then
      HADOOP_HEAPSIZE_MIN="${HADOOP_HEAPSIZE_MIN}m"
    fi
    hadoop_add_param HADOOP_OPTS Xms "-Xms${HADOOP_HEAPSIZE_MIN}"
  fi
}
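
# Sizing sketch (illustrative): a bare number is treated as megabytes, so
# HADOOP_HEAPSIZE_MAX=4096 becomes -Xmx4096m, while a value with an explicit
# unit such as HADOOP_HEAPSIZE_MAX=4g passes through as -Xmx4g. Because
# hadoop_add_param dedupes on the "Xmx" key, the legacy HADOOP_HEAPSIZE is
# ignored when HADOOP_HEAPSIZE_MAX has already supplied an -Xmx.
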
#
# fill in any last minute options that might not have been defined yet
#
function hadoop_finalize_hadoop_opts
{
  hadoop_add_param HADOOP_OPTS hadoop.log.dir "-Dhadoop.log.dir=${HADOOP_LOG_DIR}"
  hadoop_add_param HADOOP_OPTS hadoop.log.file "-Dhadoop.log.file=${HADOOP_LOGFILE}"
  hadoop_add_param HADOOP_OPTS hadoop.home.dir "-Dhadoop.home.dir=${HADOOP_PREFIX}"
  hadoop_add_param HADOOP_OPTS hadoop.id.str "-Dhadoop.id.str=${HADOOP_IDENT_STRING}"
  hadoop_add_param HADOOP_OPTS hadoop.root.logger "-Dhadoop.root.logger=${HADOOP_ROOT_LOGGER}"
  hadoop_add_param HADOOP_OPTS hadoop.policy.file "-Dhadoop.policy.file=${HADOOP_POLICYFILE}"
  hadoop_add_param HADOOP_OPTS hadoop.security.logger "-Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER}"
}
function hadoop_finalize_classpath
{
  hadoop_add_classpath "${HADOOP_CONF_DIR}" before

  # user classpath gets added at the last minute. this allows
  # override of CONF dirs and more
  hadoop_add_to_classpath_userpath
}

function hadoop_finalize
{
  hadoop_finalize_classpath
  hadoop_finalize_libpaths
  hadoop_finalize_hadoop_heap
  hadoop_finalize_hadoop_opts
}
function hadoop_exit_with_usage
{
  # NOTE: This function is not user replaceable.

  local exitcode=$1
  if [[ -z $exitcode ]]; then
    exitcode=1
  fi
  if declare -F hadoop_usage >/dev/null ; then
    hadoop_usage
  elif [[ -x /usr/bin/cowsay ]]; then
    /usr/bin/cowsay -f elephant "Sorry, no help available."
  else
    hadoop_error "Sorry, no help available."
  fi
  exit $exitcode
}
function hadoop_verify_secure_prereq
{
  # if you are on an OS like Illumos that has functional roles
  # and you are using pfexec, you'll probably want to change
  # this.

  # ${EUID} comes from the shell itself!
  if [[ "${EUID}" -ne 0 ]] && [[ -z "${HADOOP_SECURE_COMMAND}" ]]; then
    hadoop_error "ERROR: You must be a privileged user in order to run a secure service."
    exit 1
  else
    return 0
  fi
}

function hadoop_setup_secure_service
{
  # need a more complicated setup? replace me!
  HADOOP_PID_DIR=${HADOOP_SECURE_PID_DIR}
  HADOOP_LOG_DIR=${HADOOP_SECURE_LOG_DIR}
}
function hadoop_verify_piddir
{
  if [[ -z "${HADOOP_PID_DIR}" ]]; then
    hadoop_error "No pid directory defined."
    exit 1
  fi
  if [[ ! -w "${HADOOP_PID_DIR}" ]] && [[ ! -d "${HADOOP_PID_DIR}" ]]; then
    hadoop_error "WARNING: ${HADOOP_PID_DIR} does not exist. Creating."
    mkdir -p "${HADOOP_PID_DIR}" > /dev/null 2>&1
    if [[ $? -gt 0 ]]; then
      hadoop_error "ERROR: Unable to create ${HADOOP_PID_DIR}. Aborting."
      exit 1
    fi
  fi
  touch "${HADOOP_PID_DIR}/$$" >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Unable to write in ${HADOOP_PID_DIR}. Aborting."
    exit 1
  fi
  rm "${HADOOP_PID_DIR}/$$" >/dev/null 2>&1
}

function hadoop_verify_logdir
{
  if [[ -z "${HADOOP_LOG_DIR}" ]]; then
    hadoop_error "No log directory defined."
    exit 1
  fi
  if [[ ! -w "${HADOOP_LOG_DIR}" ]] && [[ ! -d "${HADOOP_LOG_DIR}" ]]; then
    hadoop_error "WARNING: ${HADOOP_LOG_DIR} does not exist. Creating."
    mkdir -p "${HADOOP_LOG_DIR}" > /dev/null 2>&1
    if [[ $? -gt 0 ]]; then
      hadoop_error "ERROR: Unable to create ${HADOOP_LOG_DIR}. Aborting."
      exit 1
    fi
  fi
  touch "${HADOOP_LOG_DIR}/$$" >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Unable to write in ${HADOOP_LOG_DIR}. Aborting."
    exit 1
  fi
  rm "${HADOOP_LOG_DIR}/$$" >/dev/null 2>&1
}
function hadoop_status_daemon
{
  #
  # LSB 4.1.0 compatible status command (1)
  #
  # 0 = program is running
  # 1 = dead, but still a pid (2)
  # 2 = (not used by us)
  # 3 = not running
  #
  # 1 - this is not an endorsement of the LSB
  #
  # 2 - technically, the specification says /var/run/pid, so
  #     we should never return this value, but we're giving
  #     them the benefit of a doubt and returning 1 even if
  #     our pid is not in /var/run .
  #
  local pidfile=$1
  shift

  local pid

  if [[ -f "${pidfile}" ]]; then
    pid=$(cat "${pidfile}")
    if ps -p "${pid}" > /dev/null 2>&1; then
      return 0
    fi
    return 1
  fi
  return 3
}
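
# Status sketch (illustrative): callers branch on the LSB-style return code,
# e.g.
#
#   hadoop_status_daemon "${pidfile}"
#   case $? in
#     0) echo "running" ;;
#     1) echo "dead, but pid file remains" ;;
#     3) echo "not running" ;;
#   esac
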
function hadoop_java_exec
{
  # run a java command. this is used for
  # non-daemons
  local command=$1
  local class=$2
  shift 2

  hadoop_debug "Final CLASSPATH: ${CLASSPATH}"
  hadoop_debug "Final HADOOP_OPTS: ${HADOOP_OPTS}"

  export CLASSPATH
  #shellcheck disable=SC2086
  exec "${JAVA}" "-Dproc_${command}" ${HADOOP_OPTS} "${class}" "$@"
}
function hadoop_start_daemon
{
  # this is our non-privileged daemon starter
  # that fires up a daemon in the *foreground*
  # so complex! so wow! much java!
  local command=$1
  local class=$2
  local pidfile=$3
  shift 3

  hadoop_debug "Final CLASSPATH: ${CLASSPATH}"
  hadoop_debug "Final HADOOP_OPTS: ${HADOOP_OPTS}"

  # this is for the non-daemon pid creation
  #shellcheck disable=SC2086
  echo $$ > "${pidfile}" 2>/dev/null
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot write ${command} pid ${pidfile}."
  fi

  export CLASSPATH
  #shellcheck disable=SC2086
  exec "${JAVA}" "-Dproc_${command}" ${HADOOP_OPTS} "${class}" "$@"
}
function hadoop_start_daemon_wrapper
{
  # this is our non-privileged daemon start
  # that fires up a daemon in the *background*
  local daemonname=$1
  local class=$2
  local pidfile=$3
  local outfile=$4
  shift 4

  local counter

  hadoop_rotate_log "${outfile}"

  hadoop_start_daemon "${daemonname}" \
    "$class" \
    "${pidfile}" \
    "$@" >> "${outfile}" 2>&1 < /dev/null &

  # we need to avoid a race condition here
  # so let's wait for the fork to finish
  # before overriding with the daemonized pid
  (( counter=0 ))
  while [[ ! -f ${pidfile} && ${counter} -le 5 ]]; do
    sleep 1
    (( counter++ ))
  done

  # this is for daemon pid creation
  #shellcheck disable=SC2086
  echo $! > "${pidfile}" 2>/dev/null
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot write ${daemonname} pid ${pidfile}."
  fi

  # shellcheck disable=SC2086
  renice "${HADOOP_NICENESS}" $! >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot set priority of ${daemonname} process $!"
  fi

  # shellcheck disable=SC2086
  disown %+ >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot disconnect ${daemonname} process $!"
  fi
  sleep 1

  # capture the ulimit output
  ulimit -a >> "${outfile}" 2>&1

  # shellcheck disable=SC2086
  if ! ps -p $! >/dev/null 2>&1; then
    return 1
  fi
  return 0
}
function hadoop_start_secure_daemon
{
  # this is used to launch a secure daemon in the *foreground*
  #
  local daemonname=$1
  local class=$2

  # pid file to create for our daemon
  local daemonpidfile=$3

  # where to send stdout. jsvc has bad habits so this *may* be &1
  # which means you send it to stdout!
  local daemonoutfile=$4

  # where to send stderr. same thing, except &2 = stderr
  local daemonerrfile=$5
  local privpidfile=$6
  shift 6

  hadoop_rotate_log "${daemonoutfile}"
  hadoop_rotate_log "${daemonerrfile}"

  jsvc="${JSVC_HOME}/jsvc"
  if [[ ! -f "${jsvc}" ]]; then
    hadoop_error "JSVC_HOME is not set or set incorrectly. jsvc is required to run secure"
    hadoop_error "or privileged daemons. Please download and install jsvc from "
    hadoop_error "http://archive.apache.org/dist/commons/daemon/binaries/ "
    hadoop_error "and set JSVC_HOME to the directory containing the jsvc binary."
    exit 1
  fi

  # note that shellcheck will throw a
  # bogus for-our-use-case 2086 here.
  # it doesn't properly support multi-line situations
  hadoop_debug "Final CLASSPATH: ${CLASSPATH}"
  hadoop_debug "Final HADOOP_OPTS: ${HADOOP_OPTS}"

  #shellcheck disable=SC2086
  echo $$ > "${privpidfile}" 2>/dev/null
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot write ${daemonname} pid ${privpidfile}."
  fi

  exec "${jsvc}" \
    "-Dproc_${daemonname}" \
    -outfile "${daemonoutfile}" \
    -errfile "${daemonerrfile}" \
    -pidfile "${daemonpidfile}" \
    -nodetach \
    -user "${HADOOP_SECURE_USER}" \
    -cp "${CLASSPATH}" \
    ${HADOOP_OPTS} \
    "${class}" "$@"
}
function hadoop_start_secure_daemon_wrapper
{
  # this wraps hadoop_start_secure_daemon to take care
  # of the dirty work to launch a daemon in the background!
  local daemonname=$1
  local class=$2

  # same rules as hadoop_start_secure_daemon except we
  # have some additional parameters
  local daemonpidfile=$3
  local daemonoutfile=$4

  # the pid file of the subprocess that spawned our
  # secure launcher
  local jsvcpidfile=$5

  # the output of the subprocess that spawned our secure
  # launcher
  local jsvcoutfile=$6
  local daemonerrfile=$7
  shift 7

  local counter

  hadoop_rotate_log "${jsvcoutfile}"

  hadoop_start_secure_daemon \
    "${daemonname}" \
    "${class}" \
    "${daemonpidfile}" \
    "${daemonoutfile}" \
    "${daemonerrfile}" \
    "${jsvcpidfile}" "$@" >> "${jsvcoutfile}" 2>&1 < /dev/null &

  # we need to avoid a race condition here
  # so let's wait for the fork to finish
  # before overriding with the daemonized pid
  (( counter=0 ))
  while [[ ! -f ${daemonpidfile} && ${counter} -le 5 ]]; do
    sleep 1
    (( counter++ ))
  done

  # this is for the daemon pid creation
  #shellcheck disable=SC2086
  echo $! > "${jsvcpidfile}" 2>/dev/null
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot write ${daemonname} pid ${jsvcpidfile}."
  fi

  sleep 1
  #shellcheck disable=SC2086
  renice "${HADOOP_NICENESS}" $! >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot set priority of ${daemonname} process $!"
  fi
  if [[ -f "${daemonpidfile}" ]]; then
    #shellcheck disable=SC2046
    renice "${HADOOP_NICENESS}" $(cat "${daemonpidfile}" 2>/dev/null) >/dev/null 2>&1
    if [[ $? -gt 0 ]]; then
      hadoop_error "ERROR: Cannot set priority of ${daemonname} process $(cat "${daemonpidfile}" 2>/dev/null)"
    fi
  fi
  #shellcheck disable=SC2046
  disown %+ >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot disconnect ${daemonname} process $!"
  fi

  # capture the ulimit output
  su "${HADOOP_SECURE_USER}" -c 'bash -c "ulimit -a"' >> "${jsvcoutfile}" 2>&1

  #shellcheck disable=SC2086
  if ! ps -p $! >/dev/null 2>&1; then
    return 1
  fi
  return 0
}
function hadoop_stop_daemon
{
  local cmd=$1
  local pidfile=$2
  shift 2

  local pid

  if [[ -f "${pidfile}" ]]; then
    pid=$(cat "$pidfile")
    kill "${pid}" >/dev/null 2>&1
    sleep "${HADOOP_STOP_TIMEOUT}"
    if kill -0 "${pid}" > /dev/null 2>&1; then
      hadoop_error "WARNING: ${cmd} did not stop gracefully after ${HADOOP_STOP_TIMEOUT} seconds: Trying to kill with kill -9"
      kill -9 "${pid}" >/dev/null 2>&1
    fi
    if ps -p "${pid}" > /dev/null 2>&1; then
      hadoop_error "ERROR: Unable to kill ${pid}"
    else
      rm -f "${pidfile}" >/dev/null 2>&1
    fi
  fi
}

function hadoop_stop_secure_daemon
{
  local command=$1
  local daemonpidfile=$2
  local privpidfile=$3
  shift 3

  local ret

  hadoop_stop_daemon "${command}" "${daemonpidfile}"
  ret=$?
  rm -f "${daemonpidfile}" "${privpidfile}" 2>/dev/null
  return ${ret}
}
function hadoop_daemon_handler
{
  local daemonmode=$1
  local daemonname=$2
  local class=$3
  local daemon_pidfile=$4
  local daemon_outfile=$5
  shift 5

  case ${daemonmode} in
    status)
      hadoop_status_daemon "${daemon_pidfile}"
      exit $?
    ;;

    stop)
      hadoop_stop_daemon "${daemonname}" "${daemon_pidfile}"
      exit $?
    ;;

    ##COMPAT -- older hadoops would also start daemons by default
    start|default)
      hadoop_verify_piddir
      hadoop_verify_logdir
      hadoop_status_daemon "${daemon_pidfile}"
      if [[ $? == 0 ]]; then
        hadoop_error "${daemonname} is running as process $(cat "${daemon_pidfile}"). Stop it first."
        exit 1
      else
        # stale pid file, so just remove it and continue on
        rm -f "${daemon_pidfile}" >/dev/null 2>&1
      fi

      ##COMPAT - differentiate between --daemon start and nothing
      # "nothing" shouldn't detach
      if [[ "$daemonmode" = "default" ]]; then
        hadoop_start_daemon "${daemonname}" "${class}" "${daemon_pidfile}" "$@"
      else
        hadoop_start_daemon_wrapper "${daemonname}" \
          "${class}" "${daemon_pidfile}" "${daemon_outfile}" "$@"
      fi
    ;;
  esac
}
function hadoop_secure_daemon_handler
{
  local daemonmode=$1
  local daemonname=$2
  local classname=$3
  local daemon_pidfile=$4
  local daemon_outfile=$5
  local priv_pidfile=$6
  local priv_outfile=$7
  local priv_errfile=$8
  shift 8

  case ${daemonmode} in
    status)
      hadoop_status_daemon "${daemon_pidfile}"
      exit $?
    ;;

    stop)
      hadoop_stop_secure_daemon "${daemonname}" \
        "${daemon_pidfile}" "${priv_pidfile}"
      exit $?
    ;;

    ##COMPAT -- older hadoops would also start daemons by default
    start|default)
      hadoop_verify_piddir
      hadoop_verify_logdir
      hadoop_status_daemon "${daemon_pidfile}"
      if [[ $? == 0 ]]; then
        hadoop_error "${daemonname} is running as process $(cat "${daemon_pidfile}"). Stop it first."
        exit 1
      else
        # stale pid file, so just remove it and continue on
        rm -f "${daemon_pidfile}" >/dev/null 2>&1
      fi

      ##COMPAT - differentiate between --daemon start and nothing
      # "nothing" shouldn't detach
      if [[ "${daemonmode}" = "default" ]]; then
        hadoop_start_secure_daemon "${daemonname}" "${classname}" \
          "${daemon_pidfile}" "${daemon_outfile}" \
          "${priv_errfile}" "${priv_pidfile}" "$@"
      else
        hadoop_start_secure_daemon_wrapper "${daemonname}" "${classname}" \
          "${daemon_pidfile}" "${daemon_outfile}" \
          "${priv_pidfile}" "${priv_outfile}" "${priv_errfile}" "$@"
      fi
    ;;
  esac
}
function hadoop_verify_user
{
  local command=$1
  local uservar="HADOOP_${command}_USER"

  if [[ -n ${!uservar} ]]; then
    if [[ ${!uservar} != ${USER} ]]; then
      hadoop_error "ERROR: ${command} can only be executed by ${!uservar}."
      exit 1
    fi
  fi
}
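
# Gate sketch (illustrative): the variable name is built from the command
# string exactly as passed in, so with
#
#   export HADOOP_datanode_USER=hdfs
#
# a call to "hadoop_verify_user datanode" aborts unless ${USER} is hdfs.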