Browse Source

HADOOP-13108. dynamic subcommands need a way to manipulate arguments

Allen Wittenauer 9 years ago
parent
commit
1dcd9a9a03

+ 9 - 7
hadoop-common-project/hadoop-common/src/main/bin/hadoop

@@ -200,16 +200,18 @@ fi
 HADOOP_SUBCMD=$1
 shift
 
+HADOOP_SUBCMD_ARGS=("$@")
+
 if declare -f hadoop_subcommand_"${HADOOP_SUBCMD}" >/dev/null 2>&1; then
-  "hadoop_subcommand_${HADOOP_SUBCMD}" "$@"
+  "hadoop_subcommand_${HADOOP_SUBCMD}" "${HADOOP_SUBCMD_ARGS[@]}"
 else
-  hadoopcmd_case "${HADOOP_SUBCMD}" "$@"
+  hadoopcmd_case "${HADOOP_SUBCMD}" "${HADOOP_SUBCMD_ARGS[@]}"
 fi
 
 hadoop_verify_user "${HADOOP_SUBCMD}"
 
 if [[ ${HADOOP_SLAVE_MODE} = true ]]; then
-  hadoop_common_slave_mode_execute "${HADOOP_HDFS_HOME}/bin/hdfs" "${HADOOP_USER_PARAMS[@]}"
+  hadoop_common_slave_mode_execute "${HADOOP_HDFS_HOME}/bin/hadoop" "${HADOOP_USER_PARAMS[@]}"
   exit $?
 fi
 
@@ -252,7 +254,7 @@ if [[ -n "${HADOOP_SUBCMD_SUPPORTDAEMONIZATION}" ]]; then
       "${priv_pidfile}" \
       "${priv_outfile}" \
       "${priv_errfile}" \
-      "$@"
+      "${HADOOP_SUBCMD_ARGS[@]}"
   else
     hadoop_daemon_handler \
       "${HADOOP_DAEMON_MODE}" \
@@ -260,10 +262,10 @@ if [[ -n "${HADOOP_SUBCMD_SUPPORTDAEMONIZATION}" ]]; then
       "${HADOOP_CLASSNAME}" \
       "${daemon_pidfile}" \
       "${daemon_outfile}" \
-      "$@"
+      "${HADOOP_SUBCMD_ARGS[@]}"
   fi
   exit $?
 else
   # shellcheck disable=SC2086
-  hadoop_java_exec "${HADOOP_SUBCMD}" "${HADOOP_CLASSNAME}" "$@"
-fi
+  hadoop_java_exec "${HADOOP_SUBCMD}" "${HADOOP_CLASSNAME}" "${HADOOP_SUBCMD_ARGS[@]}"
+fi

+ 12 - 0
hadoop-common-project/hadoop-common/src/site/markdown/UnixShellGuide.md

@@ -165,6 +165,14 @@ This is the name of the Java class to execute.
 
 This is the name of the script that is being executed.  It will be one of hadoop, hdfs, mapred, or yarn.
 
+* HADOOP\_SUBCMD
+
+This is the subcommand that was passed on the command line.
+
+* HADOOP\_SUBCMD\_ARGS
+
+This array contains the argument list remaining after the Apache Hadoop common argument processing has taken place; it is the same list that is passed to the subcommand function as arguments.  For example, if `hadoop --debug subcmd 1 2 3` has been executed on the command line, then `${HADOOP_SUBCMD_ARGS[0]}` will be 1 and `hadoop_subcommand_subcmd` will also have $1 equal to 1.  This array MAY be modified by subcommand functions to add or delete values from the argument list for further processing.
+
 * HADOOP\_SUBCMD\_SECURESERVICE
 
 If this command should/will be executed as a secure daemon, set this to true.
@@ -176,3 +184,7 @@ If this command should/will be executed as a secure daemon, set the user name to
 * HADOOP\_SUBCMD\_SUPPORTDAEMONIZATION
 
 If this command can be executed as a daemon, set this to true.
+
+* HADOOP\_USER\_PARAMS
+
+This is the full content of the command line, prior to any parsing being done. It will contain flags such as `--debug`.  It MUST NOT be manipulated.

+ 22 - 9
hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs

@@ -276,10 +276,12 @@ fi
 HADOOP_SUBCMD=$1
 shift
 
+HADOOP_SUBCMD_ARGS=("$@")
+
 if declare -f hdfs_subcommand_"${HADOOP_SUBCMD}" >/dev/null 2>&1; then
-  "hdfs_subcommand_${HADOOP_SUBCMD}" "$@"
+  "hdfs_subcommand_${HADOOP_SUBCMD}" "${HADOOP_SUBCMD_ARGS[@]}"
 else
-  hdfscmd_case "${HADOOP_SUBCMD}" "$@"
+  hdfscmd_case "${HADOOP_SUBCMD}" "${HADOOP_SUBCMD_ARGS[@]}"
 fi
 
 hadoop_verify_user "${HADOOP_SUBCMD}"
@@ -320,15 +322,26 @@ hadoop_finalize
 if [[ -n "${HADOOP_SUBCMD_SUPPORTDAEMONIZATION}" ]]; then
   if [[ -n "${HADOOP_SUBCMD_SECURESERVICE}" ]]; then
     hadoop_secure_daemon_handler \
-    "${HADOOP_DAEMON_MODE}" "${HADOOP_SUBCMD}" "${HADOOP_CLASSNAME}"\
-    "${daemon_pidfile}" "${daemon_outfile}" \
-    "${priv_pidfile}" "${priv_outfile}" "${priv_errfile}" "$@"
+      "${HADOOP_DAEMON_MODE}" \
+      "${HADOOP_SUBCMD}" \
+      "${HADOOP_CLASSNAME}" \
+      "${daemon_pidfile}" \
+      "${daemon_outfile}" \
+      "${priv_pidfile}" \
+      "${priv_outfile}" \
+      "${priv_errfile}" \
+      "${HADOOP_SUBCMD_ARGS[@]}"
   else
-    hadoop_daemon_handler "${HADOOP_DAEMON_MODE}" "${HADOOP_SUBCMD}" "${HADOOP_CLASSNAME}"\
-    "${daemon_pidfile}" "${daemon_outfile}" "$@"
+    hadoop_daemon_handler \
+      "${HADOOP_DAEMON_MODE}" \
+      "${HADOOP_SUBCMD}" \
+      "${HADOOP_CLASSNAME}" \
+      "${daemon_pidfile}" \
+      "${daemon_outfile}" \
+      "${HADOOP_SUBCMD_ARGS[@]}"
   fi
   exit $?
 else
   # shellcheck disable=SC2086
-  hadoop_java_exec "${HADOOP_SUBCMD}" "${HADOOP_CLASSNAME}" "$@"
-fi
+  hadoop_java_exec "${HADOOP_SUBCMD}" "${HADOOP_CLASSNAME}" "${HADOOP_SUBCMD_ARGS[@]}"
+fi

+ 7 - 5
hadoop-mapreduce-project/bin/mapred

@@ -141,10 +141,12 @@ fi
 HADOOP_SUBCMD=$1
 shift
 
+HADOOP_SUBCMD_ARGS=("$@")
+
 if declare -f mapred_subcommand_"${HADOOP_SUBCMD}" >/dev/null 2>&1; then
-  "mapred_subcommand_${HADOOP_SUBCMD}" "$@"
+  "mapred_subcommand_${HADOOP_SUBCMD}" "${HADOOP_SUBCMD_ARGS[@]}"
 else
-  mapredcmd_case "${HADOOP_SUBCMD}" "$@"
+  mapredcmd_case "${HADOOP_SUBCMD}" "${HADOOP_SUBCMD_ARGS[@]}"
 fi
 
 hadoop_verify_user "${HADOOP_SUBCMD}"
@@ -189,7 +191,7 @@ if [[ -n "${HADOOP_SUBCMD_SUPPORTDAEMONIZATION}" ]]; then
       "${priv_pidfile}" \
       "${priv_outfile}" \
       "${priv_errfile}" \
-      "$@"
+      "${HADOOP_SUBCMD_ARGS[@]}"
   else
     hadoop_daemon_handler \
       "${HADOOP_DAEMON_MODE}" \
@@ -197,9 +199,9 @@ if [[ -n "${HADOOP_SUBCMD_SUPPORTDAEMONIZATION}" ]]; then
       "${HADOOP_CLASSNAME}" \
       "${daemon_pidfile}" \
       "${daemon_outfile}" \
-      "$@"
+      "${HADOOP_SUBCMD_ARGS[@]}"
   fi
   exit $?
 else
-  hadoop_java_exec "${HADOOP_SUBCMD}" "${HADOOP_CLASSNAME}" "$@"
+  hadoop_java_exec "${HADOOP_SUBCMD}" "${HADOOP_CLASSNAME}" "${HADOOP_SUBCMD_ARGS[@]}"
 fi

+ 7 - 5
hadoop-yarn-project/hadoop-yarn/bin/yarn

@@ -253,11 +253,12 @@ fi
 HADOOP_SUBCMD=$1
 shift
 
+HADOOP_SUBCMD_ARGS=("$@")
 
 if declare -f yarn_subcommand_"${HADOOP_SUBCMD}" >/dev/null 2>&1; then
-  "yarn_subcommand_${HADOOP_SUBCMD}" "$@"
+  "yarn_subcommand_${HADOOP_SUBCMD}" "${HADOOP_SUBCMD_ARGS[@]}"
 else
-  yarncmd_case "${HADOOP_SUBCMD}" "$@"
+  yarncmd_case "${HADOOP_SUBCMD}" "${HADOOP_SUBCMD_ARGS[@]}"
 fi
 
 hadoop_verify_user "${HADOOP_SUBCMD}"
@@ -301,7 +302,7 @@ if [[ -n "${HADOOP_SUBCMD_SUPPORTDAEMONIZATION}" ]]; then
       "${priv_pidfile}" \
       "${priv_outfile}" \
       "${priv_errfile}" \
-      "$@"
+      "${HADOOP_SUBCMD_ARGS[@]}"
   else
     hadoop_daemon_handler \
       "${HADOOP_DAEMON_MODE}" \
@@ -309,9 +310,10 @@ if [[ -n "${HADOOP_SUBCMD_SUPPORTDAEMONIZATION}" ]]; then
       "${HADOOP_CLASSNAME}" \
       "${daemon_pidfile}" \
       "${daemon_outfile}" \
-      "$@"
+      "${HADOOP_SUBCMD_ARGS[@]}"
   fi
   exit $?
 else
-  hadoop_java_exec "${HADOOP_SUBCMD}" "${HADOOP_CLASSNAME}" "$@"
+  # shellcheck disable=SC2086
+  hadoop_java_exec "${HADOOP_SUBCMD}" "${HADOOP_CLASSNAME}" "${HADOOP_SUBCMD_ARGS[@]}"
 fi