
commit fa353840f08503d08008d9b9e9e946b6a00bbc18
Author: Devaraj Das <ddas@yahoo-inc.com>
Date: Thu May 20 15:29:58 2010 -0700

HDFS-1150 from https://issues.apache.org/jira/secure/attachment/12445111/HDFS-1150-BF-Y20-LOG-DIRS-2.patch

+++ b/YAHOO-CHANGES.txt
+ HDFS-1150. Fixes the earlier patch to do logging in the right directory
+ and also adds facility for monitoring processes (via -Dprocname in the
+ command line). (Jakob Homan via ddas)
+


git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.20-security-patches@1077478 13f79535-47bb-0310-9956-ffa450edef68

Committed by Owen O'Malley (parent commit df7c221523)

2 changed files, 35 insertions(+), 25 deletions(-):
  bin/hadoop           | +23 -21
  bin/hadoop-daemon.sh | +12 -4
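
The monitoring facility mentioned in the change log works by injecting a -Dproc_<command> system property into the daemon's command line (see the jsvc and java exec lines below), which gives external tools a stable marker to search for. As a rough sketch of how that marker could be used for a datanode started with -Dproc_datanode (the pgrep pattern and messages are illustrative assumptions, not part of the patch):

  # Sketch: locate a running datanode by the -Dproc_ marker on its command line.
  if pgrep -f proc_datanode > /dev/null; then
    echo "datanode is running"
  else
    echo "datanode is not running" >&2
  fi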

--- a/bin/hadoop
+++ b/bin/hadoop

@@ -91,6 +91,14 @@ if [ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]; then
   . "${HADOOP_CONF_DIR}/hadoop-env.sh"
 fi
 
+# Determine if we're starting a secure datanode, and if so, redefine appropriate variables
+if [ "$COMMAND" == "datanode" ] && [ "$EUID" -eq 0 ] && [ -n "$HADOOP_SECURE_DN_USER" ]; then
+  HADOOP_PID_DIR=$HADOOP_SECURE_DN_PID_DIR
+  HADOOP_LOG_DIR=$HADOOP_SECURE_DN_LOG_DIR
+  HADOOP_IDENT_STRING=$HADOOP_SECURE_DN_USER
+  starting_secure_dn="true"
+fi
+
 # some Java parameters
 if [ "$JAVA_HOME" != "" ]; then
   #echo "run java in $JAVA_HOME"
@@ -304,28 +312,22 @@ fi
 HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.policy.file=$HADOOP_POLICYFILE"
 
 # Check to see if we should start a secure datanode
-if [ "$COMMAND" = "datanode" ]; then
-  if [[ $EUID -eq 0 ]]; then
-    if [ "$HADOOP_SECURE_DN_USER" = "" ]; then
-      HADOOP_SECURE_DN_USER="hdfs"
-    fi
-
-    if [ "$HADOOP_PID_DIR" = "" ]; then
-      HADOOP_SECURE_DN_PID="/tmp/hadoop_secure_dn.pid"
-    else
-      HADOOP_SECURE_DN_PID="$HADOOP_PID_DIR/hadoop_secure_dn.pid"
-    fi
-
-    exec "jsvc" -outfile "$HADOOP_LOG_DIR/jsvc.out" \
-                -errfile "$HADOOP_LOG_DIR/jsvc.err" \
-                -pidfile "$HADOOP_SECURE_DN_PID" \
-                -nodetach \
-                -user "$HADOOP_SECURE_DN_USER" \
-                -cp "$CLASSPATH" \
-                $JAVA_HEAP_MAX $HADOOP_OPTS \
-                org.apache.hadoop.hdfs.server.datanode.SecureDataNodeStarter "$@"
+if [ "$starting_secure_dn" = "true" ]; then
+  if [ "$HADOOP_PID_DIR" = "" ]; then
+    HADOOP_SECURE_DN_PID="/tmp/hadoop_secure_dn.pid"
+  else
+    HADOOP_SECURE_DN_PID="$HADOOP_PID_DIR/hadoop_secure_dn.pid"
   fi
+
+  exec "$HADOOP_HOME/bin/jsvc" -Dproc_$COMMAND -outfile "$HADOOP_LOG_DIR/jsvc.out" \
+                                               -errfile "$HADOOP_LOG_DIR/jsvc.err" \
+                                               -pidfile "$HADOOP_SECURE_DN_PID" \
+                                               -nodetach \
+                                               -user "$HADOOP_SECURE_DN_USER" \
+                                               -cp "$CLASSPATH" \
+                                               $JAVA_HEAP_MAX $HADOOP_OPTS \
+                                               org.apache.hadoop.hdfs.server.datanode.SecureDataNodeStarter "$@"
 else
   # run it
-  exec "$JAVA" $JAVA_HEAP_MAX $HADOOP_OPTS -classpath "$CLASSPATH" $CLASS "$@"
+  exec "$JAVA" -Dproc_$COMMAND $JAVA_HEAP_MAX $HADOOP_OPTS -classpath "$CLASSPATH" $CLASS "$@"
 fi
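
For a secure start, the exec above ends up launching jsvc with the process marker and the redirected directories. Roughly, using the illustrative values from the hadoop-env.sh sketch earlier (paths and heap size are assumptions; the flags and class name come straight from the script):

  # Approximate expansion of the jsvc exec for COMMAND=datanode (sketch only)
  "$HADOOP_HOME/bin/jsvc" -Dproc_datanode \
      -outfile /var/log/hadoop/hdfs/jsvc.out \
      -errfile /var/log/hadoop/hdfs/jsvc.err \
      -pidfile /var/run/hadoop/hadoop_secure_dn.pid \
      -nodetach \
      -user hdfs \
      -cp "$CLASSPATH" \
      -Xmx1000m $HADOOP_OPTS \
      org.apache.hadoop.hdfs.server.datanode.SecureDataNodeStarter

jsvc starts as root so the datanode can bind privileged ports and then drops to the -user account, which is why this path is only taken when EUID is 0.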

--- a/bin/hadoop-daemon.sh
+++ b/bin/hadoop-daemon.sh

@@ -68,20 +68,28 @@ if [ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]; then
   . "${HADOOP_CONF_DIR}/hadoop-env.sh"
 fi
 
+# Determine if we're starting a secure datanode, and if so, redefine appropriate variables
+if [ "$command" == "datanode" ] && [ "$EUID" -eq 0 ] && [ -n "$HADOOP_SECURE_DN_USER" ]; then
+  export HADOOP_PID_DIR=$HADOOP_SECURE_DN_PID_DIR
+  export HADOOP_LOG_DIR=$HADOOP_SECURE_DN_LOG_DIR
+  export HADOOP_IDENT_STRING=$HADOOP_SECURE_DN_USER   
+fi
+
+if [ "$HADOOP_IDENT_STRING" = "" ]; then
+  export HADOOP_IDENT_STRING="$USER"
+fi
+
 # get log directory
 if [ "$HADOOP_LOG_DIR" = "" ]; then
   export HADOOP_LOG_DIR="$HADOOP_HOME/logs"
 fi
 mkdir -p "$HADOOP_LOG_DIR"
+chown $HADOOP_IDENT_STRING $HADOOP_LOG_DIR 
 
 if [ "$HADOOP_PID_DIR" = "" ]; then
   HADOOP_PID_DIR=/tmp
 fi
 
-if [ "$HADOOP_IDENT_STRING" = "" ]; then
-  export HADOOP_IDENT_STRING="$USER"
-fi
-
 # some variables
 export HADOOP_LOGFILE=hadoop-$HADOOP_IDENT_STRING-$command-$HOSTNAME.log
 export HADOOP_ROOT_LOGGER="INFO,DRFA"
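
With these changes hadoop-daemon.sh likewise switches to the secure pid/log directories and chowns the log directory to the secure user before the daemon writes anything there. A minimal invocation sketch, assuming hadoop-env.sh sets the HADOOP_SECURE_DN_* variables as in the example above (user, paths, and hostname remain assumptions):

  # Start the secure datanode as root; the script detects EUID 0 plus
  # HADOOP_SECURE_DN_USER and redirects pid/log dirs before launching.
  sudo bin/hadoop-daemon.sh start datanode

  # Example resulting log file under HADOOP_SECURE_DN_LOG_DIR,
  # following the hadoop-$HADOOP_IDENT_STRING-$command-$HOSTNAME.log pattern:
  #   /var/log/hadoop/hdfs/hadoop-hdfs-datanode-host1.log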