
HADOOP-9253. Capture ulimit info in the logs at service start time. Contributed by Arpit Gupta.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-1@1443525 13f79535-47bb-0310-9956-ffa450edef68
Suresh Srinivas, 12 years ago
commit 898ef1bb8a
2 changed files with 16 additions and 3 deletions
  1. CHANGES.txt (+3, -1)
  2. bin/hadoop-daemon.sh (+13, -2)

+ 3 - 1
CHANGES.txt

@@ -61,7 +61,6 @@ Release 1.2.0 - unreleased
     NetworkTopology with NodeGroup and use generic code for choosing datanode
     in Balancer.  (Junping Du via szetszwo)
 
-
     HDFS-4256 Backport concatenation of files into a single file to branch-1
     (sanjay Radia)
 
@@ -169,6 +168,9 @@ Release 1.2.0 - unreleased
     MAPREDUCE-4838. Add additional fields like Locality, Avataar to the 
     JobHistory logs. (Arun C Murthy and Zhijie Shen via sseth)
 
+    HADOOP-9253. Capture ulimit info in the logs at service start time.
+    (Arpit Gupta via suresh)
+
   OPTIMIZATIONS
 
     HDFS-2533. Backport: Remove needless synchronization on some FSDataSet

+ 13 - 2
bin/hadoop-daemon.sh

@@ -76,7 +76,8 @@ fi
 if [ "$command" == "datanode" ] && [ "$EUID" -eq 0 ] && [ -n "$HADOOP_SECURE_DN_USER" ]; then
   export HADOOP_PID_DIR=$HADOOP_SECURE_DN_PID_DIR
   export HADOOP_LOG_DIR=$HADOOP_SECURE_DN_LOG_DIR
-  export HADOOP_IDENT_STRING=$HADOOP_SECURE_DN_USER   
+  export HADOOP_IDENT_STRING=$HADOOP_SECURE_DN_USER  
+  starting_secure_dn="true"
 fi
 
 if [ "$HADOOP_IDENT_STRING" = "" ]; then
@@ -135,7 +136,17 @@ case $startStop in
     cd "$HADOOP_PREFIX"
     nohup nice -n $HADOOP_NICENESS "$HADOOP_PREFIX"/bin/hadoop --config $HADOOP_CONF_DIR $command "$@" > "$log" 2>&1 < /dev/null &
     echo $! > $pid
-    sleep 1; head "$log"
+    sleep 1
+    # capture the ulimit output
+    if [ "true" = "$starting_secure_dn" ]; then
+      echo "ulimit -a for secure datanode user $HADOOP_SECURE_DN_USER" >> $log
+      # capture the ulimit info for the appropriate user
+      su --shell=/bin/bash $HADOOP_SECURE_DN_USER -c 'ulimit -a' >> $log 2>&1
+    else
+      echo "ulimit -a for user $USER" >> $log
+      ulimit -a >> $log 2>&1
+    fi
+    head -30 "$log"
     ;;
 
   (stop)
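
A minimal sketch of how the new behavior shows up, assuming the usual .out naming convention hadoop-$HADOOP_IDENT_STRING-$command-$HOSTNAME.out under $HADOOP_LOG_DIR (the daemon name and host below are illustrative):

    # start a daemon; hadoop-daemon.sh now appends "ulimit -a for user $USER"
    # (or the secure datanode user) plus the ulimit -a output to the .out log
    bin/hadoop-daemon.sh --config "$HADOOP_CONF_DIR" start namenode

    # the first lines of the .out file now include the limits in effect at start time
    head -30 "$HADOOP_LOG_DIR/hadoop-$USER-namenode-$(hostname).out"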