
Merge -r1174369:1174370 to fix HADOOP-7658

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.20-security@1204703 13f79535-47bb-0310-9956-ffa450edef68
Giridharan Kesavan 13 years ago
parent commit 40c112df01

+ 3 - 0
CHANGES.txt

@@ -433,6 +433,9 @@ Release 0.20.205.0 - 2011.10.06
 
     MAPREDUCE-3076. Annotate o.a.h.mapreduce.TestSleepJob with @Ignore since it
     is not a junit test.  (acmurthy via szetszwo)
+
+    HADOOP-7658. Fix hadoop config template for secured and unsecured
+    installation (Eric Yang via gkesavan)
 
     HADOOP-7645. HTTP auth tests requiring Kerberos infrastructure are not
     disabled on branch-0.20-security. (jitendra)

+ 2 - 0
src/packages/hadoop-setup-conf.sh

@@ -422,11 +422,13 @@ if [ "${SECURITY_TYPE}" = "kerberos" ]; then
   HADOOP_DN_ADDR="0.0.0.0:1019"
   HADOOP_DN_HTTP_ADDR="0.0.0.0:1022"
   SECURITY="true"
+  HADOOP_SECURE_DN_USER=${HADOOP_HDFS_USER}
 else
   TASK_CONTROLLER="org.apache.hadoop.mapred.DefaultTaskController"
   HADOOP_DN_ADDR="0.0.0.0:50010"
   HADOOP_DN_HTTP_ADDR="0.0.0.0:50075"
   SECURITY="false"
+  HADOOP_SECURE_DN_USER=""
 fi
 
 #unset env vars
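
For context, a minimal sketch of the behaviour this hunk introduces in hadoop-setup-conf.sh: HADOOP_SECURE_DN_USER is populated from HADOOP_HDFS_USER only when a Kerberos-secured install is requested, and is cleared otherwise. The sed-based template expansion at the end is illustrative only; the real script's substitution mechanism is not shown in this diff, and the default values below are placeholders.

#!/bin/bash
# Sketch only: reproduces the branch added by this change, not the full script.
# SECURITY_TYPE and HADOOP_HDFS_USER are assumed to be set earlier by the
# real hadoop-setup-conf.sh (defaults here are placeholders for illustration).
SECURITY_TYPE=${SECURITY_TYPE:-simple}
HADOOP_HDFS_USER=${HADOOP_HDFS_USER:-hdfs}

if [ "${SECURITY_TYPE}" = "kerberos" ]; then
  # Secured install: the datanode drops privileges to the HDFS user.
  HADOOP_SECURE_DN_USER=${HADOOP_HDFS_USER}
else
  # Unsecured install: leave the variable empty so secure-datanode
  # start-up logic is not triggered.
  HADOOP_SECURE_DN_USER=""
fi

# Illustrative expansion of the hadoop-env.sh template (assumed mechanism,
# not necessarily how the real script performs substitution):
sed "s|\${HADOOP_SECURE_DN_USER}|${HADOOP_SECURE_DN_USER}|g" \
  src/packages/templates/conf/hadoop-env.sh > /tmp/hadoop-env.sh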

+ 1 - 1
src/packages/templates/conf/hadoop-env.sh

@@ -29,7 +29,7 @@ export HADOOP_CLIENT_OPTS="-Xmx128m $HADOOP_CLIENT_OPTS"
 #HADOOP_JAVA_PLATFORM_OPTS="-XX:-UsePerfData $HADOOP_JAVA_PLATFORM_OPTS"
 
 # On secure datanodes, user to run the datanode as after dropping privileges
-export HADOOP_SECURE_DN_USER=${HADOOP_HDFS_USER}
+export HADOOP_SECURE_DN_USER=${HADOOP_SECURE_DN_USER}
 
 # Where log files are stored.  $HADOOP_HOME/logs by default.
 export HADOOP_LOG_DIR=${HADOOP_LOG_DIR}/$USER
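
The template now reads the dedicated HADOOP_SECURE_DN_USER variable instead of unconditionally reusing HADOOP_HDFS_USER, so an unsecured install ends up with an empty value in the generated hadoop-env.sh. A hedged illustration of why that matters follows; the actual check lives in the daemon start scripts and may differ from this sketch.

#!/bin/bash
# Illustration only: the start scripts are assumed to treat a non-empty
# HADOOP_SECURE_DN_USER as "start the datanode via jsvc and drop privileges
# to this user"; this is not the literal logic of hadoop-daemon.sh.
if [ -n "${HADOOP_SECURE_DN_USER}" ]; then
  echo "secure datanode: dropping privileges to ${HADOOP_SECURE_DN_USER}"
else
  echo "unsecured datanode: starting as the current user"
fi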