
HADOOP-7711. Fixed recursive sourcing of HADOOP_OPTS environment
variables (Arpit Gupta via Eric Yang)


git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.23@1178852 13f79535-47bb-0310-9956-ffa450edef68

Eric Yang 13 years ago
parent
commit
aa21b472a3
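
Background on the bug: hadoop-env.sh prepends the command-specific defaults to whatever value the corresponding *_OPTS variable already holds, so if that variable still carries the result of a previous expansion (for example after re-running the setup scripts in the same environment), the defaults are appended again on every pass. A minimal shell sketch of the effect, illustrative only and not part of the commit:

    # Simulate a value left over from an earlier expansion of hadoop-env.sh.
    HADOOP_CLIENT_OPTS="-Xmx128m"
    # The same kind of assignment hadoop-env.sh performs; it re-appends the default.
    export HADOOP_CLIENT_OPTS="-Xmx128m $HADOOP_CLIENT_OPTS"
    echo "$HADOOP_CLIENT_OPTS"    # prints: -Xmx128m -Xmx128m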

+ 3 - 0
hadoop-common-project/hadoop-common/CHANGES.txt

@@ -621,6 +621,9 @@ Release 0.23.0 - Unreleased
     HADOOP-7685. Improved directory ownership check function in 
     hadoop-setup-conf.sh. (Eric Yang)
 
+    HADOOP-7711. Fixed recursive sourcing of HADOOP_OPTS environment
+    variables (Arpit Gupta via Eric Yang)
+
 Release 0.22.0 - Unreleased
 
   INCOMPATIBLE CHANGES

+ 3 - 0
hadoop-common-project/hadoop-common/src/main/packages/hadoop-setup-conf.sh

@@ -282,6 +282,9 @@ else
   HADOOP_SECURE_DN_USER=""
 fi
 
+#unset env vars
+unset HADOOP_CLIENT_OPTS HADOOP_NAMENODE_OPTS HADOOP_JOBTRACKER_OPTS HADOOP_TASKTRACKER_OPTS HADOOP_DATANODE_OPTS HADOOP_SECONDARYNAMENODE_OPTS HADOOP_JAVA_PLATFORM_OPTS
+
 if [ "${AUTOMATED}" != "1" ]; then
   echo "Setup Hadoop Configuration"
   echo

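The new unset block clears any *_OPTS values inherited from a previously sourced hadoop-env.sh before the configuration files are regenerated, so stale expansions cannot leak into the freshly generated files. The same pattern in isolation, as a hypothetical stand-alone sketch:

    # Hypothetical illustration: without the unset, a second run would inherit
    # the already-expanded value and append the defaults again.
    export HADOOP_CLIENT_OPTS="-Xmx128m"                      # stale value from a previous run
    unset HADOOP_CLIENT_OPTS                                  # start from a clean slate
    export HADOOP_CLIENT_OPTS="-Xmx128m $HADOOP_CLIENT_OPTS"  # defaults now applied exactly once
    echo "$HADOOP_CLIENT_OPTS"                                # prints: -Xmx128m
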
+ 7 - 7
hadoop-common-project/hadoop-common/src/main/packages/templates/conf/hadoop-env.sh

@@ -41,16 +41,16 @@ done
 export HADOOP_OPTS="-Djava.net.preferIPv4Stack=true $HADOOP_CLIENT_OPTS"
 
 # Command specific options appended to HADOOP_OPTS when specified
-export HADOOP_NAMENODE_OPTS="-Dsecurity.audit.logger=INFO,DRFAS -Dhdfs.audit.logger=INFO,DRFAAUDIT ${HADOOP_NAMENODE_OPTS}"
-HADOOP_JOBTRACKER_OPTS="-Dsecurity.audit.logger=INFO,DRFAS -Dmapred.audit.logger=INFO,MRAUDIT -Dmapred.jobsummary.logger=INFO,JSA ${HADOOP_JOBTRACKER_OPTS}"
-HADOOP_TASKTRACKER_OPTS="-Dsecurity.audit.logger=ERROR,console -Dmapred.audit.logger=ERROR,console ${HADOOP_TASKTRACKER_OPTS}"
-HADOOP_DATANODE_OPTS="-Dsecurity.audit.logger=ERROR,DRFAS ${HADOOP_DATANODE_OPTS}"
+export HADOOP_NAMENODE_OPTS="-Dsecurity.audit.logger=INFO,DRFAS -Dhdfs.audit.logger=INFO,DRFAAUDIT $HADOOP_NAMENODE_OPTS"
+HADOOP_JOBTRACKER_OPTS="-Dsecurity.audit.logger=INFO,DRFAS -Dmapred.audit.logger=INFO,MRAUDIT -Dmapred.jobsummary.logger=INFO,JSA $HADOOP_JOBTRACKER_OPTS"
+HADOOP_TASKTRACKER_OPTS="-Dsecurity.audit.logger=ERROR,console -Dmapred.audit.logger=ERROR,console $HADOOP_TASKTRACKER_OPTS"
+HADOOP_DATANODE_OPTS="-Dsecurity.audit.logger=ERROR,DRFAS $HADOOP_DATANODE_OPTS"
 
-export HADOOP_SECONDARYNAMENODE_OPTS="-Dsecurity.audit.logger=INFO,DRFAS -Dhdfs.audit.logger=INFO,DRFAAUDIT ${HADOOP_SECONDARYNAMENODE_OPTS}"
+export HADOOP_SECONDARYNAMENODE_OPTS="-Dsecurity.audit.logger=INFO,DRFAS -Dhdfs.audit.logger=INFO,DRFAAUDIT $HADOOP_SECONDARYNAMENODE_OPTS"
 
 # The following applies to multiple commands (fs, dfs, fsck, distcp etc)
-export HADOOP_CLIENT_OPTS="-Xmx128m ${HADOOP_CLIENT_OPTS}"
-#HADOOP_JAVA_PLATFORM_OPTS="-XX:-UsePerfData ${HADOOP_JAVA_PLATFORM_OPTS}"
+export HADOOP_CLIENT_OPTS="-Xmx128m $HADOOP_CLIENT_OPTS"
+#HADOOP_JAVA_PLATFORM_OPTS="-XX:-UsePerfData $HADOOP_JAVA_PLATFORM_OPTS"
 
 # On secure datanodes, user to run the datanode as after dropping privileges
 export HADOOP_SECURE_DN_USER=${HADOOP_SECURE_DN_USER}
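
In plain bash, "$VAR" and "${VAR}" expand identically inside double quotes, so the template change above only matters if the file passes through a generation step that rewrites brace-form tokens into their current environment values. A minimal sketch of that kind of substitution, assuming a generator that expands only ${VAR} tokens; the function below is illustrative, not the actual code in hadoop-setup-conf.sh:

    # Hypothetical expander: substitutes ${VAR} tokens with the current
    # environment value, but leaves plain $VAR references untouched.
    expand_braced_tokens() {
      local line="$1" regex='(\$\{[A-Za-z_][A-Za-z_0-9]*\})'
      while [[ "$line" =~ $regex ]]; do
        local token="${BASH_REMATCH[1]}"
        local name="${token:2:${#token}-3}"   # strip the leading ${ and trailing }
        line="${line//$token/${!name}}"       # bake in whatever the environment holds now
      done
      printf '%s\n' "$line"
    }

    export HADOOP_DATANODE_OPTS="-Xmx1g"      # value present while generating config
    expand_braced_tokens 'OPTS="-Dfoo ${HADOOP_DATANODE_OPTS}"'   # -Xmx1g baked in at generation time
    expand_braced_tokens 'OPTS="-Dfoo $HADOOP_DATANODE_OPTS"'     # left as-is, resolved when hadoop-env.sh is sourced

With $VAR left unexpanded in the generated hadoop-env.sh, the options are composed at runtime; together with the unset in hadoop-setup-conf.sh this keeps the defaults from accumulating across regenerations.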