
HADOOP-8316. svn merge -c 1337334 from trunk

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1337336 13f79535-47bb-0310-9956-ffa450edef68
Eli Collins, 13 years ago
Commit 42b6b5fcca

hadoop-common-project/hadoop-common/CHANGES.txt  +2 -0

@@ -376,6 +376,8 @@ Release 2.0.0 - UNRELEASED
     HADOOP-7868. Hadoop native fails to compile when default linker
     option is -Wl,--as-needed. (Trevor Robinson via eli)
 
+    HADOOP-8316. Audit logging should be disabled by default. (eli)
+
 Release 0.23.3 - UNRELEASED
 
   INCOMPATIBLE CHANGES

hadoop-common-project/hadoop-common/src/main/conf/log4j.properties  +3 -3

@@ -102,7 +102,7 @@ log4j.appender.TLA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
 #
 #Security appender
 #
-hadoop.security.logger=INFO,console
+hadoop.security.logger=INFO,NullAppender
 hadoop.security.log.maxfilesize=256MB
 hadoop.security.log.maxbackupindex=20
 log4j.category.SecurityLogger=${hadoop.security.logger}
@@ -126,7 +126,7 @@ log4j.appender.DRFAS.DatePattern=.yyyy-MM-dd
 #
 # hdfs audit logging
 #
-hdfs.audit.logger=INFO,console
+hdfs.audit.logger=INFO,NullAppender
 hdfs.audit.log.maxfilesize=256MB
 hdfs.audit.log.maxbackupindex=20
 log4j.logger.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=${hdfs.audit.logger}
@@ -141,7 +141,7 @@ log4j.appender.RFAAUDIT.MaxBackupIndex=${hdfs.audit.log.maxbackupindex}
 #
 # mapred audit logging
 #
-mapred.audit.logger=INFO,console
+mapred.audit.logger=INFO,NullAppender
 mapred.audit.log.maxfilesize=256MB
 mapred.audit.log.maxbackupindex=20
 log4j.logger.org.apache.hadoop.mapred.AuditLogger=${mapred.audit.logger}
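
Note: with this change the HDFS audit log is silenced by default. Re-enabling it in log4j.properties is a one-line change, pointing the audit logger back at the RFAAUDIT rolling-file appender that is already configured further down in this same file (a sketch only; adjust the level and appender to taste):

    # Route HDFS audit events to the rolling-file audit appender
    # instead of the NullAppender default.
    hdfs.audit.logger=INFO,RFAAUDIT
    # The existing logger binding then picks up the new value:
    log4j.logger.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=${hdfs.audit.logger}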

hadoop-common-project/hadoop-common/src/main/packages/templates/conf/hadoop-env.sh  +2 -2

@@ -45,10 +45,10 @@ done
 export HADOOP_OPTS="-Djava.net.preferIPv4Stack=true $HADOOP_CLIENT_OPTS"
 
 # Command specific options appended to HADOOP_OPTS when specified
-export HADOOP_NAMENODE_OPTS="-Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,RFAS} -Dhdfs.audit.logger=INFO,RFAAUDIT $HADOOP_NAMENODE_OPTS"
+export HADOOP_NAMENODE_OPTS="-Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,RFAS} -Dhdfs.audit.logger=INFO,NullAppender $HADOOP_NAMENODE_OPTS"
 export HADOOP_DATANODE_OPTS="-Dhadoop.security.logger=ERROR,RFAS $HADOOP_DATANODE_OPTS"
 
-export HADOOP_SECONDARYNAMENODE_OPTS="-Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,RFAS} -Dhdfs.audit.logger=INFO,RFAAUDIT $HADOOP_SECONDARYNAMENODE_OPTS"
+export HADOOP_SECONDARYNAMENODE_OPTS="-Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,RFAS} -Dhdfs.audit.logger=INFO,NullAppender $HADOOP_SECONDARYNAMENODE_OPTS"
 
 # The following applies to multiple commands (fs, dfs, fsck, distcp etc)
 export HADOOP_CLIENT_OPTS="-Xmx128m $HADOOP_CLIENT_OPTS"
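
Note: because hadoop-env.sh passes hdfs.audit.logger to the NameNode as a JVM system property, operators can keep the old behaviour without touching log4j.properties by overriding the value in their own hadoop-env.sh. A minimal sketch, reinstating the line this commit removes:

    # Re-enable NameNode audit logging by overriding the system property
    # that log4j.properties substitutes into ${hdfs.audit.logger}.
    export HADOOP_NAMENODE_OPTS="-Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,RFAS} -Dhdfs.audit.logger=INFO,RFAAUDIT $HADOOP_NAMENODE_OPTS"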

hadoop-common-project/hadoop-common/src/main/packages/templates/conf/log4j.properties  +12 -9

@@ -1,9 +1,12 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
+# Copyright 2011 The Apache Software Foundation
+# 
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
 #
 #     http://www.apache.org/licenses/LICENSE-2.0
 #
@@ -99,7 +102,7 @@ log4j.appender.TLA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
 #
 #Security appender
 #
-hadoop.security.logger=INFO,console
+hadoop.security.logger=INFO,NullAppender
 hadoop.security.log.maxfilesize=256MB
 hadoop.security.log.maxbackupindex=20
 log4j.category.SecurityLogger=${hadoop.security.logger}
@@ -123,7 +126,7 @@ log4j.appender.DRFAS.DatePattern=.yyyy-MM-dd
 #
 # hdfs audit logging
 #
-hdfs.audit.logger=INFO,console
+hdfs.audit.logger=INFO,NullAppender
 hdfs.audit.log.maxfilesize=256MB
 hdfs.audit.log.maxbackupindex=20
 log4j.logger.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=${hdfs.audit.logger}
@@ -138,7 +141,7 @@ log4j.appender.RFAAUDIT.MaxBackupIndex=${hdfs.audit.log.maxbackupindex}
 #
 # mapred audit logging
 #
-mapred.audit.logger=INFO,console
+mapred.audit.logger=INFO,NullAppender
 mapred.audit.log.maxfilesize=256MB
 mapred.audit.log.maxbackupindex=20
 log4j.logger.org.apache.hadoop.mapred.AuditLogger=${mapred.audit.logger}
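
Note: the INFO,NullAppender default only silences these loggers if an appender named NullAppender is actually defined. The stock Hadoop log4j.properties binds it to log4j's built-in no-op appender; if you maintain a customized copy of this file, make sure an equivalent definition is present. The definition is outside this diff, so the line below is shown as an assumption:

    # No-op appender used by the disabled-by-default security/audit loggers
    log4j.appender.NullAppender=org.apache.log4j.varia.NullAppender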