
AMBARI-7119. log4j does not get used by hadoop as settings are present in hadoop.config.sh (aonishuk)

Andrew Onishuk committed 11 years ago
parent commit 15e4032038
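
Reviewer note: the underlying issue is that Hadoop's launcher scripts always pass -Dhadoop.root.logger=${HADOOP_ROOT_LOGGER:-INFO,console} to the JVM (via hadoop-config.sh), so the hadoop.root.logger default in the log4j config is effectively ignored; this change makes the exported HADOOP_ROOT_LOGGER configurable through hadoop-env instead. A minimal Python sketch of that precedence (not Ambari code; behaviour assumed from the stock Hadoop scripts):

    # How the daemon's root logger is effectively resolved: the value exported in
    # hadoop-env.sh (now templated from the new hadoop_root_logger property) wins,
    # and the script-side fallback is INFO,console, so log4j's own default never applies.
    def effective_root_logger(env):
        shell_fallback = "INFO,console"   # default applied by hadoop-config.sh when unset
        return env.get("HADOOP_ROOT_LOGGER", shell_fallback)

    print(effective_root_logger({}))                                  # INFO,console
    print(effective_root_logger({"HADOOP_ROOT_LOGGER": "INFO,RFA"}))  # INFO,RFA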

+ 4 - 0
ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java

@@ -769,6 +769,10 @@ public class UpgradeCatalog170 extends AbstractUpgradeCatalog {
         false);
 
     updateConfigurationProperties("sqoop-env", Collections.singletonMap("sqoop_user", "sqoop"), false, false);
+
+    updateConfigurationProperties("hadoop-env",
+            Collections.singletonMap("hadoop_root_logger", "INFO,RFA"), false,
+            false);
   }
 
   /**

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/params.py

@@ -33,6 +33,7 @@ hadoop_conf_dir = "/etc/hadoop/conf"
 hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
 hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefix']
 hadoop_pid_dir_prefix = config['configurations']['hadoop-env']['hadoop_pid_dir_prefix']
+hadoop_root_logger = config['configurations']['hadoop-env']['hadoop_root_logger']
 hadoop_env_sh_template = config['configurations']['hadoop-env']['content']
 
 #hadoop-env.sh
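
The new params.hadoop_root_logger value reaches the daemons through the hadoop-env 'content' template: the after-INSTALL hook renders hadoop-env.sh from hadoop_env_sh_template, substituting {{hadoop_root_logger}}. A rough sketch of that rendering step, which runs inside the Ambari agent's hook environment (the exact call site is outside this diff; the File/InlineTemplate usage follows the usual Ambari pattern, and hdfs_user is assumed to be defined in params.py):

    import os
    import params
    from resource_management import File, InlineTemplate  # Ambari agent library

    # Render the templated hadoop-env content; InlineTemplate fills in
    # {{hadoop_root_logger}} and the other placeholders from the params scope.
    File(os.path.join(params.hadoop_conf_dir, "hadoop-env.sh"),
         owner=params.hdfs_user,  # assumed param, not shown in this diff
         content=InlineTemplate(params.hadoop_env_sh_template))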

+ 8 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/configuration/hadoop-env.xml

@@ -31,6 +31,11 @@
     <value>/var/run/hadoop</value>
     <description>Hadoop PID Dir Prefix</description>
   </property>
+  <property>
+    <name>hadoop_root_logger</name>
+    <value>INFO,RFA</value>
+    <description>Hadoop Root Logger</description>
+  </property>
   <property>
     <name>hadoop_heapsize</name>
     <value>1024</value>
@@ -209,6 +214,9 @@ export HADOOP_LIBEXEC_DIR={{hadoop_libexec_dir}}
 
 #Mostly required for hadoop 2.0
 export JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}:/usr/lib/hadoop/lib/native/Linux-amd64-64
+
+#Hadoop logging options
+export HADOOP_ROOT_LOGGER={{hadoop_root_logger}}
     </value>
   </property>
   

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/configuration/hdfs-log4j.xml

@@ -45,6 +45,7 @@
 # limitations under the License.
 
 # Define some default values that can be overridden by system properties
+# To change daemon root logger use hadoop_root_logger in hadoop-env
 hadoop.root.logger=INFO,console
 hadoop.log.dir=.
 hadoop.log.file=hadoop.log
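
The comment added above points operators at hadoop_root_logger in hadoop-env; once that value is rendered, the export in hadoop-env.sh is what the daemons actually pick up. A small sanity check one could run on a managed host after the change (the conf path and the INFO,RFA default are assumptions):

    # Verify the rendered file pins the daemon root logger as expected.
    with open("/etc/hadoop/conf/hadoop-env.sh") as f:
        hadoop_env = f.read()

    assert "export HADOOP_ROOT_LOGGER=INFO,RFA" in hadoop_env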

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py

@@ -101,6 +101,7 @@ hadoop_pid_dir_prefix = status_params.hadoop_pid_dir_prefix
 hadoop_bin = "/usr/lib/hadoop/bin"
 
 hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefix']
+hadoop_root_logger = config['configurations']['hadoop-env']['hadoop_root_logger']
 
 dfs_domain_socket_path = "/var/lib/hadoop-hdfs/dn_socket"
 dfs_domain_socket_dir = os.path.dirname(dfs_domain_socket_path)

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py

@@ -33,6 +33,7 @@ hadoop_conf_dir = "/etc/hadoop/conf"
 hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
 hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefix']
 hadoop_pid_dir_prefix = config['configurations']['hadoop-env']['hadoop_pid_dir_prefix']
+hadoop_root_logger = config['configurations']['hadoop-env']['hadoop_root_logger']
 hadoop_env_sh_template = config['configurations']['hadoop-env']['content']
 
 #hadoop-env.sh

+ 8 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/configuration/hadoop-env.xml

@@ -31,6 +31,11 @@
     <value>/var/run/hadoop</value>
     <description>Hadoop PID Dir Prefix</description>
   </property>
+  <property>
+    <name>hadoop_root_logger</name>
+    <value>INFO,RFA</value>
+    <description>Hadoop Root Logger</description>
+  </property>
   <property>
     <name>hadoop_heapsize</name>
     <value>1024</value>
@@ -211,6 +216,9 @@ export HADOOP_LIBEXEC_DIR={{hadoop_libexec_dir}}
 
 #Mostly required for hadoop 2.0
 export JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}:/usr/lib/hadoop/lib/native/Linux-amd64-64
+
+#Hadoop logging options
+export HADOOP_ROOT_LOGGER={{hadoop_root_logger}}
     </value>
   </property>
   

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/configuration/hdfs-log4j.xml

@@ -47,6 +47,7 @@
 
 
 # Define some default values that can be overridden by system properties
+# To change daemon root logger use hadoop_root_logger in hadoop-env
 hadoop.root.logger=INFO,console
 hadoop.log.dir=.
 hadoop.log.file=hadoop.log

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py

@@ -106,6 +106,7 @@ hadoop_pid_dir_prefix = status_params.hadoop_pid_dir_prefix
 hadoop_bin = "/usr/lib/hadoop/sbin"
 
 hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefix']
+hadoop_root_logger = config['configurations']['hadoop-env']['hadoop_root_logger']
 
 dfs_domain_socket_path = config['configurations']['hdfs-site']['dfs.domain.socket.path']
 dfs_domain_socket_dir = os.path.dirname(dfs_domain_socket_path)

+ 4 - 0
ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java

@@ -300,6 +300,10 @@ public class UpgradeCatalog170Test {
     upgradeCatalog.updateConfigurationProperties("sqoop-env", Collections.singletonMap("sqoop_user", "sqoop"), false, false);
     expectLastCall();
 
+    upgradeCatalog.updateConfigurationProperties("hadoop-env",
+            Collections.singletonMap("hadoop_root_logger", "INFO,RFA"), false, false);
+    expectLastCall();
+
     expect(dbAccessor.executeSelect("SELECT role_name, user_id FROM user_roles")).andReturn(userRolesResultSet).once();
     expect(entityManager.getTransaction()).andReturn(trans).anyTimes();
     expect(entityManager.getCriteriaBuilder()).andReturn(cb).anyTimes();

+ 13 - 0
ambari-web/app/data/HDP2/site_properties.js

@@ -1761,6 +1761,19 @@ module.exports =
       "filename": "hadoop-env.xml",
       "category": "Advanced hadoop-env"
     },
+    {
+      "id": "puppet var",
+      "name": "hadoop_root_logger",
+      "displayName": "Hadoop Root Logger",
+      "description": "Hadoop logging options",
+      "defaultValue": "INFO,RFA",
+      "displayType": "string",
+      "isOverridable": false,
+      "isVisible": true,
+      "serviceName": "HDFS",
+      "filename": "hadoop-env.xml",
+      "category": "Advanced hadoop-env"
+    },
     {
       "id": "puppet var",
       "name": "security_enabled",

+ 13 - 0
ambari-web/app/data/site_properties.js

@@ -936,6 +936,19 @@ module.exports =
       "filename": "hadoop-env.xml",
       "category": "Advanced hadoop-env"
     },
+    {
+      "id": "puppet var",
+      "name": "hadoop_root_logger",
+      "displayName": "Hadoop Root Logger",
+      "description": "Hadoop logging options",
+      "defaultValue": "INFO,RFA",
+      "displayType": "string",
+      "isOverridable": false,
+      "isVisible": true,
+      "serviceName": "HDFS",
+      "filename": "hadoop-env.xml",
+      "category": "Advanced hadoop-env"
+    },
     {
       "id": "puppet var",
       "name": "namenode_opt_maxnewsize",