Browse Source

AMBARI-7197. Add rca properties to log4j.properties from Download Configs. (aonishuk)

Andrew Onishuk 11 years ago
parent
commit
7c8299b1c5

+ 16 - 7
ambari-common/src/main/python/resource_management/libraries/script/script.py

@@ -237,6 +237,20 @@ class Script(object):
     """
     self.fail_with_error('configure method isn\'t implemented')
 
+  def generate_configs_get_template_file_content(self, filename, dicts):
+    import params
+    content = ''
+    for dict in dicts.split(','):
+      if dict.strip() in params.config['configurations']:
+        content += params.config['configurations'][dict.strip()]['content']
+
+    return content
+
+  def generate_configs_get_xml_file_content(self, filename, dict):
+    import params
+    return {'configurations':params.config['configurations'][dict],
+            'configuration_attributes':params.config['configuration_attributes'][dict]}
+
   def generate_configs(self, env):
     """
     Generates config files and stores them as an archive in tmp_dir
@@ -254,17 +268,12 @@ class Script(object):
       for filename, dict in file_dict.iteritems():
         XmlConfig(filename,
                   conf_dir=conf_tmp_dir,
-                  configurations=params.config['configurations'][dict],
-                  configuration_attributes=params.config['configuration_attributes'][dict],
+                  **self.generate_configs_get_xml_file_content(filename, dict)
         )
     for file_dict in env_configs_list:
       for filename,dicts in file_dict.iteritems():
-        content = ''
-        for dict in dicts.split(','):
-          if dict.strip() in params.config['configurations']:
-            content += params.config['configurations'][dict.strip()]['content']
         File(os.path.join(conf_tmp_dir, filename),
-             content=InlineTemplate(content))
+             content=InlineTemplate(self.generate_configs_get_template_file_content(filename, dicts)))
     with closing(tarfile.open(output_filename, "w:gz")) as tar:
       tar.add(conf_tmp_dir, arcname=os.path.basename("."))
       tar.close()

+ 4 - 19
ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/params.py

@@ -134,27 +134,12 @@ mapred_local_dir = "/tmp/hadoop-mapred/mapred/local"
 dfs_hosts = default('/configurations/hdfs-site/dfs.hosts', None)
 
 #log4j.properties
-rca_properties = format('''
-ambari.jobhistory.database={ambari_db_rca_url}
-ambari.jobhistory.driver={ambari_db_rca_driver}
-ambari.jobhistory.user={ambari_db_rca_username}
-ambari.jobhistory.password={ambari_db_rca_password}
-ambari.jobhistory.logger=${{hadoop.root.logger}}
+if 'mapred-env' in config['configurations'] and 'rca_properties' in config['configurations']['mapred-env']:
+  rca_properties = format(config['configurations']['mapred-env']['rca_properties'])
 
-log4j.appender.JHA=org.apache.ambari.log4j.hadoop.mapreduce.jobhistory.JobHistoryAppender
-log4j.appender.JHA.database={ambari_db_rca_url}
-log4j.appender.JHA.driver={ambari_db_rca_driver}
-log4j.appender.JHA.user={ambari_db_rca_username}
-log4j.appender.JHA.password={ambari_db_rca_password}
-
-log4j.logger.org.apache.hadoop.mapred.JobHistory$JobHistoryLogger=DEBUG,JHA
-log4j.additivity.org.apache.hadoop.mapred.JobHistory$JobHistoryLogger=true
-
-''')
-
-if (('hdfs-log4j' in config['configurations']) and ('content' in config['configurations']['hdfs-log4j'])):
+if 'hdfs-log4j' in config['configurations']:
   log4j_props = config['configurations']['hdfs-log4j']['content']
-  if (('mapreduce-log4j' in config['configurations']) and ('content' in config['configurations']['mapreduce-log4j'])):
+  if 'mapreduce-log4j' in config['configurations']:
     log4j_props += config['configurations']['mapreduce-log4j']['content']
     if rca_enabled:
       log4j_props += rca_properties

+ 6 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs_client.py

@@ -47,6 +47,12 @@ class HdfsClient(Script):
     import params
     hdfs()
 
+  def generate_configs_get_template_file_content(self, filename, dicts):
+    import params
+    content = super(HdfsClient,self).generate_configs_get_template_file_content(filename, dicts)
+    if filename == 'log4j.properties':
+      content += params.rca_properties
+    return content
 
 if __name__ == "__main__":
   HdfsClient().execute()

+ 15 - 1
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py

@@ -173,4 +173,18 @@ ttnode_heapsize = default("/configurations/mapred-env/ttnode_heapsize","1024m")
 dtnode_heapsize = config['configurations']['hadoop-env']['dtnode_heapsize']
 
 mapred_pid_dir_prefix = default("/configurations/hadoop-env/mapred_pid_dir_prefix","/var/run/hadoop-mapreduce")
-mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
+mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
+
+rca_enabled = False
+if 'mapred-env' in config['configurations']:
+  rca_enabled =  config['configurations']['mapred-env']['rca_enabled']
+
+ambari_db_rca_url = config['hostLevelParams']['ambari_db_rca_url']
+ambari_db_rca_driver = config['hostLevelParams']['ambari_db_rca_driver']
+ambari_db_rca_username = config['hostLevelParams']['ambari_db_rca_username']
+ambari_db_rca_password = config['hostLevelParams']['ambari_db_rca_password']
+
+rca_properties = ''
+if rca_enabled and 'mapreduce-log4j' in config['configurations'] \
+  and 'rca_properties' in config['configurations']['mapred-env']:
+  rca_properties = format(config['configurations']['mapred-env']['rca_properties'])

+ 20 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/configuration/mapred-env.xml

@@ -57,5 +57,25 @@
     <property-type>USER</property-type>
     <description>MapReduce User.</description>
   </property>
+  <property>
+    <name>rca_properties</name>
+    <value>
+ambari.jobhistory.database={ambari_db_rca_url}
+ambari.jobhistory.driver={ambari_db_rca_driver}
+ambari.jobhistory.user={ambari_db_rca_username}
+ambari.jobhistory.password={ambari_db_rca_password}
+ambari.jobhistory.logger=${{hadoop.root.logger}}
+
+log4j.appender.JHA=org.apache.ambari.log4j.hadoop.mapreduce.jobhistory.JobHistoryAppender
+log4j.appender.JHA.database={ambari_db_rca_url}
+log4j.appender.JHA.driver={ambari_db_rca_driver}
+log4j.appender.JHA.user={ambari_db_rca_username}
+log4j.appender.JHA.password={ambari_db_rca_password}
+
+log4j.logger.org.apache.hadoop.mapred.JobHistory$JobHistoryLogger=DEBUG,JHA
+log4j.additivity.org.apache.hadoop.mapred.JobHistory$JobHistoryLogger=true
+
+    </value>
+  </property>
 
 </configuration>

+ 7 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/client.py

@@ -38,5 +38,12 @@ class Client(Script):
   def status(self, env):
     raise ClientComponentHasNoStatus()
 
+  def generate_configs_get_template_file_content(self, filename, dicts):
+    import params
+    content = super(Client,self).generate_configs_get_template_file_content(filename, dicts)
+    if filename == 'log4j.properties':
+     content += params.rca_properties
+    return content
+
 if __name__ == "__main__":
   Client().execute()

+ 13 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/params.py

@@ -78,3 +78,16 @@ HdfsDirectory = functools.partial(
 mapred_tt_group = default("/configurations/mapred-site/mapreduce.tasktracker.group", user_group)
 
 slave_hosts = default("/clusterHostInfo/slave_hosts", [])
+
+rca_enabled = False
+if 'mapred-env' in config['configurations']:
+  rca_enabled =  config['configurations']['mapred-env']['rca_enabled']
+
+ambari_db_rca_url = config['hostLevelParams']['ambari_db_rca_url']
+ambari_db_rca_driver = config['hostLevelParams']['ambari_db_rca_driver']
+ambari_db_rca_username = config['hostLevelParams']['ambari_db_rca_username']
+ambari_db_rca_password = config['hostLevelParams']['ambari_db_rca_password']
+
+rca_properties = ''
+if rca_enabled and 'rca_properties' in config['configurations']['mapred-env']:
+  rca_properties = format(config['configurations']['mapred-env']['rca_properties'])

+ 59 - 0
ambari-server/src/test/python/stacks/1.3.2/HDFS/test_hdfs_client.py

@@ -0,0 +1,59 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+from mock.mock import MagicMock, call, patch
+import tempfile
+import tarfile
+import contextlib
+from stacks.utils.RMFTestCase import *
+import os
+
+class Test(RMFTestCase):
+
+  @patch.object(tarfile,"open", new = MagicMock())
+  @patch.object(tempfile,"mkdtemp", new = MagicMock(return_value='/tmp/123'))
+  @patch.object(contextlib,"closing", new = MagicMock())
+  @patch("os.path.exists", new = MagicMock(return_value=True))
+  def test_generate_configs_default(self):
+    self.executeScript("1.3.2/services/HDFS/package/scripts/hdfs_client.py",
+                       classname = "HdfsClient",
+                       command = "generate_configs",
+                       config_file="default.json"
+    )
+    self.assertResourceCalled('Directory', '/tmp',
+                              recursive = True,
+                              )
+    self.printResources()
+    self.assertResourceCalled('XmlConfig', 'core-site.xml',
+                              conf_dir = '/tmp/123',
+                              configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
+                              configurations = self.getConfig()['configurations']['core-site'],
+                              )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              conf_dir = '/tmp/123',
+                              configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site'],
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )
+    self.assertResourceCalled('File', '/tmp/123/log4j.properties',
+                              content = InlineTemplate("log4jproperties\nline2log4jproperties\nline2\nambari.jobhistory.database=jdbc:postgresql://c6401.ambari.apache.org/ambarirca\nambari.jobhistory.driver=org.postgresql.Driver\nambari.jobhistory.user=mapred\nambari.jobhistory.password=mapred\nambari.jobhistory.logger=${hadoop.root.logger}\n\nlog4j.appender.JHA=org.apache.ambari.log4j.hadoop.mapreduce.jobhistory.JobHistoryAppender\nlog4j.appender.JHA.database=jdbc:postgresql://c6401.ambari.apache.org/ambarirca\nlog4j.appender.JHA.driver=org.postgresql.Driver\nlog4j.appender.JHA.user=mapred\nlog4j.appender.JHA.password=mapred\n\nlog4j.logger.org.apache.hadoop.mapred.JobHistory$JobHistoryLogger=DEBUG,JHA\nlog4j.additivity.org.apache.hadoop.mapred.JobHistory$JobHistoryLogger=true\n\n"),
+                              )
+    self.assertResourceCalled('Directory', '/tmp/123',
+                              action = ['delete'],
+                              )
+    self.assertNoMoreResources()

+ 35 - 1
ambari-server/src/test/python/stacks/1.3.2/MAPREDUCE/test_mapreduce_client.py

@@ -18,6 +18,9 @@ See the License for the specific language governing permissions and
 limitations under the License.
 '''
 from mock.mock import MagicMock, call, patch
+import tempfile
+import tarfile
+import contextlib
 from stacks.utils.RMFTestCase import *
 import os
 
@@ -165,4 +168,35 @@ class TestMapreduceClient(RMFTestCase):
                               owner = 'mapred',
                               group = 'hadoop',
                               )
-    self.assertNoMoreResources()
+    self.assertNoMoreResources()
+
+  @patch.object(tarfile,"open", new = MagicMock())
+  @patch.object(tempfile,"mkdtemp", new = MagicMock(return_value='/tmp/123'))
+  @patch.object(contextlib,"closing", new = MagicMock())
+  @patch("os.path.exists", new = MagicMock(return_value=True))
+  def test_generate_configs_default(self):
+    self.executeScript("1.3.2/services/MAPREDUCE/package/scripts/client.py",
+                       classname = "Client",
+                       command = "generate_configs",
+                       config_file="default.json"
+    )
+    self.assertResourceCalled('Directory', '/tmp',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('XmlConfig', 'core-site.xml',
+                              conf_dir = '/tmp/123',
+                              configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
+                              configurations = self.getConfig()['configurations']['core-site'],
+                              )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              conf_dir = '/tmp/123',
+                              configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site'],
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )
+    self.assertResourceCalled('File', '/tmp/123/log4j.properties',
+                              content = InlineTemplate("log4jproperties\nline2log4jproperties\nline2\nambari.jobhistory.database=jdbc:postgresql://c6401.ambari.apache.org/ambarirca\nambari.jobhistory.driver=org.postgresql.Driver\nambari.jobhistory.user=mapred\nambari.jobhistory.password=mapred\nambari.jobhistory.logger=${hadoop.root.logger}\n\nlog4j.appender.JHA=org.apache.ambari.log4j.hadoop.mapreduce.jobhistory.JobHistoryAppender\nlog4j.appender.JHA.database=jdbc:postgresql://c6401.ambari.apache.org/ambarirca\nlog4j.appender.JHA.driver=org.postgresql.Driver\nlog4j.appender.JHA.user=mapred\nlog4j.appender.JHA.password=mapred\n\nlog4j.logger.org.apache.hadoop.mapred.JobHistory$JobHistoryLogger=DEBUG,JHA\nlog4j.additivity.org.apache.hadoop.mapred.JobHistory$JobHistoryLogger=true\n\n"),
+                              )
+    self.assertResourceCalled('Directory', '/tmp/123',
+                              action = ['delete'],
+                              )
+    self.assertNoMoreResources()

+ 7 - 3
ambari-server/src/test/python/stacks/1.3.2/configs/default.json

@@ -32,8 +32,11 @@
         "script": "scripts/datanode.py",
         "excluded_hosts": "host1,host2",
         "mark_draining_only" : "false",
-        "update_exclude_file_only" : "false"
-    }, 
+        "update_exclude_file_only" : "false",
+        "xml_configs_list":[{"core-site.xml":"core-site"},{"mapred-site.xml":"mapred-site"}],
+        "env_configs_list":[{"log4j.properties":"hdfs-log4j,mapreduce-log4j"}],
+        "output_file":"MAPREDUCE_CLIENT-configs.tar.gz"
+    },
     "taskId": 18, 
     "public_hostname": "c6402.ambari.apache.org", 
     "configurations": {
@@ -326,7 +329,8 @@
             "jtnode_opt_newsize": "200m", 
             "mapred_user": "mapred", 
             "hadoop_heapsize": "1024", 
-            "jtnode_opt_maxnewsize": "200m"
+            "jtnode_opt_maxnewsize": "200m",
+            "rca_properties": "\nambari.jobhistory.database={ambari_db_rca_url}\nambari.jobhistory.driver={ambari_db_rca_driver}\nambari.jobhistory.user={ambari_db_rca_username}\nambari.jobhistory.password={ambari_db_rca_password}\nambari.jobhistory.logger=${{hadoop.root.logger}}\n\nlog4j.appender.JHA=org.apache.ambari.log4j.hadoop.mapreduce.jobhistory.JobHistoryAppender\nlog4j.appender.JHA.database={ambari_db_rca_url}\nlog4j.appender.JHA.driver={ambari_db_rca_driver}\nlog4j.appender.JHA.user={ambari_db_rca_username}\nlog4j.appender.JHA.password={ambari_db_rca_password}\n\nlog4j.logger.org.apache.hadoop.mapred.JobHistory$JobHistoryLogger=DEBUG,JHA\nlog4j.additivity.org.apache.hadoop.mapred.JobHistory$JobHistoryLogger=true\n\n"
         }, 
         "nagios-env": {
             "hive_metastore_user_passwd": "password", 