Browse Source

AMBARI-4066. Nagios fails when HBase is included (Python)

Andrew Onischuk 11 years ago
parent commit 9b32b676e8

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/2.0._/services/HIVE/package/scripts/params.py

@@ -58,7 +58,7 @@ smoke_user_keytab = config['configurations']['global']['smokeuser_keytab']
 security_enabled = config['configurations']['global']['security_enabled']
 
 kinit_path_local = get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
-hive_metatore_keytab_path = default("hive-site/hive.metastore.kerberos.keytab.file","/etc/security/keytabs/hive.service.keytab")
+hive_metastore_keytab_path =  config['configurations']['hive-site']['hive.metastore.kerberos.keytab.file']
 
 #hive_env
 hive_conf_dir = "/etc/hive/conf"
@@ -127,4 +127,4 @@ webhcat_user = config['configurations']['global']['webhcat_user']
 hcat_pid_dir = config['configurations']['global']['hcat_pid_dir']   #hcat_pid_dir
 hcat_log_dir = config['configurations']['global']['hcat_log_dir']   #hcat_log_dir
 
-hadoop_conf_dir = config['configurations']['global']['hadoop_conf_dir']
+hadoop_conf_dir = '/etc/hadoop/conf'

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.0._/services/HIVE/package/scripts/service_check.py

@@ -30,7 +30,7 @@ class HiveServiceCheck(Script):
     env.set_params(params)
     if params.security_enabled:
       kinit_cmd = format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser};")
-      hive_principal_ext = format("principal={hive_metatore_keytab_path}")
+      hive_principal_ext = format("principal={hive_metastore_keytab_path}")
       hive_url_ext = format("{hive_url}/\\;{hive_principal_ext}")
       smoke_cmd = format("{kinit_cmd} env JAVA_HOME={java64_home} {smoke_test_path} {hive_url_ext} {smoke_test_sql}")
     else:

+ 3 - 3
ambari-server/src/main/resources/stacks/HDP/2.0._/services/NAGIOS/package/scripts/nagios_server.py

@@ -66,10 +66,10 @@ def remove_conflicting_packages():
   )
 
 def main():
-  command_type = sys.argv[1] if len(sys.argv)>1 else "stop"
+  command_type = sys.argv[1] if len(sys.argv)>1 else "install"
   print "Running "+command_type
-  command_data_file = '/root/workspace/Nagios/input.json'
-  basedir = '/root/workspace/Nagios/main'
+  command_data_file = '/var/lib/ambari-agent/data/command-3.json'
+  basedir = '/root/ambari/ambari-server/src/main/resources/stacks/HDP/2.0._/services/NAGIOS/package'
   stroutfile = '/1.txt'
   sys.argv = ["", command_type, command_data_file, basedir, stroutfile]
   

+ 4 - 3
ambari-server/src/main/resources/stacks/HDP/2.0._/services/NAGIOS/package/scripts/params.py

@@ -66,6 +66,7 @@ datanode_port = get_port_from_url(config['configurations']['hdfs-site']['dfs.dat
 flume_port = "4159"
 hive_metastore_port = config['configurations']['global']['hive_metastore_port'] #"9083"
 templeton_port = config['configurations']['webhcat-site']['templeton.port'] #"50111"
+hbase_rs_port = "60030"
 
 # this is different for HDP1
 nn_metrics_property = "FSNamesystem"
@@ -125,7 +126,7 @@ _flume_hosts = default("/clusterHostInfo/flume_hosts", None)
 _nagios_server_host = default("/clusterHostInfo/nagios_server_host",None)
 _ganglia_server_host = default("/clusterHostInfo/ganglia_server_host",None)
 
-_hbase_master_hosts = default("/clusterHostInfo/hbase_master_hosts",None)
+hbase_master_hosts = default("/clusterHostInfo/hbase_master_hosts",None)
 _hive_server_host = default("/clusterHostInfo/hive_server_host",None)
 _oozie_server = default("/clusterHostInfo/oozie_server",None)
 _webhcat_server_host = default("/clusterHostInfo/webhcat_server_host",None)
@@ -149,7 +150,7 @@ hostgroup_defs = {
     'ganglia-server' : _ganglia_server_host,
     'flume-servers' : _flume_hosts,
     'zookeeper-servers' : _zookeeper_hosts,
-    'hbasemasters' : _hbase_master_hosts,
+    'hbasemasters' : hbase_master_hosts,
     'hiveserver' : _hive_server_host,
     'region-servers' : _hbase_rs_hosts,
     'oozie-server' : _oozie_server,
@@ -159,4 +160,4 @@ hostgroup_defs = {
     'nodemanagers' : _nm_hosts,
     'historyserver2' : _hs_host,
     'journalnodes' : _journalnode_hosts
-}
+}