
AMBARI-17159. Upon successful start, log the process id for daemons started. (mpapirkovskyy)
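The core of the change is a pair of new hooks on Script (see the script.py hunks below): a component script overrides get_pid_files() to report its pid file paths, and the base class's new post_start() runs after a successful start, raising Fail if a pid file is missing and otherwise logging the pids it read. A minimal sketch of the contract a component now implements; MyDaemon, the params/status_params fields, and the pid-file name are illustrative only, not part of this commit:

from resource_management.libraries.script.script import Script

class MyDaemon(Script):
  def start(self, env):
    import params              # hypothetical service params module
    env.set_params(params)
    # launch the daemon here; it is expected to write its pid file

  def get_pid_files(self):
    # Override of the new Script.get_pid_files() hook. After start(),
    # Script.post_start() reads each returned file via sudo.read_file()
    # and logs "Component has started with pid(s): ...", or raises Fail
    # if any file is missing.
    import status_params       # hypothetical; holds my_daemon_pid_file
    return [status_params.my_daemon_pid_file]

if __name__ == "__main__":
  MyDaemon().execute()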

Myroslav Papirkovskyi, 9 years ago
commit fa9ee48431
51 changed files with 265 additions and 49 deletions
  1. ambari-common/src/main/python/resource_management/libraries/functions/flume_agent_helper.py (+16 -4)
  2. ambari-common/src/main/python/resource_management/libraries/script/script.py (+26 -3)
  3. ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_script.py (+12 -3)
  4. ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/metrics_collector.py (+4 -0)
  5. ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/metrics_grafana.py (+4 -0)
  6. ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/metrics_monitor.py (+5 -1)
  7. ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/status.py (+14 -12)
  8. ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/status_params.py (+3 -0)
  9. ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata_server.py (+4 -0)
  10. ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_server.py (+4 -0)
  11. ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/flume_handler.py (+5 -1)
  12. ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_master.py (+6 -2)
  13. ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_regionserver.py (+6 -2)
  14. ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_queryserver.py (+4 -0)
  15. ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_service.py (+3 -2)
  16. ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/status_params.py (+5 -1)
  17. ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py (+4 -0)
  18. ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py (+4 -0)
  19. ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py (+4 -0)
  20. ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/nfsgateway.py (+4 -0)
  21. ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/snamenode.py (+4 -0)
  22. ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/zkfc_slave.py (+4 -0)
  23. ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py (+6 -3)
  24. ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py (+5 -2)
  25. ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py (+6 -3)
  26. ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service.py (+3 -2)
  27. ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service_interactive.py (+2 -1)
  28. ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py (+4 -4)
  29. ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py (+4 -0)
  30. ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/kafka_broker.py (+4 -0)
  31. ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/knox_gateway.py (+4 -0)
  32. ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server.py (+4 -0)
  33. ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_tagsync.py (+4 -0)
  34. ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/job_history_server.py (+4 -0)
  35. ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/livy_server.py (+5 -1)
  36. ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/spark_thrift_server.py (+4 -0)
  37. ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/job_history_server.py (+4 -0)
  38. ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/spark_thrift_server.py (+4 -0)
  39. ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/drpc_server.py (+4 -0)
  40. ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/nimbus.py (+4 -0)
  41. ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/pacemaker.py (+4 -0)
  42. ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/rest_api.py (+4 -0)
  43. ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/supervisor.py (+4 -0)
  44. ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/ui_server.py (+4 -0)
  45. ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/application_timeline_server.py (+6 -0)
  46. ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py (+4 -0)
  47. ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/nodemanager.py (+4 -0)
  48. ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py (+4 -0)
  49. ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/package/scripts/zookeeper_server.py (+4 -0)
  50. ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py (+4 -2)
  51. ambari-server/src/test/python/stacks/2.0.6/configs/default_ams_embedded.json (+3 -0)

+ 16 - 4
ambari-common/src/main/python/resource_management/libraries/functions/flume_agent_helper.py

@@ -38,11 +38,8 @@ def get_flume_status(flume_conf_directory, flume_run_directory):
   :param flume_run_directory: the run directory (ie /var/run/flume)
   :return: a list of status information for each expected flume agent
   """
-  meta_files = find_expected_agent_names(flume_conf_directory)
-  pid_files = []
 
-  for agent_name in meta_files:
-    pid_files.append(os.path.join(flume_run_directory, agent_name + '.pid'))
+  pid_files = get_flume_pid_files(flume_conf_directory, flume_run_directory)
 
   processes = []
   for pid_file in pid_files:
@@ -50,6 +47,21 @@ def get_flume_status(flume_conf_directory, flume_run_directory):
 
   return processes
 
+def get_flume_pid_files(flume_conf_directory, flume_run_directory):
+  """
+  Gets the flume agent pid files
+
+  :param flume_conf_directory:  the configuration directory (ie /etc/flume/conf)
+  :param flume_run_directory: the run directory (ie /var/run/flume)
+  :return: a list of pid files for each expected flume agent
+  """
+
+  meta_files = find_expected_agent_names(flume_conf_directory)
+  pid_files = []
+  for agent_name in meta_files:
+    pid_files.append(os.path.join(flume_run_directory, agent_name + '.pid'))
+
+  return pid_files
 
 def find_expected_agent_names(flume_conf_directory):
   """

+ 26 - 3
ambari-common/src/main/python/resource_management/libraries/script/script.py

@@ -35,6 +35,7 @@ from ambari_commons.constants import UPGRADE_TYPE_NON_ROLLING, UPGRADE_TYPE_ROLL
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
 from resource_management.libraries.resources import XmlConfig
 from resource_management.libraries.resources import PropertiesFile
+from resource_management.core import sudo
 from resource_management.core.resources import File, Directory
 from resource_management.core.source import InlineTemplate
 from resource_management.core.environment import Environment
@@ -255,6 +256,9 @@ class Script(object):
           self.pre_start()
         
         method(env)
+
+        if self.command_name == "start" and not self.is_hook():
+          self.post_start()
     finally:
       if self.should_expose_component_version(self.command_name):
         self.save_component_version_to_structured_out()
@@ -268,6 +272,9 @@ class Script(object):
   
   def get_user(self):
     return ""
+
+  def get_pid_files(self):
+    return []
         
   def pre_start(self):
     if self.log_out_files:
@@ -275,15 +282,30 @@ class Script(object):
       user = self.get_user()
       
       if log_folder == "":
-        Logger.logger.warn("Log folder for current script is not defined")
+        Logger.logger.error("Log folder for current script is not defined")
         return
       
       if user == "":
-        Logger.logger.warn("User for current script is not defined")
+        Logger.logger.error("User for current script is not defined")
         return
       
       show_logs(log_folder, user, lines_count=COUNT_OF_LAST_LINES_OF_OUT_FILES_LOGGED, mask=OUT_FILES_MASK)
 
+  def post_start(self):
+    pid_files = self.get_pid_files()
+    if pid_files == []:
+      Logger.logger.error("Pid files for current script are not defined")
+      return
+
+    pids = []
+    for pid_file in pid_files:
+      if not sudo.path_exists(pid_file):
+        raise Fail("Pid file {0} doesn't exist after starting of the component.")
+
+      pids.append(sudo.read_file(pid_file).strip())
+
+    Logger.info("Component has started with pid(s): {0}".format(', '.join(pids)))
+
   def choose_method_to_execute(self, command_name):
     """
     Returns a callable object that should be executed for a given command.
@@ -689,6 +711,7 @@ class Script(object):
           self.start(env, rolling_restart=(restart_type == "rolling_upgrade"))
         else:
           self.start(env)
+      self.post_start()
 
       if is_stack_upgrade:
         # Remain backward compatible with the rest of the services that haven't switched to using
@@ -809,4 +832,4 @@ class Script(object):
 
   def __init__(self):
     if Script.instance is not None:
-      raise Fail("An instantiation already exists! Use, get_instance() method.")
\ No newline at end of file
+      raise Fail("An instantiation already exists! Use, get_instance() method.")

+ 12 - 3
ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_script.py

@@ -48,7 +48,11 @@ class AccumuloScript(Script):
   }
 
   def __init__(self, component):
+    import status_params
+    env.set_params(status_params)
+
     self.component = component
+    self.pid_file = format("{pid_dir}/accumulo-{accumulo_user}-{component}.pid")
 
 
   def get_component_name(self):
@@ -93,9 +97,8 @@ class AccumuloScript(Script):
   def status(self, env):
     import status_params
     env.set_params(status_params)
-    component = self.component
-    pid_file = format("{pid_dir}/accumulo-{accumulo_user}-{component}.pid")
-    check_process_status(pid_file)
+
+    check_process_status(self.pid_file)
 
 
   def pre_upgrade_restart(self, env, upgrade_type=None):
@@ -174,6 +177,12 @@ class AccumuloScript(Script):
     import params
     return params.log_dir
 
+  def get_pid_files(self):
+    import status_params
+    env.set_params(status_params)
+
+    return [self.pid_file]
+
   def get_user(self):
     import params
     return params.accumulo_user

+ 4 - 0
ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/metrics_collector.py

@@ -68,6 +68,10 @@ class AmsCollector(Script):
     import params
     return params.ams_user
 
+  def get_pid_files(self):
+    import status
+    return status.get_collector_pid_files()
+
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class AmsCollectorDefault(AmsCollector):

+ 4 - 0
ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/metrics_grafana.py

@@ -63,5 +63,9 @@ class AmsGrafana(Script):
     env.set_params(status_params)
     check_service_status(name='grafana')
 
+  def get_pid_files(self):
+    import status_params
+    return [status_params.grafana_pid_file]
+
 if __name__ == "__main__":
   AmsGrafana().execute()

+ 5 - 1
ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/metrics_monitor.py

@@ -58,7 +58,11 @@ class AmsMonitor(Script):
   def get_log_folder(self):
     import params
     return params.ams_monitor_log_dir
-  
+
+  def get_pid_files(self):
+    import status_params
+    return [status_params.monitor_pid_file]
+
   def get_user(self):
     import params
     return params.ams_user

+ 14 - 12
ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/status.py

@@ -22,24 +22,26 @@ from ambari_commons import OSConst
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
 import os
 
+def get_collector_pid_files():
+  pid_files = []
+  pid_files.append(format("{ams_collector_pid_dir}/ambari-metrics-collector.pid"))
+  pid_files.append(format("{hbase_pid_dir}/hbase-{hbase_user}-master.pid"))
+  if os.path.exists(format("{hbase_pid_dir}/distributed_mode")):
+    pid_files.append(format("{hbase_pid_dir}/hbase-{hbase_user}-regionserver.pid"))
+  return pid_files
+
 @OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
 def check_service_status(name):
+  import status_params
+  env.set_params(status_params)
+
   if name=='collector':
-    pid_file = format("{ams_collector_pid_dir}/ambari-metrics-collector.pid")
-    check_process_status(pid_file)
-    pid_file = format("{hbase_pid_dir}/hbase-{hbase_user}-master.pid")
-    check_process_status(pid_file)
-    if os.path.exists(format("{hbase_pid_dir}/distributed_mode")):
-      pid_file = format("{hbase_pid_dir}/hbase-{hbase_user}-regionserver.pid")
+    for pid_file in get_collector_pid_files():
       check_process_status(pid_file)
-
   elif name == 'monitor':
-    pid_file = format("{ams_monitor_pid_dir}/ambari-metrics-monitor.pid")
-    check_process_status(pid_file)
-
+    check_process_status(status_params.monitor_pid_file)
   elif name == 'grafana':
-    pid_file = format("{ams_grafana_pid_dir}/grafana-server.pid")
-    check_process_status(pid_file)
+    check_process_status(status_params.grafana_pid_file)
 
 @OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
 def check_service_status(name):

+ 3 - 0
ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/status_params.py

@@ -32,6 +32,9 @@ ams_collector_pid_dir = config['configurations']['ams-env']['metrics_collector_p
 ams_monitor_pid_dir = config['configurations']['ams-env']['metrics_monitor_pid_dir']
 ams_grafana_pid_dir = config['configurations']['ams-grafana-env']['metrics_grafana_pid_dir']
 
+monitor_pid_file = format("{ams_monitor_pid_dir}/ambari-metrics-monitor.pid")
+grafana_pid_file = format("{ams_grafana_pid_dir}/grafana-server.pid")
+
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 ams_hbase_conf_dir = format("{hbase_conf_dir}")
 

+ 4 - 0
ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata_server.py

@@ -169,5 +169,9 @@ class MetadataServer(Script):
     import params
     return params.metadata_user
 
+  def get_pid_files(self):
+    import status_params
+    return [status_params.pid_file]
+
 if __name__ == "__main__":
   MetadataServer().execute()

+ 4 - 0
ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_server.py

@@ -156,6 +156,10 @@ class FalconServerLinux(FalconServer):
     import params
     return params.falcon_user
 
+  def get_pid_files(self):
+    import status_params
+    return [status_params.server_pid_file]
+
 
 @OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
 class FalconServerWindows(FalconServer):

+ 5 - 1
ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/flume_handler.py

@@ -22,7 +22,7 @@ from flume import get_desired_state
 
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.functions import conf_select, stack_select
-from resource_management.libraries.functions.flume_agent_helper import find_expected_agent_names, get_flume_status
+from resource_management.libraries.functions.flume_agent_helper import find_expected_agent_names, get_flume_status, get_flume_pid_files
 from resource_management.core.exceptions import ComponentIsNotRunning
 from resource_management.core.logger import Logger
 from resource_management.core.resources.service import Service
@@ -100,6 +100,10 @@ class FlumeHandlerLinux(FlumeHandler):
     import params
     return None # means that is run from the same user as ambari is run
 
+  def get_pid_files(self):
+    import params
+    return get_flume_pid_files(params.flume_conf_dir, params.flume_run_dir)
+
 @OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
 class FlumeHandlerWindows(FlumeHandler):
   def install(self, env):

+ 6 - 2
ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_master.py

@@ -93,8 +93,8 @@ class HbaseMasterDefault(HbaseMaster):
   def status(self, env):
     import status_params
     env.set_params(status_params)
-    pid_file = format("{pid_dir}/hbase-{hbase_user}-master.pid")
-    check_process_status(pid_file)
+
+    check_process_status(status_params.hbase_master_pid_file)
 
   def security_status(self, env):
     import status_params
@@ -153,5 +153,9 @@ class HbaseMasterDefault(HbaseMaster):
     import params
     return params.hbase_user
 
+  def get_pid_files(self):
+    import status_params
+    return [status_params.hbase_master_pid_file]
+
 if __name__ == "__main__":
   HbaseMaster().execute()

+ 6 - 2
ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_regionserver.py

@@ -101,8 +101,8 @@ class HbaseRegionServerDefault(HbaseRegionServer):
   def status(self, env):
     import status_params
     env.set_params(status_params)
-    pid_file = format("{pid_dir}/hbase-{hbase_user}-regionserver.pid")
-    check_process_status(pid_file)
+
+    check_process_status(status_params.regionserver_pid_file)
 
   def security_status(self, env):
     import status_params
@@ -161,5 +161,9 @@ class HbaseRegionServerDefault(HbaseRegionServer):
     import params
     return params.hbase_user
 
+  def get_pid_files(self):
+    import status_params
+    return [status_params.regionserver_pid_file]
+
 if __name__ == "__main__":
   HbaseRegionServer().execute()

+ 4 - 0
ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_queryserver.py

@@ -84,5 +84,9 @@ class PhoenixQueryServer(Script):
     import params
     return params.hbase_user
 
+  def get_pid_files(self):
+    import status_params
+    return [status_params.phoenix_pid_file]
+
 if __name__ == "__main__":
   PhoenixQueryServer().execute()

+ 3 - 2
ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_service.py

@@ -26,8 +26,9 @@ from resource_management.libraries.functions import check_process_status, format
 
 # Note: Phoenix Query Server is only applicable to phoenix version stacks and above.
 def phoenix_service(action = 'start'): # 'start', 'stop', 'status'
-    # Note: params/status_params should already be imported before calling phoenix_service()
-    pid_file = format("{pid_dir}/phoenix-{hbase_user}-server.pid")
+    # Note: params should already be imported before calling phoenix_service()
+    import status_params
+    pid_file = status_params.phoenix_pid_file
     no_op_test = format("ls {pid_file} >/dev/null 2>&1 && ps -p `cat {pid_file}` >/dev/null 2>&1")
 
     if action == "status":

+ 5 - 1
ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/status_params.py

@@ -46,6 +46,10 @@ else:
   pid_dir = config['configurations']['hbase-env']['hbase_pid_dir']
   hbase_user = config['configurations']['hbase-env']['hbase_user']
 
+  hbase_master_pid_file = format("{pid_dir}/hbase-{hbase_user}-master.pid")
+  regionserver_pid_file = format("{pid_dir}/hbase-{hbase_user}-regionserver.pid")
+  phoenix_pid_file = format("{pid_dir}/phoenix-{hbase_user}-server.pid")
+
   # Security related/required params
   hostname = config['hostname']
   security_enabled = config['configurations']['cluster-env']['security_enabled']
@@ -61,4 +65,4 @@ else:
   if stack_version_formatted and check_stack_feature(StackFeature.ROLLING_UPGRADE, stack_version_formatted):
     hbase_conf_dir = format("{stack_root}/current/{component_directory}/conf")
     
-stack_name = default("/hostLevelParams/stack_name", None)
\ No newline at end of file
+stack_name = default("/hostLevelParams/stack_name", None)

+ 4 - 0
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py

@@ -164,6 +164,10 @@ class DataNodeDefault(DataNode):
     import params
     return params.hdfs_user
 
+  def get_pid_files(self):
+    import status_params
+    return [status_params.datanode_pid_file]
+
 @OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
 class DataNodeWindows(DataNode):
   def install(self, env):

+ 4 - 0
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py

@@ -168,6 +168,10 @@ class JournalNodeDefault(JournalNode):
     import params
     return params.hdfs_user
 
+  def get_pid_files(self):
+    import status_params
+    return [status_params.journalnode_pid_file]
+
 @OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
 class JournalNodeWindows(JournalNode):
   def install(self, env):

+ 4 - 0
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py

@@ -356,6 +356,10 @@ class NameNodeDefault(NameNode):
     import params
     return params.hdfs_user
 
+  def get_pid_files(self):
+    import status_params
+    return [status_params.namenode_pid_file]
+
 @OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
 class NameNodeWindows(NameNode):
   def install(self, env):

+ 4 - 0
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/nfsgateway.py

@@ -143,5 +143,9 @@ class NFSGateway(Script):
     import params
     return params.hdfs_user
 
+  def get_pid_files(self):
+    import status_params
+    return [status_params.nfsgateway_pid_file]
+
 if __name__ == "__main__":
   NFSGateway().execute()

+ 4 - 0
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/snamenode.py

@@ -144,6 +144,10 @@ class SNameNodeDefault(SNameNode):
     import params
     return params.hdfs_user
 
+  def get_pid_files(self):
+    import status_params
+    return [status_params.snamenode_pid_file]
+
 @OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
 class SNameNodeWindows(SNameNode):
   pass

+ 4 - 0
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/zkfc_slave.py

@@ -158,6 +158,10 @@ class ZkfcSlaveDefault(ZkfcSlave):
     import params
     return params.hdfs_user
 
+  def get_pid_files(self):
+    import status_params
+    return [status_params.zkfc_pid_file]
+
 def initialize_ha_zookeeper(params):
   try:
     iterations = 10

+ 6 - 3
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py

@@ -90,11 +90,10 @@ class HiveMetastoreDefault(HiveMetastore):
   def status(self, env):
     import status_params
     from resource_management.libraries.functions import check_process_status
-
     env.set_params(status_params)
-    pid_file = format("{hive_pid_dir}/{hive_metastore_pid}")
+
     # Recursively check all existing gmetad pid files
-    check_process_status(pid_file)
+    check_process_status(status_params.hive_metastore_pid)
 
 
   def pre_upgrade_restart(self, env, upgrade_type=None):
@@ -249,6 +248,10 @@ class HiveMetastoreDefault(HiveMetastore):
     import params
     return params.hive_user
 
+  def get_pid_files(self):
+    import status_params
+    return [status_params.hive_metastore_pid]
+
 
 if __name__ == "__main__":
   HiveMetastore().execute()

+ 5 - 2
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py

@@ -107,10 +107,9 @@ class HiveServerDefault(HiveServer):
   def status(self, env):
     import status_params
     env.set_params(status_params)
-    pid_file = format("{hive_pid_dir}/{hive_pid}")
 
     # Recursively check all existing gmetad pid files
-    check_process_status(pid_file)
+    check_process_status(status_params.hive_pid)
 
 
   def pre_upgrade_restart(self, env, upgrade_type=None):
@@ -207,5 +206,9 @@ class HiveServerDefault(HiveServer):
     import params
     return params.hive_user
 
+  def get_pid_files(self):
+    import status_params
+    return [status_params.hive_pid]
+
 if __name__ == "__main__":
   HiveServer().execute()

+ 6 - 3
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py

@@ -140,9 +140,8 @@ class HiveServerInteractiveDefault(HiveServerInteractive):
       # We are not doing 'llap' status check done here as part of status check for 'HSI', as 'llap' status
       # check is a heavy weight operation.
 
-      pid_file = format("{hive_pid_dir}/{hive_interactive_pid}")
       # Recursively check all existing gmetad pid files
-      check_process_status(pid_file)
+      check_process_status(status_params.hive_interactive_pid)
 
     def security_status(self, env):
       HiveServerDefault.security_status(env)
@@ -391,6 +390,10 @@ class HiveServerInteractiveDefault(HiveServerInteractive):
       import params
       return params.hive_user
 
+    def get_pid_files(self):
+      import status_params
+      return [status_params.hive_interactive_pid]
+
 @OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
 class HiveServerInteractiveWindows(HiveServerInteractive):
 
@@ -398,4 +401,4 @@ class HiveServerInteractiveWindows(HiveServerInteractive):
     pass
 
 if __name__ == "__main__":
-  HiveServerInteractive().execute()
\ No newline at end of file
+  HiveServerInteractive().execute()

+ 3 - 2
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service.py

@@ -55,12 +55,13 @@ def hive_service(name, action='start', upgrade_type=None):
 def hive_service(name, action='start', upgrade_type=None):
 
   import params
+  import status_params
 
   if name == 'metastore':
-    pid_file = format("{hive_pid_dir}/{hive_metastore_pid}")
+    pid_file = status_params.hive_metastore_pid
     cmd = format("{start_metastore_path} {hive_log_dir}/hive.out {hive_log_dir}/hive.err {pid_file} {hive_server_conf_dir} {hive_log_dir}")
   elif name == 'hiveserver2':
-    pid_file = format("{hive_pid_dir}/{hive_pid}")
+    pid_file = status_params.hive_pid
     cmd = format("{start_hiveserver2_path} {hive_log_dir}/hive-server2.out {hive_log_dir}/hive-server2.err {pid_file} {hive_server_conf_dir} {hive_log_dir}")
 
 

+ 2 - 1
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service_interactive.py

@@ -41,8 +41,9 @@ def hive_service_interactive(name, action='start', upgrade_type=None):
 @OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
 def hive_service_interactive(name, action='start', upgrade_type=None):
   import params
+  import status_params
 
-  pid_file = format("{hive_pid_dir}/{hive_interactive_pid}")
+  pid_file = status_params.hive_interactive_pid
   cmd = format("{start_hiveserver2_interactive_path} {hive_pid_dir}/hive-server2-interactive.out {hive_log_dir}/hive-server2-interactive.err {pid_file} {hive_server_interactive_conf_dir} {hive_log_dir}")
 
   pid = get_user_call_output.get_user_call_output(format("cat {pid_file}"), user=params.hive_user, is_checked_call=False)[1]

+ 4 - 4
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py

@@ -61,9 +61,9 @@ if OSCheck.is_windows_family():
   webhcat_server_win_service_name = "templeton"
 else:
   hive_pid_dir = config['configurations']['hive-env']['hive_pid_dir']
-  hive_pid = 'hive-server.pid'
-  hive_interactive_pid = 'hive-interactive.pid'
-  hive_metastore_pid = 'hive.pid'
+  hive_pid = format("{hive_pid_dir}/hive-server.pid")
+  hive_interactive_pid = format("{hive_pid_dir}/hive-interactive.pid")
+  hive_metastore_pid = format("{hive_pid_dir}/hive.pid")
 
   hcat_pid_dir = config['configurations']['hive-env']['hcat_pid_dir'] #hcat_pid_dir
   webhcat_pid_file = format('{hcat_pid_dir}/webhcat.pid')
@@ -118,4 +118,4 @@ else:
   if 'role' in config and config['role'] in ["HIVE_SERVER", "HIVE_METASTORE", "HIVE_SERVER_INTERACTIVE"]:
     hive_config_dir = hive_server_conf_dir
     
-stack_name = default("/hostLevelParams/stack_name", None)
\ No newline at end of file
+stack_name = default("/hostLevelParams/stack_name", None)

+ 4 - 0
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py

@@ -160,5 +160,9 @@ class WebHCatServerDefault(WebHCatServer):
     import params
     return params.webhcat_user
 
+  def get_pid_files(self):
+    import status_params
+    return [status_params.webhcat_pid_file]
+
 if __name__ == "__main__":
   WebHCatServer().execute()

+ 4 - 0
ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/kafka_broker.py

@@ -125,5 +125,9 @@ class KafkaBroker(Script):
     import params
     return params.kafka_user
 
+  def get_pid_files(self):
+    import status_params
+    return [status_params.kafka_pid_file]
+
 if __name__ == "__main__":
   KafkaBroker().execute()

+ 4 - 0
ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/knox_gateway.py

@@ -273,6 +273,10 @@ class KnoxGatewayDefault(KnoxGateway):
     import params
     return params.knox_user
 
+  def get_pid_files(self):
+    import status_params
+    return [status_params.knox_pid_file]
+
 
 if __name__ == "__main__":
   KnoxGateway().execute()

+ 4 - 0
ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server.py

@@ -206,6 +206,10 @@ class OozieServerDefault(OozieServer):
     import params
     return params.oozie_user
 
+  def get_pid_files(self):
+    import status_params
+    return [status_params.pid_file]
+
 
 @OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
 class OozieServerWindows(OozieServer):

+ 4 - 0
ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_tagsync.py

@@ -92,5 +92,9 @@ class RangerTagsync(Script):
     import params
     return params.unix_user
 
+  def get_pid_files(self):
+    import status_params
+    return [status_params.tagsync_pid_file]
+
 if __name__ == "__main__":
   RangerTagsync().execute()

+ 4 - 0
ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/job_history_server.py

@@ -99,5 +99,9 @@ class JobHistoryServer(Script):
     import params
     return params.spark_user
 
+  def get_pid_files(self):
+    import status_params
+    return [status_params.spark_history_server_pid_file]
+
 if __name__ == "__main__":
   JobHistoryServer().execute()

+ 5 - 1
ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/livy_server.py

@@ -57,6 +57,10 @@ class LivyServer(Script):
 
     check_process_status(status_params.livy_server_pid_file)
 
+  def get_pid_files(self):
+    import status_params
+    return [status_params.livy_server_pid_file]
+
 
   def get_component_name(self):
     return "livy-server"
@@ -65,4 +69,4 @@ class LivyServer(Script):
     pass
 
 if __name__ == "__main__":
-  LivyServer().execute()
\ No newline at end of file
+  LivyServer().execute()

+ 4 - 0
ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/spark_thrift_server.py

@@ -83,5 +83,9 @@ class SparkThriftServer(Script):
     import params
     return params.hive_user
 
+  def get_pid_files(self):
+    import status_params
+    return [status_params.spark_thrift_server_pid_file]
+
 if __name__ == "__main__":
   SparkThriftServer().execute()

+ 4 - 0
ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/job_history_server.py

@@ -99,5 +99,9 @@ class JobHistoryServer(Script):
     import params
     return params.spark_user
 
+  def get_pid_files(self):
+    import status_params
+    return [status_params.spark_history_server_pid_file]
+
 if __name__ == "__main__":
   JobHistoryServer().execute()

+ 4 - 0
ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/spark_thrift_server.py

@@ -83,5 +83,9 @@ class SparkThriftServer(Script):
     import params
     return params.hive_user
 
+  def get_pid_files(self):
+    import status_params
+    return [status_params.spark_thrift_server_pid_file]
+
 if __name__ == "__main__":
   SparkThriftServer().execute()

+ 4 - 0
ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/drpc_server.py

@@ -135,5 +135,9 @@ class DrpcServer(Script):
     import params
     return params.storm_user
 
+  def get_pid_files(self):
+    import status_params
+    return [status_params.pid_drpc]
+
 if __name__ == "__main__":
   DrpcServer().execute()

+ 4 - 0
ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/nimbus.py

@@ -135,6 +135,10 @@ class NimbusDefault(Nimbus):
     import params
     return params.storm_user
 
+  def get_pid_files(self):
+    import status_params
+    return [status_params.pid_nimbus]
+
 @OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
 class NimbusWindows(Nimbus):
   def start(self, env):

+ 4 - 0
ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/pacemaker.py

@@ -134,5 +134,9 @@ class PaceMaker(Script):
       import params
       return params.storm_user
 
+  def get_pid_files(self):
+    import status_params
+    return [status_params.pid_pacemaker]
+
 if __name__ == "__main__":
     PaceMaker().execute()

+ 4 - 0
ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/rest_api.py

@@ -76,6 +76,10 @@ class StormRestApi(Script):
   def get_user(self):
     import params
     return params.storm_user
+
+  def get_pid_files(self):
+    import status_params
+    return [status_params.pid_rest_api]
   
 if __name__ == "__main__":
   StormRestApi().execute()

+ 4 - 0
ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/supervisor.py

@@ -108,6 +108,10 @@ class SupervisorDefault(Supervisor):
     import params
     return params.storm_user
 
+  def get_pid_files(self):
+    import status_params
+    return [status_params.pid_supervisor]
+
 if __name__ == "__main__":
   Supervisor().execute()
 

+ 4 - 0
ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/ui_server.py

@@ -176,5 +176,9 @@ class UiServerDefault(UiServer):
     import params
     return params.storm_user
 
+  def get_pid_files(self):
+    import status_params
+    return [status_params.pid_ui]
+
 if __name__ == "__main__":
   UiServer().execute()

+ 6 - 0
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/application_timeline_server.py

@@ -151,5 +151,11 @@ class ApplicationTimelineServerDefault(ApplicationTimelineServer):
     import params
     return params.yarn_user
 
+  def get_pid_files(self):
+    import status_params
+    Execute(format("mv {status_params.yarn_historyserver_pid_file_old} {status_params.yarn_historyserver_pid_file}"),
+            only_if = format("test -e {status_params.yarn_historyserver_pid_file_old}", user=status_params.yarn_user))
+    return [status_params.yarn_historyserver_pid_file]
+
 if __name__ == "__main__":
   ApplicationTimelineServer().execute()

+ 4 - 0
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py

@@ -186,5 +186,9 @@ class HistoryServerDefault(HistoryServer):
     import params
     return params.mapred_user
 
+  def get_pid_files(self):
+    import status_params
+    return [status_params.mapred_historyserver_pid_file]
+
 if __name__ == "__main__":
   HistoryServer().execute()

+ 4 - 0
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/nodemanager.py

@@ -167,5 +167,9 @@ class NodemanagerDefault(Nodemanager):
     import params
     return params.yarn_user
 
+  def get_pid_files(self):
+    import status_params
+    return [status_params.nodemanager_pid_file]
+
 if __name__ == "__main__":
   Nodemanager().execute()

+ 4 - 0
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py

@@ -282,6 +282,10 @@ class ResourcemanagerDefault(Resourcemanager):
   def get_user(self):
     import params
     return params.yarn_user
+
+  def get_pid_files(self):
+    import status_params
+    return [status_params.resourcemanager_pid_file]
   
 if __name__ == "__main__":
   Resourcemanager().execute()

+ 4 - 0
ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/package/scripts/zookeeper_server.py

@@ -170,6 +170,10 @@ class ZookeeperServerLinux(ZookeeperServer):
     import params
     return params.zk_user
 
+  def get_pid_files(self):
+    import status_params
+    return [status_params.zk_pid_file]
+
 
 @OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
 class ZookeeperServerWindows(ZookeeperServer):

+ 4 - 2
ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py

@@ -736,10 +736,11 @@ class TestHiveServer(RMFTestCase):
     except:
       self.assert_configure_default()
 
+  @patch("resource_management.libraries.script.Script.post_start")
   @patch("resource_management.libraries.functions.copy_tarball.copy_to_hdfs")
   @patch("os.path.exists", new = MagicMock(return_value=True))
   @patch("platform.linux_distribution", new = MagicMock(return_value="Linux"))
-  def test_stop_during_upgrade(self, copy_to_hdfs_mock):
+  def test_stop_during_upgrade(self, copy_to_hdfs_mock, post_start_mock):
 
     hiveServerVersionOutput = """WARNING: Use "yarn jar" to launch YARN applications.
 Hive 1.2.1.2.3.0.0-2434
@@ -762,8 +763,9 @@ From source with checksum 150f554beae04f76f814f59549dead8b"""
       tries=1, user='hive')
 
 
+  @patch("resource_management.libraries.script.Script.post_start")
   @patch("resource_management.libraries.functions.copy_tarball.copy_to_hdfs")
-  def test_stop_during_upgrade_with_default_conf_server(self, copy_to_hdfs_mock):
+  def test_stop_during_upgrade_with_default_conf_server(self, copy_to_hdfs_mock, post_start_mock):
     hiveServerVersionOutput = """WARNING: Use "yarn jar" to launch YARN applications.
 Hive 1.2.1.2.3.0.0-2434
 Subversion git://ip-10-0-0-90.ec2.internal/grid/0/jenkins/workspace/HDP-dal-centos6/bigtop/build/hive/rpm/BUILD/hive-1.2.1.2.3.0.0 -r a77a00ae765a73b2957337e96ed5a0dbb2e60dfb
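
Because post_start() now runs after every successful start and raises Fail when a pid file is absent, unit tests that drive start() without real processes have to patch it out, which is exactly what the two tests above do. The same pattern for any other start test, as a sketch (the test name is illustrative; patch comes from the mock library these tests already import):

from mock.mock import patch

@patch("resource_management.libraries.script.Script.post_start")
def test_start_default(self, post_start_mock):
  # Drive the component's start() as usual (e.g. via RMFTestCase's
  # executeScript); the pid-file check in post_start() is mocked out,
  # so the test needs no real pid file on disk.
  pass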

+ 3 - 0
ambari-server/src/test/python/stacks/2.0.6/configs/default_ams_embedded.json

@@ -42,6 +42,9 @@
     "taskId": 152, 
     "public_hostname": "c6401.ambari.apache.org", 
     "configurations": {
+        "ams-grafana-env" : {
+            "metrics_grafana_pid_dir" : "/var/run/ambari-metrics-monitor"
+        },
         "mapred-site": {
             "mapreduce.jobhistory.address": "c6402.ambari.apache.org:10020", 
             "mapreduce.cluster.administrators": " hadoop",