Browse Source

AMBARI-10439 - [WinTP2] Merge HDPWIN YARN package scripts into common services

Artem Baranchuk committed 10 years ago — parent commit 316021844b
29 changed files with 607 additions and 1048 deletions
  1. 26 17
      ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/application_timeline_server.py
  2. 27 11
      ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py
  3. 87 0
      ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapred_service_check.py
  4. 22 12
      ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapreduce2_client.py
  5. 27 17
      ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/nodemanager.py
  6. 5 207
      ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params.py
  7. 229 0
      ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
  8. 1 0
      ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_windows.py
  9. 49 17
      ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py
  10. 13 0
      ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/service.py
  11. 52 0
      ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/service_check.py
  12. 22 18
      ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/status_params.py
  13. 25 0
      ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py
  14. 22 12
      ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn_client.py
  15. 0 54
      ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/YARN/package/scripts/application_timeline_server.py
  16. 0 53
      ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/YARN/package/scripts/historyserver.py
  17. 0 105
      ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/YARN/package/scripts/mapred_service_check.py
  18. 0 43
      ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/YARN/package/scripts/mapreduce2_client.py
  19. 0 53
      ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/YARN/package/scripts/nodemanager.py
  20. 0 77
      ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/YARN/package/scripts/resourcemanager.py
  21. 0 68
      ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/YARN/package/scripts/service_check.py
  22. 0 26
      ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/YARN/package/scripts/service_mapping.py
  23. 0 45
      ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/YARN/package/scripts/yarn.py
  24. 0 44
      ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/YARN/package/scripts/yarn_client.py
  25. 0 40
      ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/YARN/package/templates/container-executor.cfg.j2
  26. 0 21
      ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/YARN/package/templates/exclude_hosts_list.j2
  27. 0 35
      ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/YARN/package/templates/mapreduce.conf.j2
  28. 0 38
      ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/YARN/package/templates/taskcontroller.cfg.j2
  29. 0 35
      ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/YARN/package/templates/yarn.conf.j2

+ 26 - 17
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/application_timeline_server.py

@@ -25,24 +25,44 @@ from resource_management.libraries.functions.security_commons import build_expec
   cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties,\
   FILE_TYPE_XML
 from resource_management.libraries.functions.format import format
-
 from yarn import yarn
 from service import service
+from ambari_commons import OSConst
+from ambari_commons.os_family_impl import OsFamilyImpl
 
-class ApplicationTimelineServer(Script):
-
-  def get_stack_to_component(self):
-    return {"HDP": "hadoop-yarn-timelineserver"}
 
+class ApplicationTimelineServer(Script):
   def install(self, env):
     self.install_packages(env)
-    #self.configure(env)
+
+  def start(self, env, rolling_restart=False):
+    import params
+    env.set_params(params)
+    self.configure(env) # FOR SECURITY
+    service('timelineserver', action='start')
+
+  def stop(self, env, rolling_restart=False):
+    import params
+    env.set_params(params)
+    service('timelineserver', action='stop')
 
   def configure(self, env):
     import params
     env.set_params(params)
     yarn(name='apptimelineserver')
 
+
+@OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
+class ApplicationTimelineServerWindows(ApplicationTimelineServer):
+  def status(self, env):
+    service('timelineserver', action='status')
+
+
+@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
+class ApplicationTimelineServerDefault(ApplicationTimelineServer):
+  def get_stack_to_component(self):
+    return {"HDP": "hadoop-yarn-timelineserver"}
+
   def pre_rolling_restart(self, env):
     Logger.info("Executing Rolling Upgrade pre-restart")
     import params
@@ -51,17 +71,6 @@ class ApplicationTimelineServer(Script):
     if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
       Execute(format("hdp-select set hadoop-yarn-timelineserver {version}"))
 
-  def start(self, env, rolling_restart=False):
-    import params
-    env.set_params(params)
-    self.configure(env) # FOR SECURITY
-    service('timelineserver', action='start')
-
-  def stop(self, env, rolling_restart=False):
-    import params
-    env.set_params(params)
-    service('timelineserver', action='stop')
-
   def status(self, env):
     import status_params
     env.set_params(status_params)

+ 27 - 11
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py

@@ -26,23 +26,44 @@ from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.security_commons import build_expectations, \
   cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \
   FILE_TYPE_XML
-
 from yarn import yarn
 from service import service
+from ambari_commons import OSConst
+from ambari_commons.os_family_impl import OsFamilyImpl
 
-class HistoryServer(Script):
-
-  def get_stack_to_component(self):
-    return {"HDP": "hadoop-mapreduce-historyserver"}
 
+class HistoryServer(Script):
   def install(self, env):
     self.install_packages(env)
 
+  def stop(self, env, rolling_restart=False):
+    import params
+    env.set_params(params)
+    service('historyserver', action='stop', serviceName='mapreduce')
+
   def configure(self, env):
     import params
     env.set_params(params)
     yarn(name="historyserver")
 
+
+@OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
+class HistoryserverWindows(HistoryServer):
+  def start(self, env):
+    import params
+    env.set_params(params)
+    self.configure(env)
+    service('historyserver', action='start', serviceName='mapreduce')
+
+  def status(self, env):
+    service('historyserver', action='status')
+
+
+@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
+class HistoryServerDefault(HistoryServer):
+  def get_stack_to_component(self):
+    return {"HDP": "hadoop-mapreduce-historyserver"}
+
   def pre_rolling_restart(self, env):
     Logger.info("Executing Rolling Upgrade pre-restart")
     import params
@@ -59,12 +80,6 @@ class HistoryServer(Script):
     copy_tarballs_to_hdfs('mapreduce', 'hadoop-mapreduce-historyserver', params.mapred_user, params.hdfs_user, params.user_group)
     service('historyserver', action='start', serviceName='mapreduce')
 
-
-  def stop(self, env, rolling_restart=False):
-    import params
-    env.set_params(params)
-    service('historyserver', action='stop', serviceName='mapreduce')
-
   def status(self, env):
     import status_params
     env.set_params(status_params)
@@ -126,5 +141,6 @@ class HistoryServer(Script):
     else:
       self.put_structured_out({"securityState": "UNSECURED"})
 
+
 if __name__ == "__main__":
   HistoryServer().execute()

+ 87 - 0
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapred_service_check.py

@@ -20,8 +20,94 @@ Ambari Agent
 """
 
 from resource_management import *
+from ambari_commons import OSConst
+from ambari_commons.os_family_impl import OsFamilyImpl
+
 
 class MapReduce2ServiceCheck(Script):
+  def service_check(self, env):
+    pass
+
+
+@OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
+class MapReduce2ServiceCheckWindows(MapReduce2ServiceCheck):
+  def service_check(self, env):
+    import params
+
+    env.set_params(params)
+
+    component_type = 'hs'
+    if params.hadoop_ssl_enabled:
+      component_address = params.hs_webui_address
+    else:
+      component_address = params.hs_webui_address
+
+    validateStatusFileName = "validateYarnComponentStatus.py"
+    validateStatusFilePath = os.path.join(os.path.dirname(params.hadoop_home), "temp", validateStatusFileName)
+    python_executable = sys.executable
+    validateStatusCmd = "{0} {1} {2} -p {3} -s {4}".format(
+      python_executable, validateStatusFilePath, component_type, component_address, params.hadoop_ssl_enabled)
+
+    if params.security_enabled:
+      kinit_cmd = "{0} -kt {1} {2};".format(params.kinit_path_local, params.smoke_user_keytab, params.smokeuser)
+      smoke_cmd = kinit_cmd + validateStatusCmd
+    else:
+      smoke_cmd = validateStatusCmd
+
+    File(validateStatusFilePath,
+         content=StaticFile(validateStatusFileName)
+    )
+
+    Execute(smoke_cmd,
+            tries=3,
+            try_sleep=5,
+            logoutput=True
+    )
+
+    # hadoop_exe = os.path.join(params.hadoop_home, "bin", "hadoop")
+    #
+    # tested_file = os.path.join(params.hadoop_home, "bin", "hadoop.cmd")
+    # jar_path = os.path.join(params.hadoop_mapred2_jar_location, params.hadoopMapredExamplesJarName)
+    # input_file = format("/user/hadoop/mapredsmokeinput")
+    # output_file = format("/user/hadoop/mapredsmokeoutput")
+    # cleanup_cmd = format("cmd /C {hadoop_exe} fs -rm -r -f {output_file} {input_file}")
+    # create_file_cmd = format("cmd /C {hadoop_exe} fs -put {tested_file} {input_file}")
+    # run_wordcount_job = format("cmd /C {hadoop_exe} jar {jar_path} wordcount {input_file} {output_file}")
+    # test_cmd = format("cmd /C {hadoop_exe} fs -test -e {output_file}")
+    #
+    # if params.security_enabled:
+    #   kinit_cmd = "{0} -kt {1} {2};".format(kinit_path_local, smoke_user_keytab, smokeuser)
+    #   Execute(kinit_cmd)
+    #
+    # Execute(cleanup_cmd,
+    #         tries=1,
+    #         try_sleep=5,
+    #         logoutput=True,
+    #         user=params.hdfs_user
+    # )
+    #
+    # Execute(create_file_cmd,
+    #         tries=1,
+    #         try_sleep=5,
+    #         logoutput=True,
+    #         user=params.hdfs_user
+    # )
+    #
+    # Execute(run_wordcount_job,
+    #         tries=1,
+    #         try_sleep=5,
+    #         logoutput=True,
+    #         user=params.hdfs_user
+    # )
+    #
+    # Execute(test_cmd,
+    #         logoutput=True,
+    #         user=params.hdfs_user
+    # )
+
+
+@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
+class MapReduce2ServiceCheckDefault(MapReduce2ServiceCheck):
   def service_check(self, env):
     import params
     env.set_params(params)
@@ -73,5 +159,6 @@ class MapReduce2ServiceCheck(Script):
                   conf_dir=params.hadoop_conf_dir
     )
 
+
 if __name__ == "__main__":
   MapReduce2ServiceCheck().execute()

+ 22 - 12
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapreduce2_client.py

@@ -21,21 +21,12 @@ Ambari Agent
 
 import sys
 from resource_management import *
-
 from yarn import yarn
+from ambari_commons import OSConst
+from ambari_commons.os_family_impl import OsFamilyImpl
 
-class MapReduce2Client(Script):
-
-  def get_stack_to_component(self):
-    return {"HDP": "hadoop-client"}
-
-  def pre_rolling_restart(self, env):
-    import params
-    env.set_params(params)
-
-    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
-      Execute(format("hdp-select set hadoop-client {version}"))
 
+class MapReduce2Client(Script):
   def install(self, env):
     self.install_packages(env)
     self.configure(env)
@@ -48,5 +39,24 @@ class MapReduce2Client(Script):
   def status(self, env):
     raise ClientComponentHasNoStatus()
 
+
+@OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
+class MapReduce2ClientWindows(MapReduce2Client):
+  pass
+
+
+@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
+class MapReduce2ClientDefault(MapReduce2Client):
+  def get_stack_to_component(self):
+    return {"HDP": "hadoop-client"}
+
+  def pre_rolling_restart(self, env):
+    import params
+    env.set_params(params)
+
+    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
+      Execute(format("hdp-select set hadoop-client {version}"))
+
+
 if __name__ == "__main__":
   MapReduce2Client().execute()

+ 27 - 17
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/nodemanager.py

@@ -27,23 +27,44 @@ from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.security_commons import build_expectations, \
   cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \
   FILE_TYPE_XML
-
 from yarn import yarn
 from service import service
+from ambari_commons import OSConst
+from ambari_commons.os_family_impl import OsFamilyImpl
 
-class Nodemanager(Script):
-
-  def get_stack_to_component(self):
-    return {"HDP": "hadoop-yarn-nodemanager"}
 
+class Nodemanager(Script):
   def install(self, env):
     self.install_packages(env)
 
+  def stop(self, env, rolling_restart=False):
+    import params
+    env.set_params(params)
+    service('nodemanager',action='stop')
+
+  def start(self, env, rolling_restart=False):
+    import params
+    env.set_params(params)
+    self.configure(env) # FOR SECURITY
+    service('nodemanager',action='start')
+
   def configure(self, env):
     import params
     env.set_params(params)
     yarn(name="nodemanager")
 
+
+@OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
+class NodemanagerWindows(Nodemanager):
+  def status(self, env):
+    service('nodemanager', action='status')
+
+
+@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
+class NodemanagerDefault(Nodemanager):
+  def get_stack_to_component(self):
+    return {"HDP": "hadoop-yarn-nodemanager"}
+
   def pre_rolling_restart(self, env):
     Logger.info("Executing NodeManager Rolling Upgrade pre-restart")
     import params
@@ -52,12 +73,6 @@ class Nodemanager(Script):
     if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
       Execute(format("hdp-select set hadoop-yarn-nodemanager {version}"))
 
-  def start(self, env, rolling_restart=False):
-    import params
-    env.set_params(params)
-    self.configure(env) # FOR SECURITY
-    service('nodemanager',action='start')
-
   def post_rolling_restart(self, env):
     Logger.info("Executing NodeManager Rolling Upgrade post-restart")
     import params
@@ -65,12 +80,6 @@ class Nodemanager(Script):
 
     nodemanager_upgrade.post_upgrade_check()
 
-  def stop(self, env, rolling_restart=False):
-    import params
-    env.set_params(params)
-
-    service('nodemanager',action='stop')
-
   def status(self, env):
     import status_params
     env.set_params(status_params)
@@ -136,5 +145,6 @@ class Nodemanager(Script):
     else:
       self.put_structured_out({"securityState": "UNSECURED"})
 
+
 if __name__ == "__main__":
   Nodemanager().execute()

+ 5 - 207
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params.py

@@ -1,3 +1,4 @@
+#!/usr/bin/env python
 """
 Licensed to the Apache Software Foundation (ASF) under one
 or more contributor license agreements.  See the NOTICE file
@@ -18,212 +19,9 @@ limitations under the License.
 Ambari Agent
 
 """
-import os
-from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
-from resource_management.libraries.functions.default import default
-from resource_management import *
-import status_params
+from ambari_commons import OSCheck
 
-# server configurations
-config = Script.get_config()
-tmp_dir = Script.get_tmp_dir()
-
-stack_name = default("/hostLevelParams/stack_name", None)
-
-# This is expected to be of the form #.#.#.#
-stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
-
-# New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
-version = default("/commandParams/version", None)
-
-hostname = config['hostname']
-
-#hadoop params
-if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
-  yarn_role_root = "hadoop-yarn-client"
-  mapred_role_root = "hadoop-mapreduce-client"
-
-  command_role = default("/role", "")
-  if command_role == "APP_TIMELINE_SERVER":
-    yarn_role_root = "hadoop-yarn-timelineserver"
-  elif command_role == "HISTORYSERVER":
-    mapred_role_root = "hadoop-mapreduce-historyserver"
-  elif command_role == "MAPREDUCE2_CLIENT":
-    mapred_role_root = "hadoop-mapreduce-client"
-  elif command_role == "NODEMANAGER":
-    yarn_role_root = "hadoop-yarn-nodemanager"
-  elif command_role == "RESOURCEMANAGER":
-    yarn_role_root = "hadoop-yarn-resourcemanager"
-  elif command_role == "YARN_CLIENT":
-    yarn_role_root = "hadoop-yarn-client"
-
-  hadoop_libexec_dir          = "/usr/hdp/current/hadoop-client/libexec"
-  hadoop_bin                  = "/usr/hdp/current/hadoop-client/sbin"
-  hadoop_bin_dir              = "/usr/hdp/current/hadoop-client/bin"
-
-  hadoop_mapred2_jar_location = format("/usr/hdp/current/{mapred_role_root}")
-  mapred_bin                  = format("/usr/hdp/current/{mapred_role_root}/sbin")
-
-  hadoop_yarn_home            = format("/usr/hdp/current/{yarn_role_root}")
-  yarn_bin                    = format("/usr/hdp/current/{yarn_role_root}/sbin")
-  yarn_container_bin          = format("/usr/hdp/current/{yarn_role_root}/bin")
+if OSCheck.is_windows_family():
+  from params_windows import *
 else:
-  hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
-  hadoop_bin = "/usr/lib/hadoop/sbin"
-  hadoop_bin_dir = "/usr/bin"
-  hadoop_yarn_home = '/usr/lib/hadoop-yarn'
-  hadoop_mapred2_jar_location = "/usr/lib/hadoop-mapreduce"
-  mapred_bin = "/usr/lib/hadoop-mapreduce/sbin"
-  yarn_bin = "/usr/lib/hadoop-yarn/sbin"
-  yarn_container_bin = "/usr/lib/hadoop-yarn/bin"
-
-hadoop_conf_dir = "/etc/hadoop/conf"
-limits_conf_dir = "/etc/security/limits.d"
-execute_path = os.environ['PATH'] + os.pathsep + hadoop_bin_dir + os.pathsep + yarn_container_bin
-
-ulimit_cmd = "ulimit -c unlimited;"
-
-mapred_user = status_params.mapred_user
-yarn_user = status_params.yarn_user
-hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
-
-smokeuser = config['configurations']['cluster-env']['smokeuser']
-smokeuser_principal = config['configurations']['cluster-env']['smokeuser_principal_name']
-security_enabled = config['configurations']['cluster-env']['security_enabled']
-smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
-yarn_executor_container_group = config['configurations']['yarn-site']['yarn.nodemanager.linux-container-executor.group']
-kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
-rm_hosts = config['clusterHostInfo']['rm_host']
-rm_host = rm_hosts[0]
-rm_port = config['configurations']['yarn-site']['yarn.resourcemanager.webapp.address'].split(':')[-1]
-rm_https_port = "8090"
-# TODO UPGRADE default, update site during upgrade
-rm_nodes_exclude_path = default("/configurations/yarn-site/yarn.resourcemanager.nodes.exclude-path","/etc/hadoop/conf/yarn.exclude")
-
-java64_home = config['hostLevelParams']['java_home']
-hadoop_ssl_enabled = default("/configurations/core-site/hadoop.ssl.enabled", False)
-
-yarn_heapsize = config['configurations']['yarn-env']['yarn_heapsize']
-resourcemanager_heapsize = config['configurations']['yarn-env']['resourcemanager_heapsize']
-nodemanager_heapsize = config['configurations']['yarn-env']['nodemanager_heapsize']
-apptimelineserver_heapsize = default("/configurations/yarn-env/apptimelineserver_heapsize", 1024)
-ats_leveldb_dir = config['configurations']['yarn-site']['yarn.timeline-service.leveldb-timeline-store.path']
-yarn_log_dir_prefix = config['configurations']['yarn-env']['yarn_log_dir_prefix']
-yarn_pid_dir_prefix = status_params.yarn_pid_dir_prefix
-mapred_pid_dir_prefix = status_params.mapred_pid_dir_prefix
-mapred_log_dir_prefix = config['configurations']['mapred-env']['mapred_log_dir_prefix']
-mapred_env_sh_template = config['configurations']['mapred-env']['content']
-yarn_env_sh_template = config['configurations']['yarn-env']['content']
-
-if len(rm_hosts) > 1:
-  additional_rm_host = rm_hosts[1]
-  rm_webui_address = format("{rm_host}:{rm_port},{additional_rm_host}:{rm_port}")
-  rm_webui_https_address = format("{rm_host}:{rm_https_port},{additional_rm_host}:{rm_https_port}")
-else:
-  rm_webui_address = format("{rm_host}:{rm_port}")
-  rm_webui_https_address = format("{rm_host}:{rm_https_port}")
-
-nm_webui_address = config['configurations']['yarn-site']['yarn.nodemanager.webapp.address']
-hs_webui_address = config['configurations']['mapred-site']['mapreduce.jobhistory.webapp.address']
-nm_address = config['configurations']['yarn-site']['yarn.nodemanager.address']  # still contains 0.0.0.0
-if hostname and nm_address and nm_address.startswith("0.0.0.0:"):
-  nm_address = nm_address.replace("0.0.0.0", hostname)
-
-nm_local_dirs = config['configurations']['yarn-site']['yarn.nodemanager.local-dirs']
-nm_log_dirs = config['configurations']['yarn-site']['yarn.nodemanager.log-dirs']
-
-distrAppJarName = "hadoop-yarn-applications-distributedshell-2.*.jar"
-hadoopMapredExamplesJarName = "hadoop-mapreduce-examples-2.*.jar"
-
-yarn_pid_dir = status_params.yarn_pid_dir
-mapred_pid_dir = status_params.mapred_pid_dir
-
-mapred_log_dir = format("{mapred_log_dir_prefix}/{mapred_user}")
-yarn_log_dir = format("{yarn_log_dir_prefix}/{yarn_user}")
-mapred_job_summary_log = format("{mapred_log_dir_prefix}/{mapred_user}/hadoop-mapreduce.jobsummary.log")
-yarn_job_summary_log = format("{yarn_log_dir_prefix}/{yarn_user}/hadoop-mapreduce.jobsummary.log")
-
-user_group = config['configurations']['cluster-env']['user_group']
-
-#exclude file
-exclude_hosts = default("/clusterHostInfo/decom_nm_hosts", [])
-exclude_file_path = default("/configurations/yarn-site/yarn.resourcemanager.nodes.exclude-path","/etc/hadoop/conf/yarn.exclude")
-
-ats_host = set(default("/clusterHostInfo/app_timeline_server_hosts", []))
-has_ats = not len(ats_host) == 0
-
-# default kinit commands
-rm_kinit_cmd = ""
-yarn_timelineservice_kinit_cmd = ""
-nodemanager_kinit_cmd = ""
-
-if security_enabled:
-  _rm_principal_name = config['configurations']['yarn-site']['yarn.resourcemanager.principal']
-  _rm_principal_name = _rm_principal_name.replace('_HOST',hostname.lower())
-  _rm_keytab = config['configurations']['yarn-site']['yarn.resourcemanager.keytab']
-  rm_kinit_cmd = format("{kinit_path_local} -kt {_rm_keytab} {_rm_principal_name};")
-
-  # YARN timeline security options are only available in HDP Champlain
-  if has_ats:
-    _yarn_timelineservice_principal_name = config['configurations']['yarn-site']['yarn.timeline-service.principal']
-    _yarn_timelineservice_principal_name = _yarn_timelineservice_principal_name.replace('_HOST', hostname.lower())
-    _yarn_timelineservice_keytab = config['configurations']['yarn-site']['yarn.timeline-service.keytab']
-    yarn_timelineservice_kinit_cmd = format("{kinit_path_local} -kt {_yarn_timelineservice_keytab} {_yarn_timelineservice_principal_name};")
-
-  if 'yarn.nodemanager.principal' in config['configurations']['yarn-site']:
-    _nodemanager_principal_name = default('/configurations/yarn-site/yarn.nodemanager.principal', None)
-    if _nodemanager_principal_name:
-      _nodemanager_principal_name = _nodemanager_principal_name.replace('_HOST', hostname.lower())
-
-    _nodemanager_keytab = config['configurations']['yarn-site']['yarn.nodemanager.keytab']
-    nodemanager_kinit_cmd = format("{kinit_path_local} -kt {_nodemanager_keytab} {_nodemanager_principal_name};")
-
-
-yarn_log_aggregation_enabled = config['configurations']['yarn-site']['yarn.log-aggregation-enable']
-yarn_nm_app_log_dir =  config['configurations']['yarn-site']['yarn.nodemanager.remote-app-log-dir']
-mapreduce_jobhistory_intermediate_done_dir = config['configurations']['mapred-site']['mapreduce.jobhistory.intermediate-done-dir']
-mapreduce_jobhistory_done_dir = config['configurations']['mapred-site']['mapreduce.jobhistory.done-dir']
-jobhistory_heapsize = default("/configurations/mapred-env/jobhistory_heapsize", "900")
-
-# Tez-related properties
-tez_user = config['configurations']['tez-env']['tez_user']
-
-# Tez jars
-tez_local_api_jars = '/usr/lib/tez/tez*.jar'
-tez_local_lib_jars = '/usr/lib/tez/lib/*.jar'
-app_dir_files = {tez_local_api_jars:None}
-
-# Tez libraries
-tez_lib_uris = default("/configurations/tez-site/tez.lib.uris", None)
-
-#for create_hdfs_directory
-hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
-hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
-import functools
-#create partial functions with common arguments for every HdfsDirectory call
-#to create hdfs directory we need to call params.HdfsDirectory in code
-HdfsDirectory = functools.partial(
-  HdfsDirectory,
-  conf_dir=hadoop_conf_dir,
-  hdfs_user=hdfs_user,
-  security_enabled = security_enabled,
-  keytab = hdfs_user_keytab,
-  kinit_path_local = kinit_path_local,
-  bin_dir = hadoop_bin_dir
-)
-update_exclude_file_only = default("/commandParams/update_exclude_file_only",False)
-
-mapred_tt_group = default("/configurations/mapred-site/mapreduce.tasktracker.group", user_group)
-
-#taskcontroller.cfg
-
-mapred_local_dir = "/tmp/hadoop-mapred/mapred/local"
-hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefix']
-min_user_id = config['configurations']['yarn-env']['min_user_id']
-
-# Node labels
-node_labels_dir = default("/configurations/yarn-site/yarn.node-labels.fs-store.root-dir", None)
-node_label_enable = config['configurations']['yarn-site']['yarn.node-labels.enabled']
-
-cgroups_dir = "/cgroups_test/cpu"
+  from params_linux import *

+ 229 - 0
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py

@@ -0,0 +1,229 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+import os
+from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
+from resource_management.libraries.functions.default import default
+from resource_management import *
+import status_params
+
+# server configurations
+config = Script.get_config()
+tmp_dir = Script.get_tmp_dir()
+
+stack_name = default("/hostLevelParams/stack_name", None)
+
+# This is expected to be of the form #.#.#.#
+stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
+hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
+
+# New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
+version = default("/commandParams/version", None)
+
+hostname = config['hostname']
+
+#hadoop params
+if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
+  yarn_role_root = "hadoop-yarn-client"
+  mapred_role_root = "hadoop-mapreduce-client"
+
+  command_role = default("/role", "")
+  if command_role == "APP_TIMELINE_SERVER":
+    yarn_role_root = "hadoop-yarn-timelineserver"
+  elif command_role == "HISTORYSERVER":
+    mapred_role_root = "hadoop-mapreduce-historyserver"
+  elif command_role == "MAPREDUCE2_CLIENT":
+    mapred_role_root = "hadoop-mapreduce-client"
+  elif command_role == "NODEMANAGER":
+    yarn_role_root = "hadoop-yarn-nodemanager"
+  elif command_role == "RESOURCEMANAGER":
+    yarn_role_root = "hadoop-yarn-resourcemanager"
+  elif command_role == "YARN_CLIENT":
+    yarn_role_root = "hadoop-yarn-client"
+
+  hadoop_libexec_dir          = "/usr/hdp/current/hadoop-client/libexec"
+  hadoop_bin                  = "/usr/hdp/current/hadoop-client/sbin"
+  hadoop_bin_dir              = "/usr/hdp/current/hadoop-client/bin"
+
+  hadoop_mapred2_jar_location = format("/usr/hdp/current/{mapred_role_root}")
+  mapred_bin                  = format("/usr/hdp/current/{mapred_role_root}/sbin")
+
+  hadoop_yarn_home            = format("/usr/hdp/current/{yarn_role_root}")
+  yarn_bin                    = format("/usr/hdp/current/{yarn_role_root}/sbin")
+  yarn_container_bin          = format("/usr/hdp/current/{yarn_role_root}/bin")
+else:
+  hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
+  hadoop_bin = "/usr/lib/hadoop/sbin"
+  hadoop_bin_dir = "/usr/bin"
+  hadoop_yarn_home = '/usr/lib/hadoop-yarn'
+  hadoop_mapred2_jar_location = "/usr/lib/hadoop-mapreduce"
+  mapred_bin = "/usr/lib/hadoop-mapreduce/sbin"
+  yarn_bin = "/usr/lib/hadoop-yarn/sbin"
+  yarn_container_bin = "/usr/lib/hadoop-yarn/bin"
+
+hadoop_conf_dir = "/etc/hadoop/conf"
+limits_conf_dir = "/etc/security/limits.d"
+execute_path = os.environ['PATH'] + os.pathsep + hadoop_bin_dir + os.pathsep + yarn_container_bin
+
+ulimit_cmd = "ulimit -c unlimited;"
+
+mapred_user = status_params.mapred_user
+yarn_user = status_params.yarn_user
+hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+
+smokeuser = config['configurations']['cluster-env']['smokeuser']
+smokeuser_principal = config['configurations']['cluster-env']['smokeuser_principal_name']
+security_enabled = config['configurations']['cluster-env']['security_enabled']
+smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
+yarn_executor_container_group = config['configurations']['yarn-site']['yarn.nodemanager.linux-container-executor.group']
+kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
+rm_hosts = config['clusterHostInfo']['rm_host']
+rm_host = rm_hosts[0]
+rm_port = config['configurations']['yarn-site']['yarn.resourcemanager.webapp.address'].split(':')[-1]
+rm_https_port = "8090"
+# TODO UPGRADE default, update site during upgrade
+rm_nodes_exclude_path = default("/configurations/yarn-site/yarn.resourcemanager.nodes.exclude-path","/etc/hadoop/conf/yarn.exclude")
+
+java64_home = config['hostLevelParams']['java_home']
+hadoop_ssl_enabled = default("/configurations/core-site/hadoop.ssl.enabled", False)
+
+yarn_heapsize = config['configurations']['yarn-env']['yarn_heapsize']
+resourcemanager_heapsize = config['configurations']['yarn-env']['resourcemanager_heapsize']
+nodemanager_heapsize = config['configurations']['yarn-env']['nodemanager_heapsize']
+apptimelineserver_heapsize = default("/configurations/yarn-env/apptimelineserver_heapsize", 1024)
+ats_leveldb_dir = config['configurations']['yarn-site']['yarn.timeline-service.leveldb-timeline-store.path']
+yarn_log_dir_prefix = config['configurations']['yarn-env']['yarn_log_dir_prefix']
+yarn_pid_dir_prefix = status_params.yarn_pid_dir_prefix
+mapred_pid_dir_prefix = status_params.mapred_pid_dir_prefix
+mapred_log_dir_prefix = config['configurations']['mapred-env']['mapred_log_dir_prefix']
+mapred_env_sh_template = config['configurations']['mapred-env']['content']
+yarn_env_sh_template = config['configurations']['yarn-env']['content']
+
+if len(rm_hosts) > 1:
+  additional_rm_host = rm_hosts[1]
+  rm_webui_address = format("{rm_host}:{rm_port},{additional_rm_host}:{rm_port}")
+  rm_webui_https_address = format("{rm_host}:{rm_https_port},{additional_rm_host}:{rm_https_port}")
+else:
+  rm_webui_address = format("{rm_host}:{rm_port}")
+  rm_webui_https_address = format("{rm_host}:{rm_https_port}")
+
+nm_webui_address = config['configurations']['yarn-site']['yarn.nodemanager.webapp.address']
+hs_webui_address = config['configurations']['mapred-site']['mapreduce.jobhistory.webapp.address']
+nm_address = config['configurations']['yarn-site']['yarn.nodemanager.address']  # still contains 0.0.0.0
+if hostname and nm_address and nm_address.startswith("0.0.0.0:"):
+  nm_address = nm_address.replace("0.0.0.0", hostname)
+
+nm_local_dirs = config['configurations']['yarn-site']['yarn.nodemanager.local-dirs']
+nm_log_dirs = config['configurations']['yarn-site']['yarn.nodemanager.log-dirs']
+
+distrAppJarName = "hadoop-yarn-applications-distributedshell-2.*.jar"
+hadoopMapredExamplesJarName = "hadoop-mapreduce-examples-2.*.jar"
+
+yarn_pid_dir = status_params.yarn_pid_dir
+mapred_pid_dir = status_params.mapred_pid_dir
+
+mapred_log_dir = format("{mapred_log_dir_prefix}/{mapred_user}")
+yarn_log_dir = format("{yarn_log_dir_prefix}/{yarn_user}")
+mapred_job_summary_log = format("{mapred_log_dir_prefix}/{mapred_user}/hadoop-mapreduce.jobsummary.log")
+yarn_job_summary_log = format("{yarn_log_dir_prefix}/{yarn_user}/hadoop-mapreduce.jobsummary.log")
+
+user_group = config['configurations']['cluster-env']['user_group']
+
+#exclude file
+exclude_hosts = default("/clusterHostInfo/decom_nm_hosts", [])
+exclude_file_path = default("/configurations/yarn-site/yarn.resourcemanager.nodes.exclude-path","/etc/hadoop/conf/yarn.exclude")
+
+ats_host = set(default("/clusterHostInfo/app_timeline_server_hosts", []))
+has_ats = not len(ats_host) == 0
+
+# default kinit commands
+rm_kinit_cmd = ""
+yarn_timelineservice_kinit_cmd = ""
+nodemanager_kinit_cmd = ""
+
+if security_enabled:
+  _rm_principal_name = config['configurations']['yarn-site']['yarn.resourcemanager.principal']
+  _rm_principal_name = _rm_principal_name.replace('_HOST',hostname.lower())
+  _rm_keytab = config['configurations']['yarn-site']['yarn.resourcemanager.keytab']
+  rm_kinit_cmd = format("{kinit_path_local} -kt {_rm_keytab} {_rm_principal_name};")
+
+  # YARN timeline security options are only available in HDP Champlain
+  if has_ats:
+    _yarn_timelineservice_principal_name = config['configurations']['yarn-site']['yarn.timeline-service.principal']
+    _yarn_timelineservice_principal_name = _yarn_timelineservice_principal_name.replace('_HOST', hostname.lower())
+    _yarn_timelineservice_keytab = config['configurations']['yarn-site']['yarn.timeline-service.keytab']
+    yarn_timelineservice_kinit_cmd = format("{kinit_path_local} -kt {_yarn_timelineservice_keytab} {_yarn_timelineservice_principal_name};")
+
+  if 'yarn.nodemanager.principal' in config['configurations']['yarn-site']:
+    _nodemanager_principal_name = default('/configurations/yarn-site/yarn.nodemanager.principal', None)
+    if _nodemanager_principal_name:
+      _nodemanager_principal_name = _nodemanager_principal_name.replace('_HOST', hostname.lower())
+
+    _nodemanager_keytab = config['configurations']['yarn-site']['yarn.nodemanager.keytab']
+    nodemanager_kinit_cmd = format("{kinit_path_local} -kt {_nodemanager_keytab} {_nodemanager_principal_name};")
+
+
+yarn_log_aggregation_enabled = config['configurations']['yarn-site']['yarn.log-aggregation-enable']
+yarn_nm_app_log_dir =  config['configurations']['yarn-site']['yarn.nodemanager.remote-app-log-dir']
+mapreduce_jobhistory_intermediate_done_dir = config['configurations']['mapred-site']['mapreduce.jobhistory.intermediate-done-dir']
+mapreduce_jobhistory_done_dir = config['configurations']['mapred-site']['mapreduce.jobhistory.done-dir']
+jobhistory_heapsize = default("/configurations/mapred-env/jobhistory_heapsize", "900")
+
+# Tez-related properties
+tez_user = config['configurations']['tez-env']['tez_user']
+
+# Tez jars
+tez_local_api_jars = '/usr/lib/tez/tez*.jar'
+tez_local_lib_jars = '/usr/lib/tez/lib/*.jar'
+app_dir_files = {tez_local_api_jars:None}
+
+# Tez libraries
+tez_lib_uris = default("/configurations/tez-site/tez.lib.uris", None)
+
+#for create_hdfs_directory
+hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
+hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
+import functools
+#create partial functions with common arguments for every HdfsDirectory call
+#to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
+  security_enabled = security_enabled,
+  keytab = hdfs_user_keytab,
+  kinit_path_local = kinit_path_local,
+  bin_dir = hadoop_bin_dir
+)
+update_exclude_file_only = default("/commandParams/update_exclude_file_only",False)
+
+mapred_tt_group = default("/configurations/mapred-site/mapreduce.tasktracker.group", user_group)
+
+#taskcontroller.cfg
+
+mapred_local_dir = "/tmp/hadoop-mapred/mapred/local"
+hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefix']
+min_user_id = config['configurations']['yarn-env']['min_user_id']
+
+# Node labels
+node_labels_dir = default("/configurations/yarn-site/yarn.node-labels.fs-store.root-dir", None)
+node_label_enable = config['configurations']['yarn-site']['yarn.node-labels.enabled']
+
+cgroups_dir = "/cgroups_test/cpu"

+ 1 - 0
ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/YARN/package/scripts/params.py → ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_windows.py

@@ -22,6 +22,7 @@ Ambari Agent
 from resource_management import *
 from resource_management.libraries import functions
 import os
+from status_params import *
 
 # server configurations
 config = Script.get_config()

+ 49 - 17
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py

@@ -28,22 +28,65 @@ from resource_management.libraries.functions.security_commons import build_expec
 from install_jars import install_tez_jars
 from yarn import yarn
 from service import service
+from ambari_commons import OSConst
+from ambari_commons.os_family_impl import OsFamilyImpl
 
 
-class Resourcemanager(Script):
-
-  def get_stack_to_component(self):
-    return {"HDP": "hadoop-yarn-resourcemanager"}
 
+class Resourcemanager(Script):
   def install(self, env):
     self.install_packages(env)
 
-  def configure(self, env):
+  def stop(self, env, rolling_restart=False):
     import params
+    env.set_params(params)
+    service('resourcemanager', action='stop')
 
+  def configure(self, env):
+    import params
     env.set_params(params)
     yarn(name='resourcemanager')
 
+  def refreshqueues(self, env):
+    pass
+
+
+
+@OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
+class ResourcemanagerWindows(Resourcemanager):
+  def start(self, env):
+    import params
+    env.set_params(params)
+    self.configure(env)
+    service('resourcemanager', action='start')
+
+  def status(self, env):
+    service('resourcemanager', action='status')
+
+  def decommission(self, env):
+    import params
+
+    env.set_params(params)
+    yarn_user = params.yarn_user
+
+    yarn_refresh_cmd = format("cmd /c yarn rmadmin -refreshNodes")
+
+    File(params.exclude_file_path,
+         content=Template("exclude_hosts_list.j2"),
+         owner=yarn_user,
+         mode="f"
+    )
+
+    if params.update_exclude_file_only == False:
+      Execute(yarn_refresh_cmd, user=yarn_user)
+
+
+
+@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
+class ResourcemanagerDefault(Resourcemanager):
+  def get_stack_to_component(self):
+    return {"HDP": "hadoop-yarn-resourcemanager"}
+
   def pre_rolling_restart(self, env):
     Logger.info("Executing Rolling Upgrade post-restart")
     import params
@@ -62,18 +105,7 @@ class Resourcemanager(Script):
     else:
       # will work only for stack versions >=2.2
       copy_tarballs_to_hdfs('tez', 'hadoop-yarn-resourcemanager', params.tez_user, params.hdfs_user, params.user_group)
-    service('resourcemanager',
-            action='start'
-    )
-
-  def stop(self, env, rolling_restart=False):
-    import params
-
-    env.set_params(params)
-
-    service('resourcemanager',
-            action='stop'
-    )
+    service('resourcemanager', action='start')
 
   def status(self, env):
     import status_params

+ 13 - 0
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/service.py

@@ -20,8 +20,21 @@ Ambari Agent
 """
 
 from resource_management import *
+from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
+from ambari_commons import OSConst
+
+@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
+def service(componentName, action='start', serviceName='yarn'):
+  import params
+  if componentName == 'resourcemanager' or componentName == 'nodemanager' or componentName == 'historyserver' or componentName == 'timelineserver':
+    service_name = params.service_map[componentName]
+    if action == 'start' or action == 'stop':
+      Service(service_name, action=action)
+    elif action == 'status':
+      check_windows_service_status(service_name)
 
 
+@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
 def service(componentName, action='start', serviceName='yarn'):
 
   import params

+ 52 - 0
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/service_check.py

@@ -21,8 +21,59 @@ Ambari Agent
 
 from resource_management import *
 import sys
+from ambari_commons import OSConst
+from ambari_commons.os_family_impl import OsFamilyImpl
+
 
 class ServiceCheck(Script):
+  def service_check(self, env):
+    pass
+
+
+@OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
+class ServiceCheckWindows(ServiceCheck):
+  def service_check(self, env):
+    import params
+    env.set_params(params)
+
+    yarn_exe = os_utils.quote_path(os.path.join(params.yarn_home, "bin", "yarn.cmd"))
+
+    run_yarn_check_cmd = "cmd /C %s node -list" % yarn_exe
+
+    component_type = 'rm'
+    if params.hadoop_ssl_enabled:
+      component_address = params.rm_webui_https_address
+    else:
+      component_address = params.rm_webui_address
+
+    #temp_dir = os.path.abspath(os.path.join(params.hadoop_home, os.pardir)), "/tmp"
+    temp_dir = os.path.join(os.path.dirname(params.hadoop_home), "temp")
+    validateStatusFileName = "validateYarnComponentStatus.py"
+    validateStatusFilePath = os.path.join(temp_dir, validateStatusFileName)
+    python_executable = sys.executable
+    validateStatusCmd = "%s %s %s -p %s -s %s" % (python_executable, validateStatusFilePath, component_type, component_address, params.hadoop_ssl_enabled)
+
+    if params.security_enabled:
+      kinit_cmd = "%s -kt %s %s;" % (params.kinit_path_local, params.smoke_user_keytab, params.smokeuser)
+      smoke_cmd = kinit_cmd + ' ' + validateStatusCmd
+    else:
+      smoke_cmd = validateStatusCmd
+
+    File(validateStatusFilePath,
+         content=StaticFile(validateStatusFileName)
+    )
+
+    Execute(smoke_cmd,
+            tries=3,
+            try_sleep=5,
+            logoutput=True
+    )
+
+    Execute(run_yarn_check_cmd, logoutput=True)
+
+
+@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
+class ServiceCheckDefault(ServiceCheck):
   def service_check(self, env):
     import params
     env.set_params(params)
@@ -64,5 +115,6 @@ class ServiceCheck(Script):
             user=params.smokeuser
     )
 
+
 if __name__ == "__main__":
   ServiceCheck().execute()

+ 22 - 18
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/status_params.py

@@ -19,25 +19,29 @@ limitations under the License.
 """
 
 from resource_management import *
+from ambari_commons import OSCheck
 
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
-mapred_user = config['configurations']['mapred-env']['mapred_user']
-yarn_user = config['configurations']['yarn-env']['yarn_user']
-yarn_pid_dir_prefix = config['configurations']['yarn-env']['yarn_pid_dir_prefix']
-mapred_pid_dir_prefix = config['configurations']['mapred-env']['mapred_pid_dir_prefix']
-yarn_pid_dir = format("{yarn_pid_dir_prefix}/{yarn_user}")
-mapred_pid_dir = format("{mapred_pid_dir_prefix}/{mapred_user}")
-
-resourcemanager_pid_file = format("{yarn_pid_dir}/yarn-{yarn_user}-resourcemanager.pid")
-nodemanager_pid_file = format("{yarn_pid_dir}/yarn-{yarn_user}-nodemanager.pid")
-yarn_historyserver_pid_file_old = format("{yarn_pid_dir}/yarn-{yarn_user}-historyserver.pid")
-yarn_historyserver_pid_file = format("{yarn_pid_dir}/yarn-{yarn_user}-timelineserver.pid")  # *-historyserver.pid is deprecated
-mapred_historyserver_pid_file = format("{mapred_pid_dir}/mapred-{mapred_user}-historyserver.pid")
-
-# Security related/required params
-hadoop_conf_dir = "/etc/hadoop/conf"
-hostname = config['hostname']
-kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
-security_enabled = config['configurations']['cluster-env']['security_enabled']
+if OSCheck.is_windows_family():
+  service_map = {'resourcemanager':'resourcemanager', 'nodemanager':'nodemanager', 'historyserver':'jobhistoryserver', 'timelineserver':'historyserver'}
+else:
+  mapred_user = config['configurations']['mapred-env']['mapred_user']
+  yarn_user = config['configurations']['yarn-env']['yarn_user']
+  yarn_pid_dir_prefix = config['configurations']['yarn-env']['yarn_pid_dir_prefix']
+  mapred_pid_dir_prefix = config['configurations']['mapred-env']['mapred_pid_dir_prefix']
+  yarn_pid_dir = format("{yarn_pid_dir_prefix}/{yarn_user}")
+  mapred_pid_dir = format("{mapred_pid_dir_prefix}/{mapred_user}")
+
+  resourcemanager_pid_file = format("{yarn_pid_dir}/yarn-{yarn_user}-resourcemanager.pid")
+  nodemanager_pid_file = format("{yarn_pid_dir}/yarn-{yarn_user}-nodemanager.pid")
+  yarn_historyserver_pid_file_old = format("{yarn_pid_dir}/yarn-{yarn_user}-historyserver.pid")
+  yarn_historyserver_pid_file = format("{yarn_pid_dir}/yarn-{yarn_user}-timelineserver.pid")  # *-historyserver.pid is deprecated
+  mapred_historyserver_pid_file = format("{mapred_pid_dir}/mapred-{mapred_user}-historyserver.pid")
+
+  # Security related/required params
+  hadoop_conf_dir = "/etc/hadoop/conf"
+  hostname = config['hostname']
+  kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
+  security_enabled = config['configurations']['cluster-env']['security_enabled']

+ 25 - 0
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py

@@ -22,12 +22,37 @@ Ambari Agent
 from resource_management import *
 import sys
 import os
+from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
+from ambari_commons import OSConst
 
 
+@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
 def yarn(name = None):
   import params
+  XmlConfig("mapred-site.xml",
+            conf_dir=params.config_dir,
+            configurations=params.config['configurations']['mapred-site'],
+            owner=params.yarn_user,
+            mode='f'
+  )
+  XmlConfig("yarn-site.xml",
+            conf_dir=params.config_dir,
+            configurations=params.config['configurations']['yarn-site'],
+            owner=params.yarn_user,
+            mode='f',
+            configuration_attributes=params.config['configuration_attributes']['yarn-site']
+  )
+  XmlConfig("capacity-scheduler.xml",
+            conf_dir=params.config_dir,
+            configurations=params.config['configurations']['capacity-scheduler'],
+            owner=params.yarn_user,
+            mode='f'
+  )
 
 
+@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
+def yarn(name = None):
+  import params
   if name in ["nodemanager","historyserver"]:
     if params.yarn_log_aggregation_enabled:
       params.HdfsDirectory(params.yarn_nm_app_log_dir,

+ 22 - 12
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn_client.py

@@ -21,21 +21,12 @@ Ambari Agent
 
 import sys
 from resource_management import *
-
 from yarn import yarn
+from ambari_commons import OSConst
+from ambari_commons.os_family_impl import OsFamilyImpl
 
-class YarnClient(Script):
-
-  def get_stack_to_component(self):
-    return {"HDP": "hadoop-client"}
-
-  def pre_rolling_restart(self, env):
-    import params
-    env.set_params(params)
-
-    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
-      Execute(format("hdp-select set hadoop-client {version}"))
 
+class YarnClient(Script):
   def install(self, env):
     self.install_packages(env)
     self.configure(env)
@@ -48,5 +39,24 @@ class YarnClient(Script):
   def status(self, env):
     raise ClientComponentHasNoStatus()
 
+
+@OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
+class YarnClientWindows(YarnClient):
+  pass
+
+
+@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
+class YarnClientDefault(YarnClient):
+  def get_stack_to_component(self):
+    return {"HDP": "hadoop-client"}
+
+  def pre_rolling_restart(self, env):
+    import params
+    env.set_params(params)
+
+    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
+      Execute(format("hdp-select set hadoop-client {version}"))
+
+
 if __name__ == "__main__":
   YarnClient().execute()

+ 0 - 54
ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/YARN/package/scripts/application_timeline_server.py

@@ -1,54 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-
-from resource_management import *
-from yarn import yarn
-import service_mapping
-
-class ApplicationTimelineServer(Script):
-
-  def install(self, env):
-    if not check_windows_service_exists(service_mapping.apptimelineserver_win_service_name):
-      self.install_packages(env)
-    self.configure(env)
-
-  def configure(self, env):
-    import params
-    env.set_params(params)
-    yarn()
-
-  def start(self, env):
-    import params
-    env.set_params(params)
-    self.configure(env)
-    Service(service_mapping.apptimelineserver_win_service_name, action="start")
-
-  def stop(self, env):
-    import params
-    env.set_params(params)
-    Service(service_mapping.apptimelineserver_win_service_name, action="stop")
-
-  def status(self, env):
-    import params
-    check_windows_service_status(service_mapping.apptimelineserver_win_service_name)
-
-if __name__ == "__main__":
-  ApplicationTimelineServer().execute()

+ 0 - 53
ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/YARN/package/scripts/historyserver.py

@@ -1,53 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-
-from resource_management import *
-from yarn import yarn
-import service_mapping
-
-class Historyserver(Script):
-
-  def install(self, env):
-    if not check_windows_service_exists(service_mapping.historyserver_win_service_name):
-      self.install_packages(env)
-    self.configure(env)
-
-  def configure(self, env):
-    import params
-    env.set_params(params)
-    yarn()
-
-  def start(self, env):
-    import params
-    env.set_params(params)
-    self.configure(env)
-    Service(service_mapping.historyserver_win_service_name, action="start")
-
-  def stop(self, env):
-    import params
-    env.set_params(params)
-    Service(service_mapping.historyserver_win_service_name, action="stop")
-
-  def status(self, env):
-    check_windows_service_status(service_mapping.historyserver_win_service_name)
-
-if __name__ == "__main__":
-  Historyserver().execute()

+ 0 - 105
ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/YARN/package/scripts/mapred_service_check.py

@@ -1,105 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-
-from resource_management import *
-from resource_management.libraries import functions
-import sys
-import os
-
-
-class MapReduce2ServiceCheck(Script):
-  def service_check(self, env):
-    import params
-
-    env.set_params(params)
-
-    component_type = 'hs'
-    if params.hadoop_ssl_enabled:
-      component_address = params.hs_webui_address
-    else:
-      component_address = params.hs_webui_address
-
-    validateStatusFileName = "validateYarnComponentStatus.py"
-    validateStatusFilePath = os.path.join(os.path.dirname(params.hadoop_home), "temp", validateStatusFileName)
-    python_executable = sys.executable
-    validateStatusCmd = "{0} {1} {2} -p {3} -s {4}".format(
-      python_executable, validateStatusFilePath, component_type, component_address, params.hadoop_ssl_enabled)
-
-    if params.security_enabled:
-      kinit_cmd = "{0} -kt {1} {2};".format(params.kinit_path_local, params.smoke_user_keytab, params.smokeuser)
-      smoke_cmd = kinit_cmd + validateStatusCmd
-    else:
-      smoke_cmd = validateStatusCmd
-
-    File(validateStatusFilePath,
-         content=StaticFile(validateStatusFileName)
-    )
-
-    Execute(smoke_cmd,
-            tries=3,
-            try_sleep=5,
-            logoutput=True
-    )
-
-    # hadoop_exe = os.path.join(params.hadoop_home, "bin", "hadoop")
-    #
-    # tested_file = os.path.join(params.hadoop_home, "bin", "hadoop.cmd")
-    # jar_path = os.path.join(params.hadoop_mapred2_jar_location, params.hadoopMapredExamplesJarName)
-    # input_file = format("/user/hadoop/mapredsmokeinput")
-    # output_file = format("/user/hadoop/mapredsmokeoutput")
-    # cleanup_cmd = format("cmd /C {hadoop_exe} fs -rm -r -f {output_file} {input_file}")
-    # create_file_cmd = format("cmd /C {hadoop_exe} fs -put {tested_file} {input_file}")
-    # run_wordcount_job = format("cmd /C {hadoop_exe} jar {jar_path} wordcount {input_file} {output_file}")
-    # test_cmd = format("cmd /C {hadoop_exe} fs -test -e {output_file}")
-    #
-    # if params.security_enabled:
-    #   kinit_cmd = "{0} -kt {1} {2};".format(kinit_path_local, smoke_user_keytab, smokeuser)
-    #   Execute(kinit_cmd)
-    #
-    # Execute(cleanup_cmd,
-    #         tries=1,
-    #         try_sleep=5,
-    #         logoutput=True,
-    #         user=params.hdfs_user
-    # )
-    #
-    # Execute(create_file_cmd,
-    #         tries=1,
-    #         try_sleep=5,
-    #         logoutput=True,
-    #         user=params.hdfs_user
-    # )
-    #
-    # Execute(run_wordcount_job,
-    #         tries=1,
-    #         try_sleep=5,
-    #         logoutput=True,
-    #         user=params.hdfs_user
-    # )
-    #
-    # Execute(test_cmd,
-    #         logoutput=True,
-    #         user=params.hdfs_user
-    # )
-
-
-if __name__ == "__main__":
-  MapReduce2ServiceCheck().execute()

+ 0 - 43
ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/YARN/package/scripts/mapreduce2_client.py

@@ -1,43 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-
-from resource_management import *
-from yarn import yarn
-import os
-
-class MapReduce2Client(Script):
-
-  def install(self, env):
-    # client checks env var to determine if it is installed
-    if not os.environ.has_key("HADOOP_CONF_DIR"):
-      self.install_packages(env)
-    self.configure(env)
-
-  def configure(self, env):
-    import params
-    env.set_params(params)
-    yarn()
-
-  def status(self, env):
-    raise ClientComponentHasNoStatus()
-
-if __name__ == "__main__":
-  MapReduce2Client().execute()

+ 0 - 53
ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/YARN/package/scripts/nodemanager.py

@@ -1,53 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-
-from resource_management import *
-from yarn import yarn
-import service_mapping
-
-class Nodemanager(Script):
-
-  def install(self, env):
-    if not check_windows_service_exists(service_mapping.nodemanager_win_service_name):
-      self.install_packages(env)
-    self.configure(env)
-
-  def configure(self, env):
-    import params
-    env.set_params(params)
-    yarn()
-
-  def start(self, env):
-    import params
-    env.set_params(params)
-    self.configure(env)
-    Service(service_mapping.nodemanager_win_service_name, action="start")
-
-  def stop(self, env):
-    import params
-    env.set_params(params)
-    Service(service_mapping.nodemanager_win_service_name, action="stop")
-
-  def status(self, env):
-    check_windows_service_status(service_mapping.nodemanager_win_service_name)
-
-if __name__ == "__main__":
-  Nodemanager().execute()

+ 0 - 77
ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/YARN/package/scripts/resourcemanager.py

@@ -1,77 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-
-from resource_management import *
-from yarn import yarn
-import service_mapping
-
-class Resourcemanager(Script):
-
-  def install(self, env):
-    import params
-    if not check_windows_service_exists(service_mapping.resourcemanager_win_service_name):
-      self.install_packages(env)
-    self.configure(env)
-
-  def configure(self, env):
-    import params
-    env.set_params(params)
-    yarn()
-
-  def start(self, env):
-    import params
-    env.set_params(params)
-    self.configure(env)
-    Service(service_mapping.resourcemanager_win_service_name, action="start")
-
-  def stop(self, env):
-    import params
-    env.set_params(params)
-    Service(service_mapping.resourcemanager_win_service_name, action="stop")
-
-  def status(self, env):
-    check_windows_service_status(service_mapping.resourcemanager_win_service_name)
-
-  def refreshqueues(self, env):
-    pass
-
-  def decommission(self, env):
-      import params
-
-      env.set_params(params)
-      yarn_user = params.yarn_user
-
-      yarn_refresh_cmd = format("cmd /c yarn rmadmin -refreshNodes")
-
-      File(params.exclude_file_path,
-           content=Template("exclude_hosts_list.j2"),
-           owner=yarn_user,
-           mode="f"
-      )
-
-      if params.update_exclude_file_only == False:
-          Execute(yarn_refresh_cmd,
-                  user=yarn_user)
-          pass
-      pass
-
-if __name__ == "__main__":
-  Resourcemanager().execute()

+ 0 - 68
ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/YARN/package/scripts/service_check.py

@@ -1,68 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-from ambari_commons import os_utils
-
-from resource_management import *
-import sys
-import os
-
-class ServiceCheck(Script):
-  def service_check(self, env):
-    import params
-    env.set_params(params)
-
-    yarn_exe = os_utils.quote_path(os.path.join(params.yarn_home, "bin", "yarn.cmd"))
-
-    run_yarn_check_cmd = "cmd /C %s node -list" % yarn_exe
-
-    component_type = 'rm'
-    if params.hadoop_ssl_enabled:
-      component_address = params.rm_webui_https_address
-    else:
-      component_address = params.rm_webui_address
-
-    #temp_dir = os.path.abspath(os.path.join(params.hadoop_home, os.pardir)), "/tmp"
-    temp_dir = os.path.join(os.path.dirname(params.hadoop_home), "temp")
-    validateStatusFileName = "validateYarnComponentStatus.py"
-    validateStatusFilePath = os.path.join(temp_dir, validateStatusFileName)
-    python_executable = sys.executable
-    validateStatusCmd = "%s %s %s -p %s -s %s" % (python_executable, validateStatusFilePath, component_type, component_address, params.hadoop_ssl_enabled)
-
-    if params.security_enabled:
-      kinit_cmd = "%s -kt %s %s;" % (params.kinit_path_local, params.smoke_user_keytab, params.smokeuser)
-      smoke_cmd = kinit_cmd + ' ' + validateStatusCmd
-    else:
-      smoke_cmd = validateStatusCmd
-
-    File(validateStatusFilePath,
-         content=StaticFile(validateStatusFileName)
-    )
-
-    Execute(smoke_cmd,
-            tries=3,
-            try_sleep=5,
-            logoutput=True
-    )
-
-    Execute(run_yarn_check_cmd, logoutput=True)
-
-if __name__ == "__main__":
-  ServiceCheck().execute()

+ 0 - 26
ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/YARN/package/scripts/service_mapping.py

@@ -1,26 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-
-# windows services mapping
-resourcemanager_win_service_name = "resourcemanager"
-nodemanager_win_service_name = "nodemanager"
-historyserver_win_service_name = "jobhistoryserver"
-apptimelineserver_win_service_name = "historyserver"

+ 0 - 45
ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/YARN/package/scripts/yarn.py

@@ -1,45 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-
-from resource_management import *
-
-
-def yarn():
-  import params
-  XmlConfig("mapred-site.xml",
-            conf_dir=params.config_dir,
-            configurations=params.config['configurations']['mapred-site'],
-            owner=params.yarn_user,
-            mode='f'
-  )
-  XmlConfig("yarn-site.xml",
-            conf_dir=params.config_dir,
-            configurations=params.config['configurations']['yarn-site'],
-            owner=params.yarn_user,
-            mode='f',
-            configuration_attributes=params.config['configuration_attributes']['yarn-site']
-  )
-  XmlConfig("capacity-scheduler.xml",
-            conf_dir=params.config_dir,
-            configurations=params.config['configurations']['capacity-scheduler'],
-            owner=params.yarn_user,
-            mode='f'
-  )

+ 0 - 44
ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/YARN/package/scripts/yarn_client.py

@@ -1,44 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-
-from resource_management import *
-from yarn import yarn
-import os
-
-class YarnClient(Script):
-
-  def install(self, env):
-    import params
-    # client checks env var to determine if it is installed
-    if params.config_dir is None:
-      self.install_packages(env)
-    self.configure(env)
-
-  def configure(self, env):
-    import params
-    env.set_params(params)
-    yarn()
-
-  def status(self, env):
-    raise ClientComponentHasNoStatus()
-
-if __name__ == "__main__":
-  YarnClient().execute()

+ 0 - 40
ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/YARN/package/templates/container-executor.cfg.j2

@@ -1,40 +0,0 @@
-{#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#}
-
-#/*
-# * Licensed to the Apache Software Foundation (ASF) under one
-# * or more contributor license agreements.  See the NOTICE file
-# * distributed with this work for additional information
-# * regarding copyright ownership.  The ASF licenses this file
-# * to you under the Apache License, Version 2.0 (the
-# * "License"); you may not use this file except in compliance
-# * with the License.  You may obtain a copy of the License at
-# *
-# *     http://www.apache.org/licenses/LICENSE-2.0
-# *
-# * Unless required by applicable law or agreed to in writing, software
-# * distributed under the License is distributed on an "AS IS" BASIS,
-# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# * See the License for the specific language governing permissions and
-# * limitations under the License.
-# */
-yarn.nodemanager.local-dirs={{nm_local_dirs}}
-yarn.nodemanager.log-dirs={{nm_log_dirs}}
-yarn.nodemanager.linux-container-executor.group={{yarn_executor_container_group}}
-banned.users=hdfs,yarn,mapred,bin
-min.user.id=1000

+ 0 - 21
ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/YARN/package/templates/exclude_hosts_list.j2

@@ -1,21 +0,0 @@
-{#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#}
-
-{% for host in exclude_hosts %}
-{{host}}
-{% endfor %}

+ 0 - 35
ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/YARN/package/templates/mapreduce.conf.j2

@@ -1,35 +0,0 @@
-{#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#}
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-{{mapred_user}}   - nofile 32768
-{{mapred_user}}   - nproc  65536

+ 0 - 38
ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/YARN/package/templates/taskcontroller.cfg.j2

@@ -1,38 +0,0 @@
-{#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#}
-
-#/*
-# * Licensed to the Apache Software Foundation (ASF) under one
-# * or more contributor license agreements.  See the NOTICE file
-# * distributed with this work for additional information
-# * regarding copyright ownership.  The ASF licenses this file
-# * to you under the Apache License, Version 2.0 (the
-# * "License"); you may not use this file except in compliance
-# * with the License.  You may obtain a copy of the License at
-# *
-# *     http://www.apache.org/licenses/LICENSE-2.0
-# *
-# * Unless required by applicable law or agreed to in writing, software
-# * distributed under the License is distributed on an "AS IS" BASIS,
-# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# * See the License for the specific language governing permissions and
-# * limitations under the License.
-# */
-mapred.local.dir={{mapred_local_dir}}
-mapreduce.tasktracker.group={{mapred_tt_group}}
-hadoop.log.dir={{hdfs_log_dir_prefix}}/{{mapred_user}}

+ 0 - 35
ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/YARN/package/templates/yarn.conf.j2

@@ -1,35 +0,0 @@
-{#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#}
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-{{yarn_user}}   - nofile 32768
-{{yarn_user}}   - nproc  65536