
AMBARI-11306. RU to copy correct version of tarball, including tez.tar.gz and mapreduce.tar.gz (alejandro)

Alejandro Fernandez 10 years ago
commit f178723611
65 changed files with 470 additions and 1004 deletions
  1. 3 2
      ambari-common/src/main/python/resource_management/libraries/functions/check_process_status.py
  2. 141 0
      ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
  3. 3 1
      ambari-common/src/main/python/resource_management/libraries/functions/hive_check.py
  4. 1 1
      ambari-common/src/main/python/resource_management/libraries/resources/hdfs_resource.py
  5. 1 1
      ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
  6. 71 58
      ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
  7. 20 16
      ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
  8. 8 2
      ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service.py
  9. 27 26
      ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
  10. 3 9
      ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py
  11. 18 13
      ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py
  12. 6 11
      ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/job_history_server.py
  13. 0 2
      ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
  14. 1 0
      ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/service_check.py
  15. 8 8
      ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_service.py
  16. 9 17
      ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py
  17. 2 8
      ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
  18. 13 12
      ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py
  19. 5 4
      ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/status_params.py
  20. 0 83
      ambari-server/src/main/resources/stacks/HDP/2.2/configuration/cluster-env.xml
  21. 0 83
      ambari-server/src/main/resources/stacks/HDP/2.3.GlusterFS/configuration/cluster-env.xml
  22. 0 83
      ambari-server/src/main/resources/stacks/HDP/2.3.GlusterFS/configuration/cluster-env.xml.version
  23. 1 13
      ambari-server/src/main/resources/upgrade/catalog/UpgradeCatalog_1.3_to_2.2.json
  24. 0 14
      ambari-server/src/main/resources/upgrade/catalog/UpgradeCatalog_2.0_to_2.2.2.json
  25. 0 12
      ambari-server/src/main/resources/upgrade/catalog/UpgradeCatalog_2.0_to_2.2.4.json
  26. 0 14
      ambari-server/src/main/resources/upgrade/catalog/UpgradeCatalog_2.1_to_2.2.2.json
  27. 0 12
      ambari-server/src/main/resources/upgrade/catalog/UpgradeCatalog_2.1_to_2.2.4.json
  28. 0 12
      ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog200Test.java
  29. 20 57
      ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
  30. 2 0
      ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_service_check.py
  31. 5 15
      ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
  32. 2 2
      ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
  33. 3 15
      ambari-server/src/test/python/stacks/2.0.6/configs/client-upgrade.json
  34. 7 19
      ambari-server/src/test/python/stacks/2.0.6/configs/default.json
  35. 1 3
      ambari-server/src/test/python/stacks/2.0.6/configs/default_hive_nn_ha.json
  36. 1 3
      ambari-server/src/test/python/stacks/2.0.6/configs/default_hive_nn_ha_2.json
  37. 1 3
      ambari-server/src/test/python/stacks/2.0.6/configs/default_no_install.json
  38. 3 17
      ambari-server/src/test/python/stacks/2.0.6/configs/hbase-2.2.json
  39. 3 17
      ambari-server/src/test/python/stacks/2.0.6/configs/hbase-check-2.2.json
  40. 5 17
      ambari-server/src/test/python/stacks/2.0.6/configs/hbase-preupgrade.json
  41. 3 17
      ambari-server/src/test/python/stacks/2.0.6/configs/hbase-rs-2.2.json
  42. 6 18
      ambari-server/src/test/python/stacks/2.0.6/configs/nn_ru_lzo.json
  43. 1 13
      ambari-server/src/test/python/stacks/2.0.6/configs/ranger-namenode-start.json
  44. 1 12
      ambari-server/src/test/python/stacks/2.0.6/configs/secured.json
  45. 3 15
      ambari-server/src/test/python/stacks/2.0.6/configs/zk-service_check_2.2.json
  46. 2 14
      ambari-server/src/test/python/stacks/2.1/configs/client-upgrade.json
  47. 10 20
      ambari-server/src/test/python/stacks/2.2/PIG/test_pig_service_check.py
  48. 7 37
      ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py
  49. 2 0
      ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_service_check.py
  50. 1 12
      ambari-server/src/test/python/stacks/2.2/configs/default.json
  51. 3 15
      ambari-server/src/test/python/stacks/2.2/configs/falcon-upgrade.json
  52. 1 13
      ambari-server/src/test/python/stacks/2.2/configs/hive-upgrade.json
  53. 7 19
      ambari-server/src/test/python/stacks/2.2/configs/journalnode-upgrade-hdfs-secure.json
  54. 5 19
      ambari-server/src/test/python/stacks/2.2/configs/journalnode-upgrade.json
  55. 1 13
      ambari-server/src/test/python/stacks/2.2/configs/oozie-downgrade.json
  56. 1 13
      ambari-server/src/test/python/stacks/2.2/configs/oozie-upgrade.json
  57. 7 19
      ambari-server/src/test/python/stacks/2.2/configs/pig-service-check-secure.json
  58. 7 19
      ambari-server/src/test/python/stacks/2.2/configs/ranger-admin-upgrade.json
  59. 7 19
      ambari-server/src/test/python/stacks/2.2/configs/ranger-usersync-upgrade.json
  60. 1 12
      ambari-server/src/test/python/stacks/2.2/configs/secured.json
  61. 0 0
      ambari-server/src/test/python/stacks/2.2/configs/spark-job-history-server.json
  62. 0 0
      ambari-server/src/test/python/stacks/2.3/configs/hbase_default.json
  63. 0 0
      ambari-server/src/test/python/stacks/2.3/configs/hbase_secure.json
  64. 0 0
      ambari-server/src/test/python/stacks/2.3/configs/storm_default.json
  65. 0 0
      ambari-server/src/test/resources/custom_actions/ru_execute_tasks_namenode_prepare.json

+ 3 - 2
ambari-common/src/main/python/resource_management/libraries/functions/check_process_status.py

@@ -39,12 +39,13 @@ def check_process_status(pid_file):
  from resource_management.core import sudo

  if not pid_file or not os.path.isfile(pid_file):
+    Logger.info("Pid file {0} is empty or does not exist".format(str(pid_file)))
    raise ComponentIsNotRunning()
  
  try:
    pid = int(sudo.read_file(pid_file))
  except:
-    Logger.debug("Pid file {0} does not exist".format(pid_file))
+    Logger.info("Pid file {0} does not exist or does not contain a process id number".format(pid_file))
    raise ComponentIsNotRunning()

  try:
@@ -55,6 +56,6 @@ def check_process_status(pid_file):
    # process ID or process group ID.
    os.kill(pid, 0)
  except OSError:
-    Logger.debug("Process with pid {0} is not running. Stale pid file"
+    Logger.info("Process with pid {0} is not running. Stale pid file"
              " at {1}".format(pid, pid_file))
    raise ComponentIsNotRunning()

+ 141 - 0
ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py

@@ -0,0 +1,141 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+__all__ = ["copy_to_hdfs", ]
+
+import os
+import uuid
+
+from resource_management.libraries.resources.hdfs_resource import HdfsResource
+from resource_management.libraries.functions.default import default
+from resource_management.core.logger import Logger
+
+STACK_VERSION_PATTERN = "{{ stack_version }}"
+
+TARBALL_MAP = {
+  "HDP": {
+    "tez":       ("/usr/hdp/%s/tez/lib/tez.tar.gz" % STACK_VERSION_PATTERN,
+                  "/hdp/apps/%s/tez/tez.tar.gz"    % STACK_VERSION_PATTERN),
+
+    "hive":      ("/usr/hdp/%s/hive/hive.tar.gz"   % STACK_VERSION_PATTERN,
+                  "/hdp/apps/%s/hive/hive.tar.gz"  % STACK_VERSION_PATTERN),
+
+    "pig":       ("/usr/hdp/%s/pig/pig.tar.gz"     % STACK_VERSION_PATTERN,
+                  "/hdp/apps/%s/pig/pig.tar.gz"    % STACK_VERSION_PATTERN),
+
+    "hadoop_streaming": ("/usr/hdp/%s/hadoop/hadoop-streaming.jar"     % STACK_VERSION_PATTERN,
+                         "/hdp/apps/%s/mapreduce/hadoop-streaming.jar" % STACK_VERSION_PATTERN),
+
+    "sqoop":     ("/usr/hdp/%s/sqoop/sqoop.tar.gz"  % STACK_VERSION_PATTERN,
+                  "/hdp/apps/%s/sqoop/sqoop.tar.gz" % STACK_VERSION_PATTERN),
+
+    "mapreduce": ("/usr/hdp/%s/hadoop/mapreduce.tar.gz"     % STACK_VERSION_PATTERN,
+                  "/hdp/apps/%s/mapreduce/mapreduce.tar.gz" % STACK_VERSION_PATTERN)
+  }
+}
+
+def copy_to_hdfs(name, user_group, owner, file_mode=0444, custom_source_file=None, custom_dest_file=None, force_execute=False):
+  """
+  :param name: Tarball name, e.g., tez, hive, pig, sqoop.
+  :param user_group: Group to own the directory.
+  :param owner: File owner
+  :param file_mode: File permission
+  :param custom_source_file: Override the source file path
+  :param custom_dest_file: Override the destination file path
+  :param force_execute: If true, will execute the HDFS commands immediately, otherwise, will defer to the calling function.
+  :return: Will return True if successful, otherwise, False.
+  """
+  import params
+
+  if params.stack_name is None or params.stack_name.upper() not in TARBALL_MAP:
+    Logger.error("Cannot copy %s tarball to HDFS because stack %s does not support this operation." % (str(name), str(params.stack_name)))
+    return False
+
+  if name is None or name.lower() not in TARBALL_MAP[params.stack_name.upper()]:
+    Logger.warning("Cannot copy tarball to HDFS because %s is not supported in stack for this operation." % (str(name), str(params.stack_name)))
+    return -1
+
+  (source_file, dest_file) = TARBALL_MAP[params.stack_name.upper()][name.lower()]
+
+  if custom_source_file is not None:
+    source_file = custom_source_file
+
+  if custom_dest_file is not None:
+    dest_file = custom_dest_file
+
+  upgrade_direction = default("/commandParams/upgrade_direction", None)
+  is_rolling_upgrade = upgrade_direction is not None
+  current_version = default("/hostLevelParams/current_version", None)
+  if is_rolling_upgrade:
+    # This is the version going to. In the case of a downgrade, it is the lower version.
+    current_version = default("/commandParams/version", None)
+
+  if current_version is None:
+    message_suffix = " during rolling %s" % str(upgrade_direction) if is_rolling_upgrade else ""
+    Logger.warning("Cannot copy %s tarball because unable to determine current version%s." % (str(name), message_suffix))
+    return False
+
+  source_file = source_file.replace(STACK_VERSION_PATTERN, current_version)
+  dest_file = dest_file.replace(STACK_VERSION_PATTERN, current_version)
+
+  if not os.path.exists(source_file):
+    Logger.warning("WARNING. Cannot copy %s tarball because file does not exist: %s . It is possible that this component is not installed on this host." % (str(name), str(source_file)))
+    return False
+
+  # Because CopyFromLocal does not guarantee synchronization, it's possible for two processes to first attempt to
+  # copy the file to a temporary location, then process 2 fails because the temporary file was already created by
+  # process 1, so process 2 tries to clean up by deleting the temporary file, and then process 1
+  # cannot finish the copy to the final destination, and both fail!
+  # For this reason, the file name on the destination must be unique, and we then rename it to the intended value.
+  # The rename operation is synchronized by the Namenode.
+
+  #unique_string = str(uuid.uuid4())[:8]
+  #temp_dest_file = dest_file + "." + unique_string
+
+  # The logic above cannot be used until fast-hdfs-resource.jar supports the mv command, or it switches
+  # to WebHDFS.
+
+
+  # If the directory already exists, it is a NO-OP
+  dest_dir = os.path.dirname(dest_file)
+  params.HdfsResource(dest_dir,
+                      type="directory",
+                      action="create_on_execute",
+                      owner=owner,
+                      mode=0555
+  )
+
+  # If the file already exists, it is a NO-OP
+  params.HdfsResource(dest_file,
+                      type="file",
+                      action="create_on_execute",
+                      source=source_file,
+                      group=user_group,
+                      owner=owner,
+                      mode=file_mode
+  )
+  Logger.info("Will attempt to copy %s tarball from %s to DFS at %s." % (name, source_file, dest_file))
+
+  # For improved performance, force_execute should be False so that it is delayed and combined with other calls.
+  # If still want to run the command now, set force_execute to True
+  if force_execute:
+    params.HdfsResource(None, action="execute")
+
+  return True
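
For reference, a minimal sketch (inferred only from this new helper and the call sites changed below) of how a service script is expected to invoke copy_to_hdfs; the delayed HdfsResource execution mirrors the pattern used in hive_server.py and resourcemanager.py, and params is the usual module-level params import used by these scripts:

  from resource_management.libraries.functions.copy_tarball import copy_to_hdfs

  # Queue the tarball copies, then flush all pending HDFS operations in one call.
  resource_created = copy_to_hdfs("mapreduce", params.user_group, params.hdfs_user)
  resource_created = copy_to_hdfs("tez", params.user_group, params.hdfs_user) or resource_created
  if resource_created:
    params.HdfsResource(None, action="execute")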

+ 3 - 1
ambari-common/src/main/python/resource_management/libraries/functions/hive_check.py

@@ -17,10 +17,12 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
 '''
+
+import socket
+
 from resource_management.core.exceptions import Fail
 from resource_management.core.resources import Execute
 from resource_management.libraries.functions import format
-import socket
 
 
 def check_thrift_port_sasl(address, port, hive_auth="NOSASL", key=None, kinitcmd=None, smokeuser='ambari-qa',

+ 1 - 1
ambari-common/src/main/python/resource_management/libraries/resources/hdfs_resource.py

@@ -27,7 +27,7 @@ from resource_management.core.base import Resource, ForcedListArgument, Resource
 Calling a lot of hadoop commands takes too much time.
 The cause is that for every call new connection initialized, with datanodes, namenode.
 
-While this resource can gather the dicteroies/files to create/delete/copyFromLocal.
+While this resource can gather the directories/files to create/delete/copyFromLocal.
 And after just with one call create all that.
 
 action = create_delayed / delete_delayed. Are for gathering information  about what you want

+ 1 - 1
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py

@@ -224,7 +224,7 @@ if 'dfs.namenode.rpc-address' in config['configurations']['hdfs-site']:
 else:
   namenode_address = config['configurations']['core-site']['fs.defaultFS']
 
-fs_checkpoint_dirs = config['configurations']['hdfs-site']['dfs.namenode.checkpoint.dir'].split(',')
+fs_checkpoint_dirs = default("/configurations/hdfs-site/dfs.namenode.checkpoint.dir", "").split(',')
 
 dfs_data_dir = config['configurations']['hdfs-site']['dfs.datanode.data.dir']
 dfs_data_dir = ",".join([re.sub(r'^\[.+\]', '', dfs_dir.strip()) for dfs_dir in dfs_data_dir.split(",")])

+ 71 - 58
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py

@@ -18,14 +18,25 @@ limitations under the License.
 
 """
 
-from resource_management import *
-from resource_management.libraries import functions
-import sys
 import os
 import glob
+from urlparse import urlparse
+
+from resource_management.libraries.script.script import Script
+from resource_management.libraries.resources.hdfs_resource import HdfsResource
+from resource_management.libraries.functions.copy_tarball import copy_to_hdfs
+from resource_management.libraries.functions.version import compare_versions
+from resource_management.core.resources.service import ServiceConfig
+from resource_management.core.resources.system import File, Execute, Directory
+from resource_management.core.source import StaticFile, Template, DownloadSource, InlineTemplate
+from resource_management.core.shell import as_user
+from resource_management.libraries.functions.is_empty import is_empty
+from resource_management.libraries.resources.xml_config import XmlConfig
+from resource_management.libraries.functions.format import format
+from resource_management.core.exceptions import Fail
+
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
 from ambari_commons import OSConst
-from urlparse import urlparse
 
 
 @OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
@@ -81,24 +92,16 @@ def hive(name=None):
   import params
 
   if name == 'hiveserver2':
-
-    if params.hdp_stack_version_major != "" and compare_versions(params.hdp_stack_version_major, '2.2') >=0:
-      params.HdfsResource(InlineTemplate(params.mapreduce_tar_destination).get_content(),
-                          type="file",
-                          action="create_on_execute",
-                          source=params.mapreduce_tar_source,
-                          group=params.user_group,
-                          mode=params.tarballs_mode
-      )
-        
+    # HDP 2.1.* or lower
    if params.hdp_stack_version_major != "" and compare_versions(params.hdp_stack_version_major, "2.2.0.0") < 0:
      params.HdfsResource(params.webhcat_apps_dir,
-                           type="directory",
-                           action="create_on_execute",
-                           owner=params.webhcat_user,
-                           mode=0755
-      )
-  
+                            type="directory",
+                            action="create_on_execute",
+                            owner=params.webhcat_user,
+                            mode=0755
+                          )
+    
+    # Create webhcat dirs.
    if params.hcat_hdfs_user_dir != params.webhcat_hdfs_user_dir:
      params.HdfsResource(params.hcat_hdfs_user_dir,
                           type="directory",
@@ -106,56 +109,66 @@ def hive(name=None):
                           owner=params.hcat_user,
                           mode=params.hcat_hdfs_user_mode
      )
+
    params.HdfsResource(params.webhcat_hdfs_user_dir,
                         type="directory",
                         action="create_on_execute",
                         owner=params.webhcat_user,
                         mode=params.webhcat_hdfs_user_mode
    )
-  
-    for src_filepath in glob.glob(params.hadoop_streaming_tar_source):
-      src_filename = os.path.basename(src_filepath)
-      params.HdfsResource(InlineTemplate(params.hadoop_streaming_tar_destination_dir).get_content() + '/' + src_filename,
-                          type="file",
-                          action="create_on_execute",
-                          source=src_filepath,
-                          group=params.user_group,
-                          mode=params.tarballs_mode
-      )
-  
-    if (os.path.isfile(params.pig_tar_source)):
-      params.HdfsResource(InlineTemplate(params.pig_tar_destination).get_content(),
-                          type="file",
-                          action="create_on_execute",
-                          source=params.pig_tar_source,
-                          group=params.user_group,
-                          mode=params.tarballs_mode
-      )
-  
-    params.HdfsResource(InlineTemplate(params.hive_tar_destination).get_content(),
-                        type="file",
-                        action="create_on_execute",
-                        source=params.hive_tar_source,
-                        group=params.user_group,
-                        mode=params.tarballs_mode
-    )
- 
-    for src_filepath in glob.glob(params.sqoop_tar_source):
-      src_filename = os.path.basename(src_filepath)
-      params.HdfsResource(InlineTemplate(params.sqoop_tar_destination_dir).get_content() + '/' + src_filename,
-                          type="file",
-                          action="create_on_execute",
-                          source=src_filepath,
-                          group=params.user_group,
-                          mode=params.tarballs_mode
-      )
-      
+
+    # ****** Begin Copy Tarballs ******
+    # *********************************
+    # HDP 2.2 or higher, copy mapreduce.tar.gz to HDFS
+    if params.hdp_stack_version_major != "" and compare_versions(params.hdp_stack_version_major, '2.2') >= 0:
+      copy_to_hdfs("mapreduce", params.user_group, params.hdfs_user)
+
+    # Always copy pig.tar.gz and hive.tar.gz using the appropriate mode.
+    # This can use a different source and dest location to account for both HDP 2.1 and 2.2
+    copy_to_hdfs("pig",
+                 params.user_group,
+                 params.hdfs_user,
+                 file_mode=params.tarballs_mode,
+                 custom_source_file=params.pig_tar_source,
+                 custom_dest_file=params.pig_tar_dest_file)
+    copy_to_hdfs("hive",
+                 params.user_group,
+                 params.hdfs_user,
+                 file_mode=params.tarballs_mode,
+                 custom_source_file=params.hive_tar_source,
+                 custom_dest_file=params.hive_tar_dest_file)
+
+    wildcard_tarballs = ["sqoop", "hadoop_streaming"]
+    for tarball_name in wildcard_tarballs:
+      source_file_pattern = eval("params." + tarball_name + "_tar_source")
+      dest_dir = eval("params." + tarball_name + "_tar_dest_dir")
+
+      if source_file_pattern is None or dest_dir is None:
+        continue
+
+      source_files = glob.glob(source_file_pattern) if "*" in source_file_pattern else [source_file_pattern]
+      for source_file in source_files:
+        src_filename = os.path.basename(source_file)
+        dest_file = os.path.join(dest_dir, src_filename)
+
+        copy_to_hdfs(tarball_name,
+                     params.user_group,
+                     params.hdfs_user,
+                     file_mode=params.tarballs_mode,
+                     custom_source_file=source_file,
+                     custom_dest_file=dest_file)
+    # ******* End Copy Tarballs *******
+    # *********************************
+
+    # Create Hive Metastore Warehouse Dir
    params.HdfsResource(params.hive_apps_whs_dir,
                         type="directory",
                          action="create_on_execute",
                          owner=params.hive_user,
                          mode=0777
    )
+
+    # Create Hive User Dir
    params.HdfsResource(params.hive_hdfs_user_dir,
                         type="directory",
                          action="create_on_execute",

+ 20 - 16
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py

@@ -17,20 +17,30 @@ See the License for the specific language governing permissions and
 limitations under the License.
 
 """
-import hive_server_upgrade
 
-from resource_management import *
-from hive import hive
-from hive_service import hive_service
+
+from resource_management.libraries.script.script import Script
+from resource_management.libraries.resources.hdfs_resource import HdfsResource
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import format
+from resource_management.libraries.functions.copy_tarball import copy_to_hdfs
 from resource_management.libraries.functions.get_hdp_version import get_hdp_version
+from resource_management.libraries.functions.check_process_status import check_process_status
+from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
 from resource_management.libraries.functions.security_commons import build_expectations, \
   cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \
   FILE_TYPE_XML
+from ambari_commons import OSCheck, OSConst
+if OSCheck.is_windows_family():
+  from resource_management.libraries.functions.windows_service_utils import check_windows_service_status
 from setup_ranger_hive import setup_ranger_hive
 from ambari_commons.os_family_impl import OsFamilyImpl
-from ambari_commons import OSConst
+from resource_management.core.logger import Logger
+
+import hive_server_upgrade
+from hive import hive
+from hive_service import hive_service
 
 
 class HiveServer(Script):
@@ -100,18 +110,12 @@ class HiveServerDefault(HiveServer):
    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
      conf_select.select(params.stack_name, "hive", params.version)
      hdp_select.select("hive-server2", params.version)
-      old = params.hdp_stack_version
-      try:
-        params.hdp_stack_version = get_hdp_version('hive-server2')
-        params.HdfsResource(InlineTemplate(params.mapreduce_tar_destination).get_content(),
-                            type="file",
-                            action="create_on_execute",
-                            source=params.mapreduce_tar_source,
-                            group=params.user_group,
-                            mode=params.tarballs_mode)
+
+      # Copy mapreduce.tar.gz and tez.tar.gz to HDFS
+      resource_created = copy_to_hdfs("mapreduce", params.user_group, params.hdfs_user)
+      resource_created = copy_to_hdfs("tez", params.user_group, params.hdfs_user) or resource_created
+      if resource_created:
        params.HdfsResource(None, action="execute")
-      finally:
-        params.hdp_stack_version = old
 
  def security_status(self, env):
    import status_params

+ 8 - 2
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service.py

@@ -18,11 +18,17 @@ limitations under the License.
 
 """
 
-from resource_management import *
-import sys
 import os
 import time
+
 from resource_management.core import shell
+from resource_management.libraries.functions.format import format
+from resource_management.core.resources.system import File, Execute
+from resource_management.core.resources.service import Service
+from resource_management.core.exceptions import Fail
+from resource_management.core.shell import as_user
+from resource_management.libraries.functions.hive_check import check_thrift_port_sasl
+
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
 from ambari_commons import OSConst
 

+ 27 - 26
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py

@@ -29,6 +29,7 @@ from resource_management.libraries.resources.hdfs_resource import HdfsResource
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.version import format_hdp_stack_version
+from resource_management.libraries.functions.copy_tarball import STACK_VERSION_PATTERN
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.functions.get_port_from_url import get_port_from_url
@@ -50,7 +51,7 @@ stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version_major = format_hdp_stack_version(stack_version_unformatted)
 stack_is_hdp21 = Script.is_hdp_stack_greater_or_equal("2.0") and Script.is_hdp_stack_less_than("2.2")
 
-# this is not avaliable on INSTALL action because hdp-select is not available
+# this is not available on INSTALL action because hdp-select is not available
 hdp_stack_version = functions.get_hdp_version('hive-server2')
 
 # New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
@@ -58,13 +59,16 @@ version = default("/commandParams/version", None)
 
 hadoop_bin_dir = "/usr/bin"
 hadoop_home = '/usr'
-hadoop_streeming_jars = '/usr/lib/hadoop-mapreduce/hadoop-streaming-*.jar'
 hive_bin = '/usr/lib/hive/bin'
 hive_lib = '/usr/lib/hive/lib/'
 hive_var_lib = '/var/lib/hive'
+
+# These tar folders were used in HDP 2.1
+hadoop_streaming_jars = '/usr/lib/hadoop-mapreduce/hadoop-streaming-*.jar'
 pig_tar_file = '/usr/share/HDP-webhcat/pig.tar.gz'
 hive_tar_file = '/usr/share/HDP-webhcat/hive.tar.gz'
 sqoop_tar_file = '/usr/share/HDP-webhcat/sqoop*.tar.gz'
+
 hive_specific_configs_supported = False
 hive_etc_dir_prefix = "/etc/hive"
 limits_conf_dir = "/etc/security/limits.d"
@@ -102,41 +106,38 @@ if Script.is_hdp_stack_greater_or_equal("2.2"):
  webhcat_bin_dir = '/usr/hdp/current/hive-webhcat/sbin'
  
  # --- Tarballs ---
+  # DON'T CHANGE THESE VARIABLE NAMES
+  # Values don't change from those in copy_tarball.py
+  hive_tar_source = "/usr/hdp/%s/hive/hive.tar.gz"      % STACK_VERSION_PATTERN
+  pig_tar_source = "/usr/hdp/%s/pig/pig.tar.gz"         % STACK_VERSION_PATTERN
+  hive_tar_dest_file = "/hdp/apps/%s/hive/hive.tar.gz"  % STACK_VERSION_PATTERN
+  pig_tar_dest_file = "/hdp/apps/%s/pig/pig.tar.gz"     % STACK_VERSION_PATTERN
 
-  hive_tar_source = config['configurations']['cluster-env']['hive_tar_source']
-  pig_tar_source = config['configurations']['cluster-env']['pig_tar_source']
-  hadoop_streaming_tar_source = config['configurations']['cluster-env']['hadoop-streaming_tar_source']
-  sqoop_tar_source = config['configurations']['cluster-env']['sqoop_tar_source']
-  mapreduce_tar_source = config['configurations']['cluster-env']['mapreduce_tar_source']
-  tez_tar_source = config['configurations']['cluster-env']['tez_tar_source']
-  
-  hive_tar_destination = config['configurations']['cluster-env']['hive_tar_destination_folder']  + "/" + os.path.basename(hive_tar_source)
-  pig_tar_destination = config['configurations']['cluster-env']['pig_tar_destination_folder'] + "/" + os.path.basename(pig_tar_source)
-  hadoop_streaming_tar_destination_dir = config['configurations']['cluster-env']['hadoop-streaming_tar_destination_folder']
-  sqoop_tar_destination_dir = config['configurations']['cluster-env']['sqoop_tar_destination_folder']
-  mapreduce_tar_destination = config['configurations']['cluster-env']['mapreduce_tar_destination_folder'] + "/" + os.path.basename(mapreduce_tar_source)
-  tez_tar_destination = config['configurations']['cluster-env']['tez_tar_destination_folder'] + "/" + os.path.basename(tez_tar_source)
+
+  hadoop_streaming_tar_source = "/usr/hdp/%s/hadoop/hadoop-streaming.jar"    % STACK_VERSION_PATTERN
+  sqoop_tar_source = "/usr/hdp/%s/sqoop/sqoop.tar.gz"                        % STACK_VERSION_PATTERN
+  hadoop_streaming_tar_dest_dir = "/hdp/apps/%s/mapreduce/"                  % STACK_VERSION_PATTERN
+  sqoop_tar_dest_dir = "/hdp/apps/%s/sqoop/"                                 % STACK_VERSION_PATTERN
 
  tarballs_mode = 0444
else:
  # --- Tarballs ---
+  webhcat_apps_dir = "/apps/webhcat"
+
+  # In HDP 2.1, the tarballs were copied from and to different locations.
+  # DON'T CHANGE THESE VARIABLE NAMES
  hive_tar_source = hive_tar_file
  pig_tar_source = pig_tar_file
-  hadoop_streaming_tar_source = hadoop_streeming_jars
-  sqoop_tar_source = sqoop_tar_file
+  hive_tar_dest_file = webhcat_apps_dir + "/hive.tar.gz"
+  pig_tar_dest_file = webhcat_apps_dir + "/pig.tar.gz"
 
-  webhcat_apps_dir = "/apps/webhcat"
-  
-  hive_tar_destination = webhcat_apps_dir + "/" + os.path.basename(hive_tar_source)
-  pig_tar_destination = webhcat_apps_dir + "/" + os.path.basename(pig_tar_source)
-  hadoop_streaming_tar_destination_dir = webhcat_apps_dir
-  sqoop_tar_destination_dir = webhcat_apps_dir
+  hadoop_streaming_tar_source = hadoop_streaming_jars   # this contains *
+  sqoop_tar_source = sqoop_tar_file                     # this contains *
+  hadoop_streaming_tar_dest_dir = webhcat_apps_dir
+  sqoop_tar_dest_dir = webhcat_apps_dir
 
  tarballs_mode = 0755
 
-
-
-
 execute_path = os.environ['PATH'] + os.pathsep + hive_bin + os.pathsep + hadoop_bin_dir
 hive_metastore_user_name = config['configurations']['hive-site']['javax.jdo.option.ConnectionUserName']
 hive_jdbc_connection_url = config['configurations']['hive-site']['javax.jdo.option.ConnectionURL']

+ 3 - 9
ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py

@@ -18,15 +18,13 @@ limitations under the License.
 Ambari Agent
 
 """
-from resource_management import *
-from resource_management.libraries.functions import format
+
+from resource_management.libraries.script.script import Script
+from resource_management.libraries.resources.hdfs_resource import HdfsResource
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import get_kinit_path
-from resource_management.libraries.script.script import Script
-
-import os
 
 # server configurations
 config = Script.get_config()
@@ -52,10 +50,6 @@ if Script.is_hdp_stack_greater_or_equal("2.2"):
  pig_conf_dir = "/usr/hdp/current/pig-client/conf"
  hadoop_home = '/usr/hdp/current/hadoop-client'
  pig_bin_dir = '/usr/hdp/current/pig-client/bin'
-  
-  tez_tar_source = config['configurations']['cluster-env']['tez_tar_source']
-  tez_tar_destination = config['configurations']['cluster-env']['tez_tar_destination_folder'] + "/" + os.path.basename(tez_tar_source)
-
 
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']

+ 18 - 13
ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py

@@ -18,11 +18,19 @@ limitations under the License.
 Ambari Agent
 
 """
-
-from resource_management import *
-from resource_management.libraries import functions
+import os
+
+from resource_management.libraries.script.script import Script
+from resource_management.libraries.resources.hdfs_resource import HdfsResource
+from resource_management.libraries.resources.execute_hadoop import ExecuteHadoop
+from resource_management.libraries.functions.version import compare_versions
+from resource_management.libraries.functions.copy_tarball import copy_to_hdfs
+from resource_management.libraries.functions import format
 from ambari_commons import OSConst
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
+from resource_management.core.resources.system import File, Execute
+from resource_management.core.source import StaticFile
+
 
 
 class PigServiceCheck(Script):
   pass

@@ -61,7 +69,8 @@ class PigServiceCheckLinux(PigServiceCheck):
      tries     = 3,
      try_sleep = 5,
      path      = format('{pig_bin_dir}:/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'),
-      user      = params.smokeuser
+      user      = params.smokeuser,
+      logoutput = True
    )
 
    test_cmd = format("fs -test -e {output_dir}")
@@ -86,14 +95,9 @@ class PigServiceCheckLinux(PigServiceCheck):
      )
 
      # Check for Pig-on-Tez
-      params.HdfsResource(InlineTemplate(params.tez_tar_destination).get_content(),
-                          type="file",
-                          action="create_on_execute",
-                          source=params.tez_tar_source,
-                          group=params.user_group,
-                          owner=params.hdfs_user
-      )
-      params.HdfsResource(None, action="execute")
+      resource_created = copy_to_hdfs("tez", params.user_group, params.hdfs_user)
+      if resource_created:
+        params.HdfsResource(None, action="execute")
 
      if params.security_enabled:
        kinit_cmd = format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser_principal};")
@@ -105,7 +109,8 @@ class PigServiceCheckLinux(PigServiceCheck):
        tries     = 3,
        try_sleep = 5,
        path      = format('{pig_bin_dir}:/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'),
-        user      = params.smokeuser
+        user      = params.smokeuser,
+        logoutput = True
      )
 
      ExecuteHadoop(test_cmd,

+ 6 - 11
ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/job_history_server.py

@@ -20,13 +20,13 @@ limitations under the License.
 
 import sys
 import os
+
+from resource_management.libraries.script.script import Script
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import hdp_select
 from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
-from resource_management.libraries.functions.format import format
+from resource_management.libraries.functions.copy_tarball import copy_to_hdfs
 from resource_management.libraries.functions.check_process_status import check_process_status
-from resource_management.core.resources import Execute
-from resource_management.core.exceptions import ComponentIsNotRunning
 from resource_management.core.logger import Logger
 from resource_management.core import shell
 from setup_spark import *
@@ -78,14 +78,9 @@ class JobHistoryServer(Script):
      conf_select.select(params.stack_name, "spark", params.version)
      hdp_select.select("spark-historyserver", params.version)
 
-      params.HdfsResource(InlineTemplate(params.tez_tar_destination).get_content(),
-                          type="file",
-                          action="create_on_execute",
-                          source=params.tez_tar_source,
-                          group=params.user_group,
-                          owner=params.hdfs_user
-      )
-      params.HdfsResource(None, action="execute")
+      resource_created = copy_to_hdfs("tez", params.user_group, params.hdfs_user)
+      if resource_created:
+        params.HdfsResource(None, action="execute")
 
 if __name__ == "__main__":
   JobHistoryServer().execute()

+ 0 - 2
ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py

@@ -70,8 +70,6 @@ if Script.is_hdp_stack_greater_or_equal("2.2"):
   spark_log_dir = config['configurations']['spark-env']['spark_log_dir']
   spark_log_dir = config['configurations']['spark-env']['spark_log_dir']
   spark_pid_dir = status_params.spark_pid_dir
   spark_pid_dir = status_params.spark_pid_dir
   spark_home = format("/usr/hdp/current/{component_directory}")
   spark_home = format("/usr/hdp/current/{component_directory}")
-  tez_tar_source = config['configurations']['cluster-env']['tez_tar_source']
-  tez_tar_destination = config['configurations']['cluster-env']['tez_tar_destination_folder'] + "/" + os.path.basename(tez_tar_source)
 
 
 
 
 java_home = config['hostLevelParams']['java_home']
 java_home = config['hostLevelParams']['java_home']

+ 1 - 0
ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/service_check.py

@@ -36,6 +36,7 @@ class SparkServiceCheck(Script):
    Execute(format("curl -s -o /dev/null -w'%{{http_code}}' --negotiate -u: -k http://{spark_history_server_host}:{spark_history_ui_port} | grep 200"),
      tries = 10,
      try_sleep=3,
+      logoutput=True
    )
 
 if __name__ == "__main__":

+ 8 - 8
ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_service.py

@@ -17,7 +17,12 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
 '''
-from resource_management import *
+
+from resource_management.libraries.script.script import Script
+from resource_management.libraries.resources.hdfs_resource import HdfsResource
+from resource_management.libraries.functions.copy_tarball import copy_to_hdfs
+from resource_management.libraries.functions import format
+from resource_management.core.resources.system import File, Execute
 
 def spark_service(action):
   import params
@@ -27,13 +32,8 @@ def spark_service(action):
      spark_kinit_cmd = format("{kinit_path_local} -kt {spark_kerberos_keytab} {spark_principal}; ")
      Execute(spark_kinit_cmd, user=params.spark_user)
 
-      params.HdfsResource(InlineTemplate(params.tez_tar_destination).get_content(),
-                          type="file",
-                          action="create_on_execute",
-                          source=params.tez_tar_source,
-                          group=params.user_group,
-                          owner=params.hdfs_user
-      )
+    resource_created = copy_to_hdfs("tez", params.user_group, params.hdfs_user)
+    if resource_created:
      params.HdfsResource(None, action="execute")
 
    no_op_test = format(

+ 9 - 17
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py

@@ -19,14 +19,20 @@ Ambari Agent
 
 """
 
-from resource_management import *
+from resource_management.libraries.script.script import Script
+from resource_management.libraries.resources.hdfs_resource import HdfsResource
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions.check_process_status import check_process_status
+from resource_management.libraries.functions.copy_tarball import copy_to_hdfs
 from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.security_commons import build_expectations, \
   cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \
   FILE_TYPE_XML
+from resource_management.core.source import Template
+from resource_management.core.logger import Logger
+
 from yarn import yarn
 from service import service
 from ambari_commons import OSConst
@@ -73,14 +79,7 @@ class HistoryServerDefault(HistoryServer):
    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
      conf_select.select(params.stack_name, "hadoop", params.version)
      hdp_select.select("hadoop-mapreduce-historyserver", params.version)
-      params.HdfsResource(InlineTemplate(params.mapreduce_tar_destination).get_content(),
-                          type="file",
-                          action="create_on_execute",
-                          source=params.mapreduce_tar_source,
-                          owner=params.hdfs_user,
-                          group=params.user_group,
-                          mode=0444,
-      )
+      copy_to_hdfs("mapreduce", params.user_group, params.hdfs_user)
      params.HdfsResource(None, action="execute")
 
 
@@ -90,14 +89,7 @@ class HistoryServerDefault(HistoryServer):
    self.configure(env) # FOR SECURITY
    
    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
-      params.HdfsResource(InlineTemplate(params.mapreduce_tar_destination).get_content(),
-                          type="file",
-                          action="create_on_execute",
-                          source=params.mapreduce_tar_source,
-                          owner=params.hdfs_user,
-                          group=params.user_group,
-                          mode=0444,
-      )
+      copy_to_hdfs("mapreduce", params.user_group, params.hdfs_user)
      params.HdfsResource(None, action="execute")
 
    service('historyserver', action='start', serviceName='mapreduce')

+ 2 - 8
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py

@@ -20,13 +20,13 @@ Ambari Agent
 """
 import os
 
+from resource_management.libraries.script.script import Script
+from resource_management.libraries.resources.hdfs_resource import HdfsResource
 from resource_management.libraries.functions import conf_select
-from resource_management import *
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.default import default
-from resource_management.libraries.script.script import Script
 from resource_management.libraries import functions
 
 import status_params
@@ -91,16 +91,10 @@ if Script.is_hdp_stack_greater_or_equal("2.2"):
  hadoop_yarn_home = format("/usr/hdp/current/{yarn_role_root}")
  yarn_bin = format("/usr/hdp/current/{yarn_role_root}/sbin")
  yarn_container_bin = format("/usr/hdp/current/{yarn_role_root}/bin")
-  
-  mapreduce_tar_source = config['configurations']['cluster-env']['mapreduce_tar_source']
-  mapreduce_tar_destination = config['configurations']['cluster-env']['mapreduce_tar_destination_folder'] + "/" + os.path.basename(mapreduce_tar_source)
 
  # the configuration direction for HDFS/YARN/MapR is the hadoop config
  # directory, which is symlinked by hadoop-client only
  hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
-  tez_tar_source = config['configurations']['cluster-env']['tez_tar_source']
-  tez_tar_destination = config['configurations']['cluster-env']['tez_tar_destination_folder'] + "/" + os.path.basename(tez_tar_source)
-
 
 limits_conf_dir = "/etc/security/limits.d"
 execute_path = os.environ['PATH'] + os.pathsep + hadoop_bin_dir + os.pathsep + yarn_container_bin

+ 13 - 12
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py

@@ -19,20 +19,28 @@ Ambari Agent
 
 """
 
-from resource_management import *
+from resource_management.libraries.script.script import Script
+from resource_management.libraries.resources.hdfs_resource import HdfsResource
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions.check_process_status import check_process_status
+from resource_management.libraries.functions.copy_tarball import copy_to_hdfs
 from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
 from resource_management.libraries.functions.security_commons import build_expectations, \
   cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \
   FILE_TYPE_XML
+from resource_management.core.resources.system import File, Execute
+from resource_management.core.source import Template
+from resource_management.core.logger import Logger
+
+
+
 from install_jars import install_tez_jars
 from yarn import yarn
 from service import service
 from ambari_commons import OSConst
 from ambari_commons.os_family_impl import OsFamilyImpl
 from setup_ranger_yarn import setup_ranger_yarn
-import os
 
 
 class Resourcemanager(Script):
@@ -108,16 +116,9 @@ class ResourcemanagerDefault(Resourcemanager):
    if not Script.is_hdp_stack_greater_or_equal("2.2"):
      install_tez_jars()
    else:
-      # will work only for stack versions >=2.2
-      if os.path.exists(params.tez_tar_source):
-        params.HdfsResource(InlineTemplate(params.tez_tar_destination).get_content(),
-                            type="file",
-                            action="create_on_execute",
-                            source=params.tez_tar_source,
-                            group=params.user_group,
-                            owner=params.hdfs_user
-        )
-      params.HdfsResource(None, action="execute")
+      resource_created = copy_to_hdfs("tez", params.user_group, params.hdfs_user)
+      if resource_created:
+        params.HdfsResource(None, action="execute")
    service('resourcemanager', action='start')
 
  def status(self, env):

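The replacement above is the core of the fix: instead of templating cluster-env paths and declaring the HdfsResource inline, the script asks copy_to_hdfs() to stage tez.tar.gz for the current stack version and only flushes the pending HDFS actions when something was actually scheduled. As a standalone sketch (params names mirror the script; the helper comes from the new copy_tarball.py):

    from resource_management.libraries.functions.copy_tarball import copy_to_hdfs

    def stage_tez_tarball(params):
        # copy_to_hdfs returns True when it queued a create_on_execute resource.
        resource_created = copy_to_hdfs("tez", params.user_group, params.hdfs_user)
        if resource_created:
            # Run the queued HdfsResource actions; skip an empty batch.
            params.HdfsResource(None, action="execute")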
+ 5 - 4
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/status_params.py

@@ -17,9 +17,10 @@ See the License for the specific language governing permissions and
 limitations under the License.
 
 """
-
-from resource_management import *
-from resource_management.libraries.functions import conf_select
+from resource_management.libraries.script.script import Script
+from resource_management.libraries import functions
+from resource_management.libraries.functions import format
+from resource_management.libraries.functions.default import default
 from ambari_commons import OSCheck
 
 config = Script.get_config()
@@ -51,7 +52,7 @@ else:
   yarn_historyserver_pid_file = format("{yarn_pid_dir}/yarn-{yarn_user}-timelineserver.pid")  # *-historyserver.pid is deprecated
   mapred_historyserver_pid_file = format("{mapred_pid_dir}/mapred-{mapred_user}-historyserver.pid")
 
-  hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
+  hadoop_conf_dir = functions.conf_select.get_hadoop_conf_dir()
 
 
   hostname = config['hostname']
   kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))

+ 0 - 83
ambari-server/src/main/resources/stacks/HDP/2.2/configuration/cluster-env.xml

@@ -21,87 +21,4 @@
 -->
 
 <configuration>
-
-  <!-- The properties that end in tar_source describe the pattern of where the tar.gz files come from.
-  They will replace {{ hdp_stack_version }} with the "#.#.#.#" value followed by -* (which is the build number in HDP 2.2).
-  When copying those tarballs, Ambari will look up the corresponding tar_destination_folder property to know where it
-  should be copied to.
-  All of the destination folders must begin with hdfs://
-  Please note that the spaces inside of {{ ... }} are important.
-
-  IMPORTANT: Any properties included here must also be declared in site_properties.js
-
-  -->
-  <!-- Tez tarball is needed by Hive Server when using the Tez execution engine. -->
-  <property>
-    <name>tez_tar_source</name>
-    <value>/usr/hdp/current/tez-client/lib/tez.tar.gz</value>
-    <description>Source file path that uses dynamic variables and regex to copy the file to HDFS.</description>
-  </property>
-  <property>
-    <name>tez_tar_destination_folder</name>
-    <value>hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/</value>
-    <description>Destination HDFS folder for the file.</description>
-  </property>
-
-  <!-- Hive tarball is needed by WebHCat. -->
-  <property>
-    <name>hive_tar_source</name>
-    <value>/usr/hdp/current/hive-client/hive.tar.gz</value>
-    <description>Source file path that uses dynamic variables and regex to copy the file to HDFS.</description>
-  </property>
-  <property>
-    <name>hive_tar_destination_folder</name>
-    <value>hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/</value>
-    <description>Destination HDFS folder for the file.</description>
-  </property>
-
-  <!-- Pig tarball is needed by WebHCat. -->
-  <property>
-    <name>pig_tar_source</name>
-    <value>/usr/hdp/current/pig-client/pig.tar.gz</value>
-    <description>Source file path that uses dynamic variables and regex to copy the file to HDFS.</description>
-  </property>
-  <property>
-    <name>pig_tar_destination_folder</name>
-    <value>hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/</value>
-    <description>Destination HDFS folder for the file.</description>
-  </property>
-
-  <!-- Hadoop Streaming jar is needed by WebHCat. -->
-  <property>
-    <name>hadoop-streaming_tar_source</name>
-    <value>/usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming.jar</value>
-    <description>Source file path that uses dynamic variables and regex to copy the file to HDFS.</description>
-  </property>
-  <property>
-    <name>hadoop-streaming_tar_destination_folder</name>
-    <value>hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/</value>
-    <description>Destination HDFS folder for the file.</description>
-  </property>
-
-  <!-- Sqoop tarball is needed by WebHCat. -->
-  <property>
-    <name>sqoop_tar_source</name>
-    <value>/usr/hdp/current/sqoop-client/sqoop.tar.gz</value>
-    <description>Source file path that uses dynamic variables and regex to copy the file to HDFS.</description>
-  </property>
-  <property>
-    <name>sqoop_tar_destination_folder</name>
-    <value>hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/</value>
-    <description>Destination HDFS folder for the file.</description>
-  </property>
-
-  <!-- MapReduce2 tarball -->
-  <property>
-    <name>mapreduce_tar_source</name>
-    <value>/usr/hdp/current/hadoop-client/mapreduce.tar.gz</value>
-    <description>Source file path that uses dynamic variables and regex to copy the file to HDFS.</description>
-  </property>
-  <property>
-    <name>mapreduce_tar_destination_folder</name>
-    <value>hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/</value>
-    <description>Destination HDFS folder for the file.</description>
-  </property>
-
 </configuration>

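Removing these properties means the tarball source and destination are no longer cluster configuration; copy_tarball.py now derives them from the tarball name and the stack version. The mapping below is only an illustration built from the values deleted above (the helper's real table and version handling are not shown in this diff):

    # Hypothetical lookup: name passed to copy_to_hdfs -> (local source, HDFS destination).
    TARBALL_PATHS = {
        "tez":       ("/usr/hdp/current/tez-client/lib/tez.tar.gz",
                      "/hdp/apps/{version}/tez/tez.tar.gz"),
        "hive":      ("/usr/hdp/current/hive-client/hive.tar.gz",
                      "/hdp/apps/{version}/hive/hive.tar.gz"),
        "pig":       ("/usr/hdp/current/pig-client/pig.tar.gz",
                      "/hdp/apps/{version}/pig/pig.tar.gz"),
        "mapreduce": ("/usr/hdp/current/hadoop-client/mapreduce.tar.gz",
                      "/hdp/apps/{version}/mapreduce/mapreduce.tar.gz"),
    }

    def tarball_paths(name, hdp_version):
        # Substitute the concrete stack version (e.g. "2.2.0.0-1234") at call time,
        # which is what lets an upgrade copy the new version's tarball.
        source, destination = TARBALL_PATHS[name]
        return source, destination.format(version=hdp_version)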
+ 0 - 83
ambari-server/src/main/resources/stacks/HDP/2.3.GlusterFS/configuration/cluster-env.xml

@@ -21,87 +21,4 @@
 -->
 
 <configuration>
-
-  <!-- The properties that end in tar_source describe the pattern of where the tar.gz files come from.
-  They will replace {{ hdp_stack_version }} with the "#.#.#.#" value followed by -* (which is the build number in HDP 2.2).
-  When copying those tarballs, Ambari will look up the corresponding tar_destination_folder property to know where it
-  should be copied to.
-  All of the destination folders must begin with hdfs://
-  Please note that the spaces inside of {{ ... }} are important.
-
-  IMPORTANT: Any properties included here must also be declared in site_properties.js
-
-  -->
-  <!-- Tez tarball is needed by Hive Server when using the Tez execution engine. -->
-  <property>
-    <name>tez_tar_source</name>
-    <value>/usr/hdp/current/tez-client/lib/tez.tar.gz</value>
-    <description>Source file path that uses dynamic variables and regex to copy the file to HDFS.</description>
-  </property>
-  <property>
-    <name>tez_tar_destination_folder</name>
-    <value>glusterfs:///apps/{{ hdp_stack_version }}/tez/</value>
-    <description>Destination HDFS folder for the file.</description>
-  </property>
-
-  <!-- Hive tarball is needed by WebHCat. -->
-  <property>
-    <name>hive_tar_source</name>
-    <value>/usr/hdp/current/hive-client/hive.tar.gz</value>
-    <description>Source file path that uses dynamic variables and regex to copy the file to HDFS.</description>
-  </property>
-  <property>
-    <name>hive_tar_destination_folder</name>
-    <value>glusterfs:///apps/{{ hdp_stack_version }}/hive/</value>
-    <description>Destination HDFS folder for the file.</description>
-  </property>
-
-  <!-- Pig tarball is needed by WebHCat. -->
-  <property>
-    <name>pig_tar_source</name>
-    <value>/usr/hdp/current/pig-client/pig.tar.gz</value>
-    <description>Source file path that uses dynamic variables and regex to copy the file to HDFS.</description>
-  </property>
-  <property>
-    <name>pig_tar_destination_folder</name>
-    <value>glusterfs:///apps/{{ hdp_stack_version }}/pig/</value>
-    <description>Destination HDFS folder for the file.</description>
-  </property>
-
-  <!-- Hadoop Streaming jar is needed by WebHCat. -->
-  <property>
-    <name>hadoop-streaming_tar_source</name>
-    <value>/usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming.jar</value>
-    <description>Source file path that uses dynamic variables and regex to copy the file to HDFS.</description>
-  </property>
-  <property>
-    <name>hadoop-streaming_tar_destination_folder</name>
-    <value>glusterfs:///apps/{{ hdp_stack_version }}/mapreduce/</value>
-    <description>Destination HDFS folder for the file.</description>
-  </property>
-
-  <!-- Sqoop tarball is needed by WebHCat. -->
-  <property>
-    <name>sqoop_tar_source</name>
-    <value>/usr/hdp/current/sqoop-client/sqoop.tar.gz</value>
-    <description>Source file path that uses dynamic variables and regex to copy the file to HDFS.</description>
-  </property>
-  <property>
-    <name>sqoop_tar_destination_folder</name>
-    <value>glusterfs:///apps/{{ hdp_stack_version }}/sqoop/</value>
-    <description>Destination HDFS folder for the file.</description>
-  </property>
-
-  <!-- MapReduce2 tarball -->
-  <property>
-    <name>mapreduce_tar_source</name>
-    <value>/usr/hdp/current/hadoop-client/mapreduce.tar.gz</value>
-    <description>Source file path that uses dynamic variables and regex to copy the file to HDFS.</description>
-  </property>
-  <property>
-    <name>mapreduce_tar_destination_folder</name>
-    <value>glusterfs:///apps/{{ hdp_stack_version }}/mapreduce/</value>
-    <description>Destination HDFS folder for the file.</description>
-  </property>
-
 </configuration>

+ 0 - 83
ambari-server/src/main/resources/stacks/HDP/2.3.GlusterFS/configuration/cluster-env.xml.version

@@ -21,87 +21,4 @@
 -->
 
 <configuration>
-
-  <!-- The properties that end in tar_source describe the pattern of where the tar.gz files come from.
-  They will replace {{ hdp_stack_version }} with the "#.#.#.#" value followed by -* (which is the build number in HDP 2.2).
-  When copying those tarballs, Ambari will look up the corresponding tar_destination_folder property to know where it
-  should be copied to.
-  All of the destination folders must begin with hdfs://
-  Please note that the spaces inside of {{ ... }} are important.
-
-  IMPORTANT: Any properties included here must also be declared in site_properties.js
-
-  -->
-  <!-- Tez tarball is needed by Hive Server when using the Tez execution engine. -->
-  <property>
-    <name>tez_tar_source</name>
-    <value>/usr/hdp/current/tez-client/lib/tez.tar.gz</value>
-    <description>Source file path that uses dynamic variables and regex to copy the file to HDFS.</description>
-  </property>
-  <property>
-    <name>tez_tar_destination_folder</name>
-    <value>glusterfs:///apps/{{ hdp_stack_version }}/tez/</value>
-    <description>Destination HDFS folder for the file.</description>
-  </property>
-
-  <!-- Hive tarball is needed by WebHCat. -->
-  <property>
-    <name>hive_tar_source</name>
-    <value>/usr/hdp/current/hive-client/hive.tar.gz</value>
-    <description>Source file path that uses dynamic variables and regex to copy the file to HDFS.</description>
-  </property>
-  <property>
-    <name>hive_tar_destination_folder</name>
-    <value>glusterfs:///apps/{{ hdp_stack_version }}/hive/</value>
-    <description>Destination HDFS folder for the file.</description>
-  </property>
-
-  <!-- Pig tarball is needed by WebHCat. -->
-  <property>
-    <name>pig_tar_source</name>
-    <value>/usr/hdp/current/pig-client/pig.tar.gz</value>
-    <description>Source file path that uses dynamic variables and regex to copy the file to HDFS.</description>
-  </property>
-  <property>
-    <name>pig_tar_destination_folder</name>
-    <value>glusterfs:///apps/{{ hdp_stack_version }}/pig/</value>
-    <description>Destination HDFS folder for the file.</description>
-  </property>
-
-  <!-- Hadoop Streaming jar is needed by WebHCat. -->
-  <property>
-    <name>hadoop-streaming_tar_source</name>
-    <value>/usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming.jar</value>
-    <description>Source file path that uses dynamic variables and regex to copy the file to HDFS.</description>
-  </property>
-  <property>
-    <name>hadoop-streaming_tar_destination_folder</name>
-    <value>glusterfs:///apps/{{ hdp_stack_version }}/mapreduce/</value>
-    <description>Destination HDFS folder for the file.</description>
-  </property>
-
-  <!-- Sqoop tarball is needed by WebHCat. -->
-  <property>
-    <name>sqoop_tar_source</name>
-    <value>/usr/hdp/current/sqoop-client/sqoop.tar.gz</value>
-    <description>Source file path that uses dynamic variables and regex to copy the file to HDFS.</description>
-  </property>
-  <property>
-    <name>sqoop_tar_destination_folder</name>
-    <value>glusterfs:///apps/{{ hdp_stack_version }}/sqoop/</value>
-    <description>Destination HDFS folder for the file.</description>
-  </property>
-
-  <!-- MapReduce2 tarball -->
-  <property>
-    <name>mapreduce_tar_source</name>
-    <value>/usr/hdp/current/hadoop-client/mapreduce.tar.gz</value>
-    <description>Source file path that uses dynamic variables and regex to copy the file to HDFS.</description>
-  </property>
-  <property>
-    <name>mapreduce_tar_destination_folder</name>
-    <value>glusterfs:///apps/{{ hdp_stack_version }}/mapreduce/</value>
-    <description>Destination HDFS folder for the file.</description>
-  </property>
-
 </configuration>

+ 1 - 13
ambari-server/src/main/resources/upgrade/catalog/UpgradeCatalog_1.3_to_2.2.json

@@ -148,19 +148,7 @@
            "content": "\n# Set Hadoop-specific environment variables here.\n\n#Set path to where bin/hadoop is available\n#Set path to where bin/hadoop is available\nexport HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\n\n#set the path to where bin/hbase is available\nexport HBASE_HOME=${HBASE_HOME:-{{hbase_home}}}\n\n#Set the path to where bin/hive is available\nexport HIVE_HOME=${HIVE_HOME:-{{hive_home}}}\n\n#Set the path for where zookeper config dir is\nexport ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}\n\n# add libthrift in hive to sqoop class path first so hive imports work\nexport SQOOP_USER_CLASSPATH=\"`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}\""
            "content": "\n# Set Hadoop-specific environment variables here.\n\n#Set path to where bin/hadoop is available\n#Set path to where bin/hadoop is available\nexport HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\n\n#set the path to where bin/hbase is available\nexport HBASE_HOME=${HBASE_HOME:-{{hbase_home}}}\n\n#Set the path to where bin/hive is available\nexport HIVE_HOME=${HIVE_HOME:-{{hive_home}}}\n\n#Set the path for where zookeper config dir is\nexport ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}\n\n# add libthrift in hive to sqoop class path first so hive imports work\nexport SQOOP_USER_CLASSPATH=\"`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}\""
         },
         },
         "cluster-env": {
         "cluster-env": {
-            "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz",
-            "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/",
-            "pig_tar_source": "/usr/hdp/current/pig-client/pig.tar.gz",
-            "ignore_groupsusers_create": "false",
-            "hadoop-streaming_tar_source": "/usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming.jar",
-            "hive_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/",
-            "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/",
-            "tez_tar_source": "/usr/hdp/current/tez-client/lib/tez.tar.gz",
-            "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/",
-            "tez_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/",
-            "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz",
-            "sqoop_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/",
-            "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz"
+            "ignore_groupsusers_create": "false"
         },
         "capacity-scheduler": {
           "yarn.scheduler.capacity.maximum-am-resource-percent": "0.2",

+ 0 - 14
ambari-server/src/main/resources/upgrade/catalog/UpgradeCatalog_2.0_to_2.2.2.json

@@ -65,20 +65,6 @@
                     "yarn.scheduler.capacity.root.accessible-node-labels.default.maximum-capacity": "-1",
                     "yarn.scheduler.capacity.root.accessible-node-labels.default.maximum-capacity": "-1",
                     "yarn.scheduler.capacity.root.default-node-label-expression": " "
                     "yarn.scheduler.capacity.root.default-node-label-expression": " "
                 },
                 },
-                "cluster-env": {
-                    "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/",
-                    "hadoop-streaming_tar_source": "/usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming.jar",
-                    "hive_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/",
-                    "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz",
-                    "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/",
-                    "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz",
-                    "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/",
-                    "pig_tar_source": "/usr/hdp/current/pig-client/pig.tar.gz",
-                    "sqoop_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/",
-                    "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz",
-                    "tez_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/",
-                    "tez_tar_source": "/usr/hdp/current/tez-client/lib/tez.tar.gz"
-                },
                 "core-site": {
                 "core-site": {
                     "hadoop.http.authentication.simple.anonymous.allowed": "true"
                     "hadoop.http.authentication.simple.anonymous.allowed": "true"
                 },
                 },

+ 0 - 12
ambari-server/src/main/resources/upgrade/catalog/UpgradeCatalog_2.0_to_2.2.4.json

@@ -78,18 +78,6 @@
                     "yarn.scheduler.capacity.root.default-node-label-expression": " "
                     "yarn.scheduler.capacity.root.default-node-label-expression": " "
                 },
                 },
                 "cluster-env": {
                 "cluster-env": {
-                    "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/",
-                    "hadoop-streaming_tar_source": "/usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming.jar",
-                    "hive_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/",
-                    "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz",
-                    "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/",
-                    "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz",
-                    "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/",
-                    "pig_tar_source": "/usr/hdp/current/pig-client/pig.tar.gz",
-                    "sqoop_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/",
-                    "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz",
-                    "tez_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/",
-                    "tez_tar_source": "/usr/hdp/current/tez-client/lib/tez.tar.gz",
                     "smokeuser_principal_name": {
                     "smokeuser_principal_name": {
                         "remove": "yes"
                         "remove": "yes"
                     }
                     }

+ 0 - 14
ambari-server/src/main/resources/upgrade/catalog/UpgradeCatalog_2.1_to_2.2.2.json

@@ -76,20 +76,6 @@
           "yarn.scheduler.capacity.root.accessible-node-labels.default.capacity": "-1",
           "yarn.scheduler.capacity.root.accessible-node-labels.default.capacity": "-1",
           "yarn.scheduler.capacity.root.accessible-node-labels.default.maximum-capacity": "-1",
           "yarn.scheduler.capacity.root.accessible-node-labels.default.maximum-capacity": "-1",
           "yarn.scheduler.capacity.root.default-node-label-expression": " "    
           "yarn.scheduler.capacity.root.default-node-label-expression": " "    
-        },
-		"cluster-env": {
-          "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/",
-          "hadoop-streaming_tar_source": "/usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming.jar",
-          "hive_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/",
-          "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz",
-          "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-          "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz", 
-          "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/", 
-          "pig_tar_source": "/usr/hdp/current/pig-client/pig.tar.gz",
-          "sqoop_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/", 
-          "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz", 
-          "tez_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/", 
-          "tez_tar_source": "/usr/hdp/current/tez-client/lib/tez.tar.gz"        
         },
 		"core-site": {
 		  "hadoop.http.authentication.simple.anonymous.allowed": "true"

+ 0 - 12
ambari-server/src/main/resources/upgrade/catalog/UpgradeCatalog_2.1_to_2.2.4.json

@@ -84,18 +84,6 @@
           "yarn.scheduler.capacity.root.default-node-label-expression": " "
           "yarn.scheduler.capacity.root.default-node-label-expression": " "
         },
         },
 		"cluster-env": {
 		"cluster-env": {
-          "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/",
-          "hadoop-streaming_tar_source": "/usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming.jar",
-          "hive_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/",
-          "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz",
-          "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/",
-          "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz",
-          "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/",
-          "pig_tar_source": "/usr/hdp/current/pig-client/pig.tar.gz",
-          "sqoop_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/",
-          "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz",
-          "tez_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/",
-          "tez_tar_source": "/usr/hdp/current/tez-client/lib/tez.tar.gz",
           "smokeuser_principal_name": {"remove": "yes"}
           "smokeuser_principal_name": {"remove": "yes"}
         },
         },
 		"core-site": {
 		"core-site": {

+ 0 - 12
ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog200Test.java

@@ -451,20 +451,8 @@ public class UpgradeCatalog200Test {
     propertiesExpectedT0.put("user_group", "hadoop");
     propertiesExpectedT0.put("user_group", "hadoop");
     propertiesExpectedT0.put("kinit_path_local", "/usr/bin");
     propertiesExpectedT0.put("kinit_path_local", "/usr/bin");
     propertiesExpectedT0.put("security_enabled", "true");
     propertiesExpectedT0.put("security_enabled", "true");
-    propertiesExpectedT0.put("hive_tar_destination_folder", "hdfs,///hdp/apps/{{ hdp_stack_version }}/hive/");
-    propertiesExpectedT0.put("sqoop_tar_source", "/usr/hdp/current/sqoop-client/sqoop.tar.gz");
-    propertiesExpectedT0.put("hadoop-streaming_tar_destination_folder", "hdfs,///hdp/apps/{{ hdp_stack_version }}/mapreduce/");
-    propertiesExpectedT0.put("pig_tar_source", "/usr/hdp/current/pig-client/pig.tar.gz");
-    propertiesExpectedT0.put("mapreduce_tar_destination_folder", "hdfs,///hdp/apps/{{ hdp_stack_version }}/mapreduce/");
-    propertiesExpectedT0.put("hive_tar_source", "/usr/hdp/current/hive-client/hive.tar.gz");
-    propertiesExpectedT0.put("mapreduce_tar_source", "/usr/hdp/current/hadoop-client/mapreduce.tar.gz");
     propertiesExpectedT0.put("smokeuser", "ambari-qa");
     propertiesExpectedT0.put("smokeuser", "ambari-qa");
-    propertiesExpectedT0.put("pig_tar_destination_folder", "hdfs,///hdp/apps/{{ hdp_stack_version }}/pig/");
-    propertiesExpectedT0.put("hadoop-streaming_tar_source", "/usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming.jar");
-    propertiesExpectedT0.put("tez_tar_destination_folder", "hdfs,///hdp/apps/{{ hdp_stack_version }}/tez/");
     propertiesExpectedT0.put("smokeuser_keytab", "/etc/security/keytabs/smokeuser.headless.keytab");
     propertiesExpectedT0.put("smokeuser_keytab", "/etc/security/keytabs/smokeuser.headless.keytab");
-    propertiesExpectedT0.put("sqoop_tar_destination_folder", "hdfs,///hdp/apps/{{ hdp_stack_version }}/sqoop/");
-    propertiesExpectedT0.put("tez_tar_source", "/usr/hdp/current/tez-client/lib/tez.tar.gz");
     propertiesExpectedT0.put("ignore_groupsusers_create", "false");
     propertiesExpectedT0.put("ignore_groupsusers_create", "false");
 
 
     final Map<String, String> propertiesExpectedT1 = new HashMap<String, String>(propertiesExpectedT0);
     final Map<String, String> propertiesExpectedT1 = new HashMap<String, String>(propertiesExpectedT0);

+ 20 - 57
ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py

@@ -35,6 +35,7 @@ class TestHiveServer(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "HIVE/0.12.0.2.0/package"
   STACK_VERSION = "2.0.6"
   UPGRADE_STACK_VERSION = "2.2"
+
   @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
@@ -51,7 +52,6 @@ class TestHiveServer(RMFTestCase):
   @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
   def test_start_default(self, socket_mock):
     s = socket_mock.return_value
-
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                        classname="HiveServer",
                        command="start",
@@ -84,7 +84,6 @@ class TestHiveServer(RMFTestCase):
 
 
   @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
   def test_start_default_no_copy(self):
-
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                        classname = "HiveServer",
                        command = "start",
@@ -271,6 +270,7 @@ class TestHiveServer(RMFTestCase):
     self.assertNoMoreResources()
 
   def assert_configure_default(self, no_tmp = False):
+    # Verify creating of Hcat and Hive directories
     self.assertResourceCalled('HdfsResource', '/apps/webhcat',
         security_enabled = False,
         hadoop_bin_dir = '/usr/bin',
@@ -295,19 +295,7 @@ class TestHiveServer(RMFTestCase):
         action = ['create_on_execute'],
         mode = 0755,
     )
-    self.assertResourceCalled('HdfsResource', '/apps/webhcat/hive.tar.gz',
-        security_enabled = False,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = UnknownConfigurationMock(),
-        source = '/usr/share/HDP-webhcat/hive.tar.gz',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['create_on_execute'],
-        group = 'hadoop',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'file',
-        mode = 0755,
-    )
+
     self.assertResourceCalled('HdfsResource', '/apps/hive/warehouse',
         security_enabled = False,
         hadoop_bin_dir = '/usr/bin',
@@ -363,6 +351,7 @@ class TestHiveServer(RMFTestCase):
                               group='hadoop',
                               recursive=True,
     )
+
     self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
                               group='hadoop',
                               conf_dir='/etc/hive/conf',
@@ -458,7 +447,6 @@ class TestHiveServer(RMFTestCase):
                               cd_access='a',
     )
 
-
   def assert_configure_secured(self):
     self.assertResourceCalled('HdfsResource', '/apps/webhcat',
         security_enabled = True,
@@ -484,19 +472,7 @@ class TestHiveServer(RMFTestCase):
         action = ['create_on_execute'],
         mode = 0755,
     )
-    self.assertResourceCalled('HdfsResource', '/apps/webhcat/hive.tar.gz',
-        security_enabled = True,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        source = '/usr/share/HDP-webhcat/hive.tar.gz',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['create_on_execute'],
-        group = 'hadoop',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'file',
-        mode = 0755,
-    )
+
     self.assertResourceCalled('HdfsResource', '/apps/hive/warehouse',
         security_enabled = True,
         hadoop_bin_dir = '/usr/bin',
@@ -815,7 +791,10 @@ class TestHiveServer(RMFTestCase):
     put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})
 
   @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=True))
-  def test_pre_rolling_restart(self):
+  @patch("resource_management.libraries.functions.copy_tarball.copy_to_hdfs")
+  def test_pre_rolling_restart(self, copy_to_hdfs_mock):
+    copy_to_hdfs_mock.return_value = True
+
     config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/default.json"
     with open(config_file, "r") as f:
       json_content = json.load(f)
@@ -829,19 +808,10 @@ class TestHiveServer(RMFTestCase):
                        target = RMFTestCase.TARGET_COMMON_SERVICES)
     self.assertResourceCalled('Execute',
                               'hdp-select set hive-server2 %s' % version,)
-    self.assertResourceCalled('HdfsResource', 'hdfs:///hdp/apps/2.0.0.0-1234/mapreduce//mapreduce.tar.gz',
-        security_enabled = False,
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
-        keytab = UnknownConfigurationMock(),
-        source = '/usr/hdp/current/hadoop-client/mapreduce.tar.gz',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['create_on_execute'],
-        group = 'hadoop',
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        type = 'file',
-        mode = 0444,
-    )
+
+    copy_to_hdfs_mock.assert_any_call("mapreduce", "hadoop", "hdfs")
+    copy_to_hdfs_mock.assert_any_call("tez", "hadoop", "hdfs")
+    self.assertEquals(2, copy_to_hdfs_mock.call_count)
     self.assertResourceCalled('HdfsResource', None,
         security_enabled = False,
         hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
@@ -855,13 +825,15 @@ class TestHiveServer(RMFTestCase):
 
 
   @patch("resource_management.core.shell.call")
   @patch("resource_management.core.shell.call")
   @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=True))
   @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=True))
-  def test_pre_rolling_restart_23(self, call_mock):
+  @patch("resource_management.libraries.functions.copy_tarball.copy_to_hdfs")
+  def test_pre_rolling_restart_23(self, copy_to_hdfs_mock, call_mock):
     config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/default.json"
     with open(config_file, "r") as f:
       json_content = json.load(f)
     version = '2.3.0.0-1234'
     json_content['commandParams']['version'] = version
 
 
+    copy_to_hdfs_mock.return_value = True
     mocks_dict = {}
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                        classname = "HiveServer",
@@ -873,20 +845,11 @@ class TestHiveServer(RMFTestCase):
                        mocks_dict = mocks_dict)
 
     self.assertResourceCalled('Execute',
+
                               'hdp-select set hive-server2 %s' % version,)
-    self.assertResourceCalled('HdfsResource', 'hdfs:///hdp/apps/2.0.0.0-1234/mapreduce//mapreduce.tar.gz',
-        security_enabled = False,
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
-        keytab = UnknownConfigurationMock(),
-        source = '/usr/hdp/current/hadoop-client/mapreduce.tar.gz',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['create_on_execute'],
-        group = 'hadoop',
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        type = 'file',
-        mode = 0444,
-    )
+    copy_to_hdfs_mock.assert_any_call("mapreduce", "hadoop", "hdfs")
+    copy_to_hdfs_mock.assert_any_call("tez", "hadoop", "hdfs")
+    self.assertEquals(2, copy_to_hdfs_mock.call_count)
     self.assertResourceCalled('HdfsResource', None,
         security_enabled = False,
         hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',

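Because the scripts now call copy_to_hdfs() rather than declaring one HdfsResource per tarball, the tests above patch the helper and assert on its calls instead of on resource properties. A stripped-down sketch of that pattern with unittest.mock (the RMFTestCase machinery is omitted):

    from unittest.mock import patch

    @patch("resource_management.libraries.functions.copy_tarball.copy_to_hdfs")
    def run_pre_rolling_restart_check(copy_to_hdfs_mock):
        copy_to_hdfs_mock.return_value = True   # pretend something was staged
        # ... execute hive_server.py with command="pre_rolling_restart" here ...
        copy_to_hdfs_mock.assert_any_call("mapreduce", "hadoop", "hdfs")
        copy_to_hdfs_mock.assert_any_call("tez", "hadoop", "hdfs")
        assert copy_to_hdfs_mock.call_count == 2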
+ 2 - 0
ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_service_check.py

@@ -73,6 +73,7 @@ class TestPigServiceCheck(RMFTestCase):
       tries = 3,
       user = 'ambari-qa',
       try_sleep = 5,
+      logoutput = True
     )
        
     self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /user/ambari-qa/pigsmoke.out',
@@ -132,6 +133,7 @@ class TestPigServiceCheck(RMFTestCase):
       tries = 3,
       user = 'ambari-qa',
       try_sleep = 5,
+      logoutput = True
     )
        
     self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /user/ambari-qa/pigsmoke.out',

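The functional change asserted here is just logoutput=True on the smoke-test command, so the Pig output is written to the agent's command log. A sketch of the corresponding Execute in the service check (the command string is illustrative):

    from resource_management.core.resources.system import Execute

    Execute("pig /tmp/pigSmoke.sh",
            tries=3,
            try_sleep=5,
            user="ambari-qa",
            logoutput=True)   # stream pig's stdout/stderr into the command log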
+ 5 - 15
ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py

@@ -674,13 +674,15 @@ class TestHistoryServer(RMFTestCase):
 
 
   @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value="2.3.0"))
   @patch.object(functions, "get_hdp_version", new = MagicMock(return_value="2.3.0.0-1234"))
-  def test_pre_rolling_restart_23(self):
+  @patch("resource_management.libraries.functions.copy_tarball.copy_to_hdfs")
+  def test_pre_rolling_restart_23(self, copy_to_hdfs_mock):
     config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/default.json"
     with open(config_file, "r") as f:
       json_content = json.load(f)
     version = '2.3.0.0-1234'
     json_content['commandParams']['version'] = version
 
 
+    copy_to_hdfs_mock.return_value = True
     mocks_dict = {}
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/historyserver.py",
                        classname = "HistoryServer",
@@ -692,20 +694,7 @@ class TestHistoryServer(RMFTestCase):
                        mocks_dict = mocks_dict)
 
     self.assertResourceCalled('Execute', 'hdp-select set hadoop-mapreduce-historyserver %s' % version)
-    self.assertResourceCalled('HdfsResource', 'hdfs:///hdp/apps/2.3.0.0-1234/mapreduce//mapreduce.tar.gz',
-        security_enabled = False,
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
-        keytab = UnknownConfigurationMock(),
-        source = '/usr/hdp/current/hadoop-client/mapreduce.tar.gz',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hdfs',
-        group = 'hadoop',
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        type = 'file',
-        action = ['create_on_execute'],
-        mode = 0444,
-    )
+    copy_to_hdfs_mock.assert_called_with("mapreduce", "hadoop", "hdfs")
     self.assertResourceCalled('HdfsResource', None,
         security_enabled = False,
         hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
@@ -715,6 +704,7 @@ class TestHistoryServer(RMFTestCase):
         action = ['execute'],
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
     )
+
     self.assertNoMoreResources()
 
     self.assertEquals(2, mocks_dict['call'].call_count)

+ 2 - 2
ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py

@@ -185,7 +185,7 @@ class TestResourceManager(RMFTestCase):
         content = Template('exclude_hosts_list.j2'),
         group = 'hadoop',
     )
-    self.assertResourceCalled('Execute', ' yarn --config /etc/hadoop/conf rmadmin -refreshNodes',
+    self.assertResourceCalled('Execute', '{rm_kinit_cmd} yarn --config {conf_dir} rmadmin -refreshNodes',
         environment = {'PATH': "/bin:/usr/bin:/usr/lib/hadoop-yarn/bin"},
         user = 'yarn',
     )
@@ -204,7 +204,7 @@ class TestResourceManager(RMFTestCase):
         content = Template('exclude_hosts_list.j2'),
         group = 'hadoop',
     )
-    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/rm.service.keytab rm/c6401.ambari.apache.org@EXAMPLE.COM; yarn --config /etc/hadoop/conf rmadmin -refreshNodes',
+    self.assertResourceCalled('Execute', '{rm_kinit_cmd} yarn --config {conf_dir} rmadmin -refreshNodes',
         environment = {'PATH': "/bin:/usr/bin:/usr/lib/hadoop-yarn/bin"},
         user = 'yarn',
     )

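The expected command is now the kinit prefix plus the yarn CLI, rendered through format(). A sketch of how the resourcemanager script builds the refreshNodes call (variable values are illustrative; rm_kinit_cmd is empty on unsecured clusters, which is why the unsecured assertion starts with a blank):

    from resource_management.core.resources.system import Execute
    from resource_management.libraries.functions import format

    rm_kinit_cmd = ""            # e.g. "/usr/bin/kinit -kt <keytab> rm/<host>@<REALM>; " when secured
    conf_dir = "/etc/hadoop/conf"
    yarn_user = "yarn"

    Execute(format("{rm_kinit_cmd} yarn --config {conf_dir} rmadmin -refreshNodes"),
            environment={'PATH': "/bin:/usr/bin:/usr/lib/hadoop-yarn/bin"},
            user=yarn_user)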
+ 3 - 15
ambari-server/src/test/python/stacks/2.0.6/configs/client-upgrade.json

@@ -569,22 +569,10 @@
         }, 
         "cluster-env": {
             "security_enabled": "false",
-            "pig_tar_source": "/usr/hdp/current/pig-client/pig.tar.gz", 
-            "hadoop-streaming_tar_source": "/usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming.jar",
-            "sqoop_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/", 
-            "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/",
-            "ignore_groupsusers_create": "false", 
+            "ignore_groupsusers_create": "false",
             "kerberos_domain": "EXAMPLE.COM",
             "kerberos_domain": "EXAMPLE.COM",
-            "hive_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/", 
-            "tez_tar_source": "/usr/hdp/current/tez-client/lib/tez.tar.gz", 
-            "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz",
-            "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz",
-            "tez_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/", 
-            "user_group": "hadoop", 
-            "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz",
-            "smokeuser": "ambari-qa", 
-            "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-            "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/"
+            "user_group": "hadoop",
+            "smokeuser": "ambari-qa"
         },
 		"ranger-hbase-plugin-properties": {
             "POLICY_MGR_URL": "{{policymgr_mgr_url}}", 

+ 7 - 19
ambari-server/src/test/python/stacks/2.0.6/configs/default.json

@@ -528,26 +528,14 @@
             "xml_configurations_supported" : "false"
             "xml_configurations_supported" : "false"
         },
         },
       "cluster-env": {
       "cluster-env": {
-        "security_enabled": "false",
-        "hdfs_user_principal" : "",
-        "hdfs_user_keytab" : "",
-        "ignore_groupsusers_create": "false",
-        "smokeuser": "ambari-qa",
-        "kerberos_domain": "EXAMPLE.COM",
-        "user_group": "hadoop",
-        "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz", 
-        "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-        "pig_tar_source": "/usr/hdp/current/pig-client/pig.tar.gz", 
-        "hive_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/", 
-        "tez_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/", 
-        "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-        "tez_tar_source": "/usr/hdp/current/tez-client/lib/tez.tar.gz", 
-        "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/", 
-        "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz", 
-        "sqoop_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/", 
-        "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz"
+            "security_enabled": "false",
+            "hdfs_user_principal" : "",
+            "hdfs_user_keytab" : "",
+            "ignore_groupsusers_create": "false",
+            "smokeuser": "ambari-qa",
+            "kerberos_domain": "EXAMPLE.COM",
+            "user_group": "hadoop"
       },
-
       "hbase-env": {
       "hbase-env": {
             "hbase_pid_dir": "/var/run/hbase", 
             "hbase_pid_dir": "/var/run/hbase", 
             "hbase_user": "hbase", 
             "hbase_user": "hbase", 

+ 1 - 3
ambari-server/src/test/python/stacks/2.0.6/configs/default_hive_nn_ha.json

@@ -281,9 +281,7 @@
         "ignore_groupsusers_create": "false",
         "ignore_groupsusers_create": "false",
         "smokeuser": "ambari-qa",
         "smokeuser": "ambari-qa",
         "kerberos_domain": "EXAMPLE.COM",
         "kerberos_domain": "EXAMPLE.COM",
-        "user_group": "hadoop",
-        "mapreduce_tar_destination_folder" : "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/",
-        "mapreduce_tar_source" : "/usr/hdp/current/hadoop-client/mapreduce.tar.gz"
+        "user_group": "hadoop"
       },
 
       "hbase-env": {

+ 1 - 3
ambari-server/src/test/python/stacks/2.0.6/configs/default_hive_nn_ha_2.json

@@ -283,9 +283,7 @@
         "ignore_groupsusers_create": "false",
         "ignore_groupsusers_create": "false",
         "smokeuser": "ambari-qa",
         "smokeuser": "ambari-qa",
         "kerberos_domain": "EXAMPLE.COM",
         "kerberos_domain": "EXAMPLE.COM",
-        "user_group": "hadoop",
-        "mapreduce_tar_destination_folder" : "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/",
-        "mapreduce_tar_source" : "/usr/hdp/current/hadoop-client/mapreduce.tar.gz"
+        "user_group": "hadoop"
       },
 
       "hbase-env": {

+ 1 - 3
ambari-server/src/test/python/stacks/2.0.6/configs/default_no_install.json

@@ -506,9 +506,7 @@
         "ignore_groupsusers_create": "false",
         "ignore_groupsusers_create": "false",
         "smokeuser": "ambari-qa",
         "smokeuser": "ambari-qa",
         "kerberos_domain": "EXAMPLE.COM",
         "kerberos_domain": "EXAMPLE.COM",
-        "user_group": "hadoop",
-        "mapreduce_tar_destination_folder" : "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/",
-        "mapreduce_tar_source" : "/usr/hdp/current/hadoop-client/mapreduce.tar.gz"
+        "user_group": "hadoop"
       },
 
       "hbase-env": {

+ 3 - 17
ambari-server/src/test/python/stacks/2.0.6/configs/hbase-2.2.json

@@ -565,24 +565,10 @@
         }, 
         "cluster-env": {
             "security_enabled": "false",
-            "hadoop-streaming_tar_source": "/usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming.jar",
-            "sqoop_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/", 
-            "oozie_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/oozie/", 
-            "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-            "pig_tar_source": "/usr/hdp/current/pig-client/pig.tar.gz", 
-            "ignore_groupsusers_create": "false", 
+            "ignore_groupsusers_create": "false",
             "kerberos_domain": "EXAMPLE.COM",
             "kerberos_domain": "EXAMPLE.COM",
-            "hive_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/", 
-            "tez_tar_source": "/usr/hdp/current/tez-client/lib/tez.tar.gz", 
-            "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz",
-            "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz",
-            "tez_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/", 
-            "user_group": "hadoop", 
-            "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz",
-            "smokeuser": "ambari-qa", 
-            "oozie_tar_source": "/usr/hdp/current/oozie-client/oozie-sharelib.tar.gz", 
-            "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-            "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/"
+            "user_group": "hadoop",
+            "smokeuser": "ambari-qa"
         },
 "ranger-hbase-plugin-properties": {
             "POLICY_MGR_URL": "{{policymgr_mgr_url}}", 

+ 3 - 17
ambari-server/src/test/python/stacks/2.0.6/configs/hbase-check-2.2.json

@@ -594,24 +594,10 @@
         }, 
         "cluster-env": {
             "security_enabled": "false",
-            "hadoop-streaming_tar_source": "/usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming.jar",
-            "sqoop_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/", 
-            "oozie_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/oozie/", 
-            "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-            "pig_tar_source": "/usr/hdp/current/pig-client/pig.tar.gz", 
-            "ignore_groupsusers_create": "false", 
+            "ignore_groupsusers_create": "false",
             "kerberos_domain": "EXAMPLE.COM",
             "kerberos_domain": "EXAMPLE.COM",
-            "hive_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/", 
-            "tez_tar_source": "/usr/hdp/current/tez-client/lib/tez.tar.gz", 
-            "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz",
-            "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz",
-            "tez_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/", 
-            "user_group": "hadoop", 
-            "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz",
-            "smokeuser": "ambari-qa", 
-            "oozie_tar_source": "/usr/hdp/current/oozie-client/oozie-sharelib.tar.gz", 
-            "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-            "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/"
+            "user_group": "hadoop",
+            "smokeuser": "ambari-qa"
         }
     }, 
     "configurationTags": {

+ 5 - 17
ambari-server/src/test/python/stacks/2.0.6/configs/hbase-preupgrade.json

@@ -105,23 +105,11 @@
             "fs.defaultFS": "hdfs://c6403.org:8020"
             "fs.defaultFS": "hdfs://c6403.org:8020"
         },   
         },   
         "cluster-env": {
         "cluster-env": {
-            "security_enabled": "false", 
-            "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz", 
-            "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-            "pig_tar_source": "/usr/hdp/current/pig-client/pig.tar.gz", 
-            "tez_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/", 
-            "smokeuser": "ambari-qa", 
-            "ignore_groupsusers_create": "false", 
-            "hadoop-streaming_tar_source": "/usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming.jar", 
-            "kerberos_domain": "EXAMPLE.COM", 
-            "sqoop_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/", 
-            "hive_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/", 
-            "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-            "tez_tar_source": "/usr/hdp/current/tez-client/lib/tez.tar.gz", 
-            "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/", 
-            "user_group": "hadoop", 
-            "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz", 
-            "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz"
+            "security_enabled": "false",
+            "ignore_groupsusers_create": "false",
+            "kerberos_domain": "EXAMPLE.COM",
+            "user_group": "hadoop",
+            "smokeuser": "ambari-qa"
        },
		"ranger-hbase-plugin-properties": {
            "POLICY_MGR_URL": "{{policymgr_mgr_url}}", 

+ 3 - 17
ambari-server/src/test/python/stacks/2.0.6/configs/hbase-rs-2.2.json

@@ -565,24 +565,10 @@
        }, 
        "cluster-env": {
            "security_enabled": "false",
-            "hadoop-streaming_tar_source": "/usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming.jar",
-            "sqoop_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/", 
-            "oozie_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/oozie/", 
-            "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-            "pig_tar_source": "/usr/hdp/current/pig-client/pig.tar.gz", 
-            "ignore_groupsusers_create": "false", 
+            "ignore_groupsusers_create": "false",
             "kerberos_domain": "EXAMPLE.COM",
             "kerberos_domain": "EXAMPLE.COM",
-            "hive_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/", 
-            "tez_tar_source": "/usr/hdp/current/tez-client/lib/tez.tar.gz", 
-            "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz",
-            "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz",
-            "tez_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/", 
-            "user_group": "hadoop", 
-            "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz",
-            "smokeuser": "ambari-qa", 
-            "oozie_tar_source": "/usr/hdp/current/oozie-client/oozie-sharelib.tar.gz", 
-            "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-            "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/"
+            "user_group": "hadoop",
+            "smokeuser": "ambari-qa"
        },
        "ranger-hbase-plugin-properties": {
            "POLICY_MGR_URL": "{{policymgr_mgr_url}}", 

+ 6 - 18
ambari-server/src/test/python/stacks/2.0.6/configs/nn_ru_lzo.json

@@ -159,24 +159,12 @@
             "namenode_opt_permsize": "128m"
             "namenode_opt_permsize": "128m"
         }, 
         }, 
         "cluster-env": {
         "cluster-env": {
-            "security_enabled": "false", 
-            "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz", 
-            "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-            "pig_tar_source": "/usr/hdp/current/pig-client/pig.tar.gz", 
-            "tez_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/", 
-            "ignore_groupsusers_create": "false", 
-            "hadoop-streaming_tar_source": "/usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming.jar", 
-            "kerberos_domain": "EXAMPLE.COM", 
-            "hive_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/", 
-            "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-            "smokeuser_keytab": "/etc/security/keytabs/smokeuser.headless.keytab", 
-            "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/", 
-            "user_group": "hadoop", 
-            "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz", 
-            "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz", 
-            "smokeuser": "ambari-qa", 
-            "sqoop_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/", 
-            "tez_tar_source": "/usr/hdp/current/tez-client/lib/tez.tar.gz"
+            "security_enabled": "false",
+            "ignore_groupsusers_create": "false",
+            "kerberos_domain": "EXAMPLE.COM",
+            "user_group": "hadoop",
+            "smokeuser": "ambari-qa",
+            "smokeuser_keytab": "/etc/security/keytabs/smokeuser.headless.keytab"
        },
        "ranger-hdfs-plugin-properties": {
            "XAAUDIT.HDFS.DESTINTATION_FLUSH_INTERVAL_SECONDS": "900", 

+ 1 - 13
ambari-server/src/test/python/stacks/2.0.6/configs/ranger-namenode-start.json

@@ -251,23 +251,11 @@
        },
        "cluster-env": {
            "security_enabled": "false",
-            "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz",
-            "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/",
-            "pig_tar_source": "/usr/hdp/current/pig-client/pig.tar.gz",
-            "tez_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/",
             "ignore_groupsusers_create": "false",
             "ignore_groupsusers_create": "false",
-            "hadoop-streaming_tar_source": "/usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming.jar",
             "kerberos_domain": "EXAMPLE.COM",
             "kerberos_domain": "EXAMPLE.COM",
-            "hive_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/",
-            "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/",
-            "smokeuser_keytab": "/etc/security/keytabs/smokeuser.headless.keytab",
-            "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/",
             "user_group": "hadoop",
             "user_group": "hadoop",
-            "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz",
-            "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz",
             "smokeuser": "ambari-qa",
             "smokeuser": "ambari-qa",
-            "sqoop_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/",
-            "tez_tar_source": "/usr/hdp/current/tez-client/lib/tez.tar.gz"
+            "smokeuser_keytab": "/etc/security/keytabs/smokeuser.headless.keytab"
        }
    },
    "configurationTags": {

+ 1 - 12
ambari-server/src/test/python/stacks/2.0.6/configs/secured.json

@@ -550,18 +550,7 @@
             "kerberos_domain": "EXAMPLE.COM",
             "kerberos_domain": "EXAMPLE.COM",
             "user_group": "hadoop",
             "user_group": "hadoop",
             "smokeuser_keytab": "/etc/security/keytabs/smokeuser.headless.keytab",
             "smokeuser_keytab": "/etc/security/keytabs/smokeuser.headless.keytab",
-            "kinit_path_local": "/usr/bin",
-	        "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz", 
-	        "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-	        "pig_tar_source": "/usr/hdp/current/pig-client/pig.tar.gz", 
-	        "hive_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/", 
-	        "tez_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/", 
-	        "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-	        "tez_tar_source": "/usr/hdp/current/tez-client/lib/tez.tar.gz", 
-	        "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/", 
-	        "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz", 
-	        "sqoop_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/", 
-	        "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz"
+            "kinit_path_local": "/usr/bin"
        },
        "hadoop-env": {
            "namenode_opt_maxnewsize": "200m",

+ 3 - 15
ambari-server/src/test/python/stacks/2.0.6/configs/zk-service_check_2.2.json

@@ -53,22 +53,10 @@
        }, 
        "cluster-env": {
            "security_enabled": "false",
-            "pig_tar_source": "/usr/hdp/current/pig-client/pig.tar.gz", 
-            "hadoop-streaming_tar_source": "/usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming.jar",
-            "sqoop_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/", 
-            "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/",
-            "ignore_groupsusers_create": "false", 
+            "ignore_groupsusers_create": "false",
             "kerberos_domain": "EXAMPLE.COM",
             "kerberos_domain": "EXAMPLE.COM",
-            "hive_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/", 
-            "tez_tar_source": "/usr/hdp/current/tez-client/lib/tez.tar.gz", 
-            "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz",
-            "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz",
-            "tez_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/", 
-            "user_group": "hadoop", 
-            "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz",
-            "smokeuser": "ambari-qa", 
-            "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-            "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/"
+            "user_group": "hadoop",
+            "smokeuser": "ambari-qa"
        }
    }, 
    "commandId": "25-1", 

+ 2 - 14
ambari-server/src/test/python/stacks/2.1/configs/client-upgrade.json

@@ -540,22 +540,10 @@
        }, 
        "cluster-env": {
            "security_enabled": "false",
-            "pig_tar_source": "/usr/hdp/current/pig-client/pig.tar.gz", 
-            "hadoop-streaming_tar_source": "/usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming.jar",
-            "sqoop_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/", 
-            "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/",
             "ignore_groupsusers_create": "false", 
             "ignore_groupsusers_create": "false", 
             "kerberos_domain": "EXAMPLE.COM",
             "kerberos_domain": "EXAMPLE.COM",
-            "hive_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/", 
-            "tez_tar_source": "/usr/hdp/current/tez-client/lib/tez.tar.gz", 
-            "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz",
-            "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz",
-            "tez_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/", 
-            "user_group": "hadoop", 
-            "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz",
-            "smokeuser": "ambari-qa", 
-            "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-            "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/"
+            "user_group": "hadoop",
+            "smokeuser": "ambari-qa"
        }
    }, 
    "configurationTags": {

+ 10 - 20
ambari-server/src/test/python/stacks/2.2/PIG/test_pig_service_check.py

@@ -26,7 +26,10 @@ class TestPigServiceCheck(RMFTestCase):
  COMMON_SERVICES_PACKAGE_DIR = "PIG/0.12.0.2.0/package"
  STACK_VERSION = "2.2"

-  def test_service_check_secure(self):
+  @patch("resource_management.libraries.functions.copy_tarball.copy_to_hdfs")
+  def test_service_check_secure(self, copy_to_hdfs_mock):
+    copy_to_hdfs_mock.return_value = True
+
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/service_check.py",
                       classname="PigServiceCheck",
                       command="service_check",
@@ -69,16 +72,13 @@ class TestPigServiceCheck(RMFTestCase):
      content=StaticFile("pigSmoke.sh"),
      mode=0755
    )
-    
-    
-
     self.assertResourceCalled("Execute", "pig /tmp/pigSmoke.sh",
     self.assertResourceCalled("Execute", "pig /tmp/pigSmoke.sh",
       path=["/usr/hdp/current/pig-client/bin:/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin"],
       path=["/usr/hdp/current/pig-client/bin:/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin"],
       tries=3,
       tries=3,
       user="ambari-qa",
       user="ambari-qa",
-      try_sleep=5
+      try_sleep=5,
+      logoutput=True
    )
-
    self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /user/ambari-qa/pigsmoke.out',
        bin_dir = '/usr/hdp/current/hadoop-client/bin',
        user = 'ambari-qa',
@@ -105,19 +105,8 @@ class TestPigServiceCheck(RMFTestCase):
        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
        type = 'file',
    )
-    self.assertResourceCalled('HdfsResource', 'hdfs:///hdp/apps/2.2.0.0/tez//tez.tar.gz',
-        security_enabled = True,
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        source = '/usr/hdp/current/tez-client/lib/tez.tar.gz',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hdfs',
-        group = 'hadoop',
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        type = 'file',
-        action = ['create_on_execute'],
-    )
+
+    copy_to_hdfs_mock.assert_called_with("tez", "hadoop", "hdfs")
    self.assertResourceCalled('HdfsResource', None,
        security_enabled = True,
        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
@@ -135,7 +124,8 @@ class TestPigServiceCheck(RMFTestCase):
      tries=3,
      try_sleep=5,
      path=["/usr/hdp/current/pig-client/bin:/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin"],
-      user="ambari-qa"
+      user="ambari-qa",
+      logoutput=True
    )

    self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /user/ambari-qa/pigsmoke.out',

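Note on the hunks above: the explicit HdfsResource assertion on the versioned tez.tar.gz path is dropped, and the test instead patches copy_to_hdfs and asserts on the arguments it received. A minimal, illustrative sketch of that mocking pattern follows; it assumes the Ambari Python libraries are importable, and run_service_check() is a hypothetical stand-in for driving service_check.py through RMFTestCase.executeScript.

    # Sketch only -- not part of this commit.
    from mock import patch  # unittest.mock behaves the same on Python 3

    from resource_management.libraries.functions import copy_tarball


    def run_service_check():
        # Hypothetical stand-in for the part of the Pig service check that
        # stages tez.tar.gz into HDFS before running the smoke job.
        copy_tarball.copy_to_hdfs("tez", "hadoop", "hdfs")


    @patch("resource_management.libraries.functions.copy_tarball.copy_to_hdfs")
    def test_stages_tez_tarball(copy_to_hdfs_mock):
        copy_to_hdfs_mock.return_value = True  # pretend the upload was scheduled
        run_service_check()
        copy_to_hdfs_mock.assert_called_with("tez", "hadoop", "hdfs")


    test_stages_tez_tarball()
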
+ 7 - 37
ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py

@@ -80,7 +80,7 @@ class TestJobHistoryServer(RMFTestCase):
    )
    self.assert_configure_secured()
    self.assertNoMoreResources()
-    
+
  def test_start_secured(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/job_history_server.py",
                   classname = "JobHistoryServer",
@@ -93,28 +93,7 @@ class TestJobHistoryServer(RMFTestCase):
    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/spark.service.keytab spark/localhost@EXAMPLE.COM; ',
        user = 'spark',
    )
-    self.assertResourceCalled('HdfsResource', 'hdfs:///hdp/apps/2.2.0.0/tez//tez.tar.gz',
-        security_enabled = True,
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
-        keytab = UnknownConfigurationMock(),
-        source = '/usr/hdp/current/tez-client/lib/tez.tar.gz',
-        kinit_path_local = '/usr/bin/kinit',
-        user = UnknownConfigurationMock(),
-        owner = UnknownConfigurationMock(),
-        group = 'hadoop',
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        type = 'file',
-        action = ['create_on_execute'],
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = True,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        user = UnknownConfigurationMock(),
-        action = ['execute'],
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
-    )
+
    self.assertResourceCalled('Execute', '/usr/hdp/current/spark-client/sbin/start-history-server.sh',
        environment = {'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
        not_if = 'ls /var/run/spark/spark-spark-org.apache.spark.deploy.history.HistoryServer-1.pid >/dev/null 2>&1 && ps -p `cat /var/run/spark/spark-spark-org.apache.spark.deploy.history.HistoryServer-1.pid` >/dev/null 2>&1',
@@ -253,13 +232,15 @@ class TestJobHistoryServer(RMFTestCase):
        group = 'spark',
    )

-  def test_pre_rolling_restart_23(self):
+  @patch("resource_management.libraries.functions.copy_tarball.copy_to_hdfs")
+  def test_pre_rolling_restart_23(self, copy_to_hdfs_mock):
    config_file = self.get_src_folder()+"/test/python/stacks/2.2/configs/default.json"
    with open(config_file, "r") as f:
      json_content = json.load(f)
    version = '2.3.0.0-1234'
    json_content['commandParams']['version'] = version

+    copy_to_hdfs_mock.return_value = True
    mocks_dict = {}
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/job_history_server.py",
                       classname = "JobHistoryServer",
@@ -271,19 +252,8 @@ class TestJobHistoryServer(RMFTestCase):
                       mocks_dict = mocks_dict)

    self.assertResourceCalled('Execute', 'hdp-select set spark-historyserver {0}'.format(version))
-    self.assertResourceCalled('HdfsResource', 'hdfs:///hdp/apps/2.2.0.0/tez//tez.tar.gz',
-        security_enabled = False,
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
-        keytab = UnknownConfigurationMock(),
-        source = '/usr/hdp/current/tez-client/lib/tez.tar.gz',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hdfs',
-        group = 'hadoop',
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        type = 'file',
-        action = ['create_on_execute'],
-    )
+
+    copy_to_hdfs_mock.assert_called_with("tez", "hadoop", "hdfs")
    self.assertResourceCalled('HdfsResource', None,
        security_enabled = False,
        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',

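The job history server hunks make the same substitution: the versioned tez.tar.gz HdfsResource assertion is replaced by copy_to_hdfs_mock.assert_called_with("tez", "hadoop", "hdfs"), and return_value is set to True so the remaining HdfsResource(None, ...) assertion that flushes queued HDFS operations still applies. A hypothetical caller-side sketch that is consistent with those assertions follows; params, user_group, and hdfs_user are assumed names from the script's params module and are not shown in this diff.

    # Hypothetical sketch -- not the committed script.
    import params  # the script's pre-configured params module (assumed)

    from resource_management.libraries.functions.copy_tarball import copy_to_hdfs


    def stage_tez_tarball():
        # copy_to_hdfs returns a boolean; flush the queued HDFS operations
        # once something has been scheduled for upload.
        resource_created = copy_to_hdfs("tez", params.user_group, params.hdfs_user)
        if resource_created:
            params.HdfsResource(None, action="execute")
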
+ 2 - 0
ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_service_check.py

@@ -36,6 +36,7 @@ class TestServiceCheck(RMFTestCase):
     self.assertResourceCalled('Execute', "curl -s -o /dev/null -w'%{http_code}' --negotiate -u: -k http://localhost:18080 | grep 200",
     self.assertResourceCalled('Execute', "curl -s -o /dev/null -w'%{http_code}' --negotiate -u: -k http://localhost:18080 | grep 200",
         tries = 10,
         tries = 10,
         try_sleep = 3,
         try_sleep = 3,
+        logoutput = True
    )
    self.assertNoMoreResources()
    
@@ -54,5 +55,6 @@ class TestServiceCheck(RMFTestCase):
     self.assertResourceCalled('Execute', "curl -s -o /dev/null -w'%{http_code}' --negotiate -u: -k http://localhost:18080 | grep 200",
     self.assertResourceCalled('Execute', "curl -s -o /dev/null -w'%{http_code}' --negotiate -u: -k http://localhost:18080 | grep 200",
         tries = 10,
         tries = 10,
         try_sleep = 3,
         try_sleep = 3,
+        logoutput = True
    )
    self.assertNoMoreResources()

+ 1 - 12
ambari-server/src/test/python/stacks/2.2/configs/default.json

@@ -186,18 +186,7 @@
             "ignore_groupsusers_create": "false",
             "ignore_groupsusers_create": "false",
             "smokeuser": "ambari-qa",
             "smokeuser": "ambari-qa",
             "kerberos_domain": "EXAMPLE.COM",
             "kerberos_domain": "EXAMPLE.COM",
-            "user_group": "hadoop",
-	        "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz", 
-	        "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-	        "pig_tar_source": "/usr/hdp/current/pig-client/pig.tar.gz", 
-	        "hive_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/", 
-	        "tez_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/", 
-	        "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-	        "tez_tar_source": "/usr/hdp/current/tez-client/lib/tez.tar.gz", 
-	        "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/", 
-	        "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz", 
-	        "sqoop_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/", 
-	        "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz"
+            "user_group": "hadoop"
        },
        "ranger-knox-plugin-properties": {
            "POLICY_MGR_URL": "{{policymgr_mgr_url}}", 

+ 3 - 15
ambari-server/src/test/python/stacks/2.2/configs/falcon-upgrade.json

@@ -222,22 +222,10 @@
        }, 
        "cluster-env": {
            "security_enabled": "false",
-            "pig_tar_source": "/usr/hdp/current/pig-client/pig.tar.gz", 
-            "hadoop-streaming_tar_source": "/usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming.jar",
-            "sqoop_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/", 
-            "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/",
-            "ignore_groupsusers_create": "false", 
+            "ignore_groupsusers_create": "false",
             "kerberos_domain": "EXAMPLE.COM",
             "kerberos_domain": "EXAMPLE.COM",
-            "hive_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/", 
-            "tez_tar_source": "/usr/hdp/current/tez-client/lib/tez.tar.gz", 
-            "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz",
-            "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz",
-            "tez_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/", 
-            "user_group": "hadoop", 
-            "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz",
-            "smokeuser": "ambari-qa", 
-            "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-            "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/"
+            "user_group": "hadoop",
+            "smokeuser": "ambari-qa"
        }
    }, 
    "configurationTags": {

+ 1 - 13
ambari-server/src/test/python/stacks/2.2/configs/hive-upgrade.json

@@ -374,22 +374,10 @@
        },
        "cluster-env": {
            "security_enabled": "false",
-            "pig_tar_source": "/usr/hdp/current/pig-client/pig.tar.gz",
-            "hadoop-streaming_tar_source": "/usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming.jar",
-            "sqoop_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/",
-            "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/",
             "ignore_groupsusers_create": "false",
             "ignore_groupsusers_create": "false",
             "kerberos_domain": "EXAMPLE.COM",
             "kerberos_domain": "EXAMPLE.COM",
-            "hive_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/",
-            "tez_tar_source": "/usr/hdp/current/tez-client/lib/tez.tar.gz",
-            "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz",
-            "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz",
-            "tez_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/",
             "user_group": "hadoop",
             "user_group": "hadoop",
-            "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz",
-            "smokeuser": "ambari-qa",
-            "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/",
-            "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/"
+            "smokeuser": "ambari-qa"
        },
        "ranger-hive-plugin-properties": {
            "XAAUDIT.HDFS.DESTINTATION_FLUSH_INTERVAL_SECONDS": "900", 

+ 7 - 19
ambari-server/src/test/python/stacks/2.2/configs/journalnode-upgrade-hdfs-secure.json

@@ -1027,25 +1027,13 @@
             "content": "\n#\n#\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership.  The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License.  You may obtain a copy of the License at\n#\n#   http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing,\n# software distributed under the License is distributed on an\n# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n# KIND, either express or implied.  See the License for the\n# specific language governing permissions and limitations\n# under the License.\n#\n#\n#\n\n# ***** Set root logger level to DEBUG and its only appender to A.\nlog4j.logger.org.apache.pig=info, A\n\n# ***** A is set to be a ConsoleAppender.\nlog4j.appender.A=org.apache.log4j.ConsoleAppender\n# ***** A uses PatternLayout.\nlog4j.appender.A.layout=org.apache.log4j.PatternLayout\nlog4j.appender.A.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n"
             "content": "\n#\n#\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership.  The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License.  You may obtain a copy of the License at\n#\n#   http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing,\n# software distributed under the License is distributed on an\n# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n# KIND, either express or implied.  See the License for the\n# specific language governing permissions and limitations\n# under the License.\n#\n#\n#\n\n# ***** Set root logger level to DEBUG and its only appender to A.\nlog4j.logger.org.apache.pig=info, A\n\n# ***** A is set to be a ConsoleAppender.\nlog4j.appender.A=org.apache.log4j.ConsoleAppender\n# ***** A uses PatternLayout.\nlog4j.appender.A.layout=org.apache.log4j.PatternLayout\nlog4j.appender.A.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n"
         }, 
         }, 
         "cluster-env": {
         "cluster-env": {
-            "security_enabled": "true", 
-            "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz", 
-            "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-            "pig_tar_source": "/usr/hdp/current/pig-client/pig.tar.gz", 
-            "tez_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/", 
-            "smokeuser_principal_name": "ambari-qa@EXAMPLE.COM", 
-            "hadoop-streaming_tar_source": "/usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming.jar", 
-            "kerberos_domain": "EXAMPLE.COM", 
-            "hive_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/", 
-            "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-            "smokeuser_keytab": "/etc/security/keytabs/smokeuser.headless.keytab", 
-            "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/", 
-            "user_group": "hadoop", 
-            "ignore_groupsusers_create": "false", 
-            "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz", 
-            "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz", 
-            "smokeuser": "ambari-qa", 
-            "sqoop_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/", 
-            "tez_tar_source": "/usr/hdp/current/tez-client/lib/tez.tar.gz"
+            "security_enabled": "true",
+            "ignore_groupsusers_create": "false",
+            "kerberos_domain": "EXAMPLE.COM",
+            "user_group": "hadoop",
+            "smokeuser_principal_name": "ambari-qa@EXAMPLE.COM",
+            "smokeuser": "ambari-qa",
+            "smokeuser_keytab": "/etc/security/keytabs/smokeuser.headless.keytab"
        }
    }, 
    "configurationTags": {

+ 5 - 19
ambari-server/src/test/python/stacks/2.2/configs/journalnode-upgrade.json

@@ -1027,25 +1027,11 @@
             "content": "\n#\n#\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership.  The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License.  You may obtain a copy of the License at\n#\n#   http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing,\n# software distributed under the License is distributed on an\n# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n# KIND, either express or implied.  See the License for the\n# specific language governing permissions and limitations\n# under the License.\n#\n#\n#\n\n# ***** Set root logger level to DEBUG and its only appender to A.\nlog4j.logger.org.apache.pig=info, A\n\n# ***** A is set to be a ConsoleAppender.\nlog4j.appender.A=org.apache.log4j.ConsoleAppender\n# ***** A uses PatternLayout.\nlog4j.appender.A.layout=org.apache.log4j.PatternLayout\nlog4j.appender.A.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n"
             "content": "\n#\n#\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership.  The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License.  You may obtain a copy of the License at\n#\n#   http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing,\n# software distributed under the License is distributed on an\n# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n# KIND, either express or implied.  See the License for the\n# specific language governing permissions and limitations\n# under the License.\n#\n#\n#\n\n# ***** Set root logger level to DEBUG and its only appender to A.\nlog4j.logger.org.apache.pig=info, A\n\n# ***** A is set to be a ConsoleAppender.\nlog4j.appender.A=org.apache.log4j.ConsoleAppender\n# ***** A uses PatternLayout.\nlog4j.appender.A.layout=org.apache.log4j.PatternLayout\nlog4j.appender.A.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n"
         }, 
         }, 
         "cluster-env": {
         "cluster-env": {
-            "security_enabled": "true", 
-            "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz", 
-            "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-            "pig_tar_source": "/usr/hdp/current/pig-client/pig.tar.gz", 
-            "tez_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/", 
-            "smokeuser_principal_name": "ambari-qa@EXAMPLE.COM", 
-            "hadoop-streaming_tar_source": "/usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming.jar", 
-            "kerberos_domain": "EXAMPLE.COM", 
-            "hive_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/", 
-            "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-            "smokeuser_keytab": "/etc/security/keytabs/smokeuser.headless.keytab", 
-            "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/", 
-            "user_group": "hadoop", 
-            "ignore_groupsusers_create": "false", 
-            "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz", 
-            "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz", 
-            "smokeuser": "ambari-qa", 
-            "sqoop_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/", 
-            "tez_tar_source": "/usr/hdp/current/tez-client/lib/tez.tar.gz"
+            "security_enabled": "true",
+            "ignore_groupsusers_create": "false",
+            "kerberos_domain": "EXAMPLE.COM",
+            "user_group": "hadoop",
+            "smokeuser": "ambari-qa"
        }
    }, 
    "configurationTags": {

+ 1 - 13
ambari-server/src/test/python/stacks/2.2/configs/oozie-downgrade.json

@@ -173,22 +173,10 @@
        },
        "cluster-env": {
            "security_enabled": "false",
-            "pig_tar_source": "/usr/hdp/current/pig-client/pig.tar.gz",
-            "hadoop-streaming_tar_source": "/usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming.jar",
-            "sqoop_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/",
-            "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/",
             "ignore_groupsusers_create": "false",
             "ignore_groupsusers_create": "false",
             "kerberos_domain": "EXAMPLE.COM",
             "kerberos_domain": "EXAMPLE.COM",
-            "hive_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/",
-            "tez_tar_source": "/usr/hdp/current/tez-client/lib/tez.tar.gz",
-            "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz",
-            "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz",
-            "tez_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/",
             "user_group": "hadoop",
             "user_group": "hadoop",
-            "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz",
-            "smokeuser": "ambari-qa",
-            "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/",
-            "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/"
+            "smokeuser": "ambari-qa"
        }
    },
    "configurationTags": {

+ 1 - 13
ambari-server/src/test/python/stacks/2.2/configs/oozie-upgrade.json

@@ -172,22 +172,10 @@
        },
        "cluster-env": {
            "security_enabled": "false",
-            "pig_tar_source": "/usr/hdp/current/pig-client/pig.tar.gz",
-            "hadoop-streaming_tar_source": "/usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming.jar",
-            "sqoop_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/",
-            "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/",
             "ignore_groupsusers_create": "false",
             "ignore_groupsusers_create": "false",
             "kerberos_domain": "EXAMPLE.COM",
             "kerberos_domain": "EXAMPLE.COM",
-            "hive_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/",
-            "tez_tar_source": "/usr/hdp/current/tez-client/lib/tez.tar.gz",
-            "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz",
-            "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz",
-            "tez_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/",
             "user_group": "hadoop",
             "user_group": "hadoop",
-            "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz",
-            "smokeuser": "ambari-qa",
-            "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/",
-            "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/"
+            "smokeuser": "ambari-qa"
        }
    },
    "configurationTags": {

+ 7 - 19
ambari-server/src/test/python/stacks/2.2/configs/pig-service-check-secure.json

@@ -522,25 +522,13 @@
             "content": "\n#\n#\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership.  The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License.  You may obtain a copy of the License at\n#\n#   http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing,\n# software distributed under the License is distributed on an\n# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n# KIND, either express or implied.  See the License for the\n# specific language governing permissions and limitations\n# under the License.\n#\n#\n#\n\n# ***** Set root logger level to DEBUG and its only appender to A.\nlog4j.logger.org.apache.pig=info, A\n\n# ***** A is set to be a ConsoleAppender.\nlog4j.appender.A=org.apache.log4j.ConsoleAppender\n# ***** A uses PatternLayout.\nlog4j.appender.A.layout=org.apache.log4j.PatternLayout\nlog4j.appender.A.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n"
             "content": "\n#\n#\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership.  The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License.  You may obtain a copy of the License at\n#\n#   http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing,\n# software distributed under the License is distributed on an\n# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n# KIND, either express or implied.  See the License for the\n# specific language governing permissions and limitations\n# under the License.\n#\n#\n#\n\n# ***** Set root logger level to DEBUG and its only appender to A.\nlog4j.logger.org.apache.pig=info, A\n\n# ***** A is set to be a ConsoleAppender.\nlog4j.appender.A=org.apache.log4j.ConsoleAppender\n# ***** A uses PatternLayout.\nlog4j.appender.A.layout=org.apache.log4j.PatternLayout\nlog4j.appender.A.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n"
         }, 
         }, 
         "cluster-env": {
         "cluster-env": {
-            "security_enabled": "true", 
-            "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz", 
-            "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-            "pig_tar_source": "/usr/hdp/current/pig-client/pig.tar.gz", 
-            "tez_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/", 
-            "smokeuser_principal_name": "ambari-qa@EXAMPLE.COM", 
-            "hadoop-streaming_tar_source": "/usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming.jar", 
-            "kerberos_domain": "EXAMPLE.COM", 
-            "hive_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/", 
-            "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-            "smokeuser_keytab": "/etc/security/keytabs/smokeuser.headless.keytab", 
-            "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/", 
-            "user_group": "hadoop", 
-            "ignore_groupsusers_create": "false", 
-            "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz", 
-            "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz", 
-            "smokeuser": "ambari-qa", 
-            "sqoop_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/", 
-            "tez_tar_source": "/usr/hdp/current/tez-client/lib/tez.tar.gz"
+            "security_enabled": "true",
+            "ignore_groupsusers_create": "false",
+            "kerberos_domain": "EXAMPLE.COM",
+            "user_group": "hadoop",
+            "smokeuser_principal_name": "ambari-qa@EXAMPLE.COM",
+            "smokeuser": "ambari-qa",
+            "smokeuser_keytab": "/etc/security/keytabs/smokeuser.headless.keytab"
        }
    }, 
    "configurationTags": {

+ 7 - 19
ambari-server/src/test/python/stacks/2.2/configs/ranger-admin-upgrade.json

@@ -803,25 +803,13 @@
             "zookeeper_principal_name": "zookeeper/_HOST@EXAMPLE.COM"
             "zookeeper_principal_name": "zookeeper/_HOST@EXAMPLE.COM"
         }, 
         }, 
         "cluster-env": {
         "cluster-env": {
-            "security_enabled": "true", 
-            "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz", 
-            "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-            "pig_tar_source": "/usr/hdp/current/pig-client/pig.tar.gz", 
-            "tez_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/", 
-            "smokeuser_principal_name": "ambari-qa@EXAMPLE.COM", 
-            "hadoop-streaming_tar_source": "/usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming.jar", 
-            "kerberos_domain": "EXAMPLE.COM", 
-            "hive_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/", 
-            "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-            "smokeuser_keytab": "/etc/security/keytabs/smokeuser.headless.keytab", 
-            "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/", 
-            "user_group": "hadoop", 
-            "ignore_groupsusers_create": "false", 
-            "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz", 
-            "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz", 
-            "smokeuser": "ambari-qa", 
-            "sqoop_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/", 
-            "tez_tar_source": "/usr/hdp/current/tez-client/lib/tez.tar.gz"
+            "security_enabled": "true",
+            "ignore_groupsusers_create": "false",
+            "kerberos_domain": "EXAMPLE.COM",
+            "user_group": "hadoop",
+            "smokeuser": "ambari-qa",
+            "smokeuser_principal_name": "ambari-qa@EXAMPLE.COM",
+            "smokeuser_keytab": "/etc/security/keytabs/smokeuser.headless.keytab"
        }
    }, 
    "configurationTags": {

+ 7 - 19
ambari-server/src/test/python/stacks/2.2/configs/ranger-usersync-upgrade.json

@@ -802,25 +802,13 @@
             "zookeeper_principal_name": "zookeeper/_HOST@EXAMPLE.COM"
             "zookeeper_principal_name": "zookeeper/_HOST@EXAMPLE.COM"
         }, 
         }, 
         "cluster-env": {
         "cluster-env": {
-            "security_enabled": "true", 
-            "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz", 
-            "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-            "pig_tar_source": "/usr/hdp/current/pig-client/pig.tar.gz", 
-            "tez_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/", 
-            "smokeuser_principal_name": "ambari-qa@EXAMPLE.COM", 
-            "hadoop-streaming_tar_source": "/usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming.jar", 
-            "kerberos_domain": "EXAMPLE.COM", 
-            "hive_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/", 
-            "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-            "smokeuser_keytab": "/etc/security/keytabs/smokeuser.headless.keytab", 
-            "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/", 
-            "user_group": "hadoop", 
-            "ignore_groupsusers_create": "false", 
-            "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz", 
-            "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz", 
-            "smokeuser": "ambari-qa", 
-            "sqoop_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/", 
-            "tez_tar_source": "/usr/hdp/current/tez-client/lib/tez.tar.gz"
+            "security_enabled": "true",
+            "ignore_groupsusers_create": "false",
+            "kerberos_domain": "EXAMPLE.COM",
+            "user_group": "hadoop",
+            "smokeuser_principal_name": "ambari-qa@EXAMPLE.COM",
+            "smokeuser": "ambari-qa",
+            "smokeuser_keytab": "/etc/security/keytabs/smokeuser.headless.keytab" 
        }
    }, 
    "configurationTags": {

+ 1 - 12
ambari-server/src/test/python/stacks/2.2/configs/secured.json

@@ -174,18 +174,7 @@
             "user_group": "hadoop",
             "user_group": "hadoop",
             "smokeuser_keytab": "/etc/security/keytabs/smokeuser.headless.keytab",
             "smokeuser_keytab": "/etc/security/keytabs/smokeuser.headless.keytab",
             "smokeuser_principal_name": "ambari-qa@EXAMPLE.COM",
             "smokeuser_principal_name": "ambari-qa@EXAMPLE.COM",
-            "kinit_path_local": "/usr/bin",
-	        "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz", 
-	        "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-	        "pig_tar_source": "/usr/hdp/current/pig-client/pig.tar.gz", 
-	        "hive_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/", 
-	        "tez_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/", 
-	        "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-	        "tez_tar_source": "/usr/hdp/current/tez-client/lib/tez.tar.gz", 
-	        "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/", 
-	        "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz", 
-	        "sqoop_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/", 
-	        "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz"
+            "kinit_path_local": "/usr/bin"
        },
        "webhcat-site": {
            "templeton.jar": "/usr/hdp/current/hive-webhcat/share/webhcat/svr/lib/hive-webhcat-*.jar",

File diff suppressed because it is too large
+ 0 - 0
ambari-server/src/test/python/stacks/2.2/configs/spark-job-history-server.json


File diff suppressed because it is too large
+ 0 - 0
ambari-server/src/test/python/stacks/2.3/configs/hbase_default.json


File diff suppressed because it is too large
+ 0 - 0
ambari-server/src/test/python/stacks/2.3/configs/hbase_secure.json


File diff suppressed because it is too large
+ 0 - 0
ambari-server/src/test/python/stacks/2.3/configs/storm_default.json


File diff suppressed because it is too large
+ 0 - 0
ambari-server/src/test/resources/custom_actions/ru_execute_tasks_namenode_prepare.json


Some files were not shown because too many files changed in this diff