Преглед на файлове

AMBARI-9997. Add Service Check to Tez Service.(vbrodetskyi)

Vitaly Brodetskyi преди 10 години
родител
ревизия
271660a6d0
променени са 15 файла, в които са добавени 291 реда и са изтрити 81 реда
  1. 1 0
      ambari-common/src/main/python/resource_management/libraries/functions/__init__.py
  2. 53 0
      ambari-common/src/main/python/resource_management/libraries/functions/get_hdp_version.py
  3. 0 5
      ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
  4. 2 2
      ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/service_check.py
  5. 7 1
      ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/metainfo.xml
  6. 6 0
      ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params.py
  7. 98 0
      ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/service_check.py
  8. 0 0
      ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/install_jars.py
  9. 11 0
      ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params.py
  10. 7 1
      ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py
  11. 2 1
      ambari-server/src/main/resources/stacks/HDP/2.1/role_command_order.json
  12. 8 58
      ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
  13. 12 11
      ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
  14. 81 0
      ambari-server/src/test/python/stacks/2.1/TEZ/test_service_check.py
  15. 3 2
      ambari-server/src/test/python/stacks/2.1/TEZ/test_tez_client.py

+ 1 - 0
ambari-common/src/main/python/resource_management/libraries/functions/__init__.py

@@ -38,6 +38,7 @@ from resource_management.libraries.functions.hive_check import *
 from resource_management.libraries.functions.version import *
 from resource_management.libraries.functions.format_jvm_option import *
 from resource_management.libraries.functions.constants import *
+from resource_management.libraries.functions.get_hdp_version import *
 
 IS_WINDOWS = platform.system() == "Windows"
 

+ 53 - 0
ambari-common/src/main/python/resource_management/libraries/functions/get_hdp_version.py

@@ -0,0 +1,53 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+
+__all__ = ["get_hdp_version"]
+import os
+import re
+from resource_management.core.logger import Logger
+from resource_management.core.exceptions import Fail
+from resource_management.core import shell
+
+
+def get_hdp_version(package_name):
+  """
+  @param package_name, name of the package, from which, function will try to get hdp version
+  """
+  try:
+    command = 'hdp-select status ' + package_name
+    return_code, hdp_output = shell.call(command, timeout=20)
+  except Exception, e:
+    Logger.error(str(e))
+    raise Fail('Unable to execute hdp-select command to retrieve the version.')
+
+  if return_code != 0:
+    raise Fail(
+      'Unable to determine the current version because of a non-zero return code of {0}'.format(str(return_code)))
+
+  hdp_version = re.sub(package_name + ' - ', '', hdp_output)
+  hdp_version = hdp_version.rstrip()
+  match = re.match('[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+-[0-9]+', hdp_version)
+
+  if match is None:
+    raise Fail('Failed to get extracted version')
+
+  return hdp_version

+ 0 - 5
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py

@@ -26,7 +26,6 @@ from resource_management.libraries.functions.dynamic_variable_interpretation imp
 from resource_management.libraries.functions.security_commons import build_expectations, \
   cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \
   FILE_TYPE_XML
-from install_jars import install_tez_jars
 from setup_ranger_hive import setup_ranger_hive
 
 class HiveServer(Script):
@@ -42,9 +41,6 @@ class HiveServer(Script):
   def configure(self, env):
     import params
     env.set_params(params)
-    if not (params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.2') >=0):
-      install_tez_jars()
-
     hive(name='hiveserver2')
 
 
@@ -55,7 +51,6 @@ class HiveServer(Script):
 
     # This function is needed in HDP 2.2, but it is safe to call in earlier versions.
     copy_tarballs_to_hdfs('mapreduce', 'hive-server2', params.tez_user, params.hdfs_user, params.user_group)
-    copy_tarballs_to_hdfs('tez', 'hive-server2', params.tez_user, params.hdfs_user, params.user_group)
     setup_ranger_hive()    
     hive_service( 'hiveserver2', action = 'start',
       rolling_restart=rolling_restart )

+ 2 - 2
ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/service_check.py

@@ -28,7 +28,7 @@ class MahoutServiceCheck(Script):
     env.set_params(params)
 
     create_input_dir_cmd = format("fs -mkdir /user/{smokeuser}/mahoutsmokeinput")
-    copy_file_to_hdfs_cmd = format("fs -put {tmp_dir}/sample-mahout-test.txt /user/{smokeuser}/mahoutsmokeinput/")
+    copy_test_file_to_hdfs_cmd = format("fs -put {tmp_dir}/sample-mahout-test.txt /user/{smokeuser}/mahoutsmokeinput/")
     mahout_command = format("mahout seqdirectory --input /user/{smokeuser}/mahoutsmokeinput/sample-mahout-test.txt "
                             "--output /user/{smokeuser}/mahoutsmokeoutput/ --charset utf-8")
     test_command = format("fs -test -e /user/{smokeuser}/mahoutsmokeoutput/_SUCCESS")
@@ -61,7 +61,7 @@ class MahoutServiceCheck(Script):
         mode = 0755
     )
 
-    ExecuteHadoop( copy_file_to_hdfs_cmd,
+    ExecuteHadoop( copy_test_file_to_hdfs_cmd,
                    tries = 3,
                    try_sleep = 5,
                    user = params.smokeuser,

+ 7 - 1
ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/metainfo.xml

@@ -59,7 +59,13 @@
           </packages>
         </osSpecific>
       </osSpecifics>
-      
+
+      <commandScript>
+        <script>scripts/service_check.py</script>
+        <scriptType>PYTHON</scriptType>
+        <timeout>300</timeout>
+      </commandScript>
+
       <requiredServices>
         <service>YARN</service>
       </requiredServices>

+ 6 - 0
ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params.py

@@ -23,6 +23,7 @@ from resource_management import *
 
 # server configurations
 config = Script.get_config()
+tmp_dir = Script.get_tmp_dir()
 
 stack_name = default("/hostLevelParams/stack_name", None)
 
@@ -35,12 +36,17 @@ version = default("/commandParams/version", None)
 
 if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
   hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
+  path_to_tez_examples_jar = "/usr/hdp/{hdp_version}/tez/tez-examples*.jar"
 else:
   hadoop_bin_dir = "/usr/bin"
+  path_to_tez_examples_jar = "/usr/lib/tez/tez-mapreduce-examples*.jar"
 hadoop_conf_dir = "/etc/hadoop/conf"
 
 kinit_path_local = functions.get_kinit_path()
 security_enabled = config['configurations']['cluster-env']['security_enabled']
+smokeuser = config['configurations']['cluster-env']['smokeuser']
+smokeuser_principal = config['configurations']['cluster-env']['smokeuser_principal_name']
+smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']

+ 98 - 0
ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/service_check.py

@@ -0,0 +1,98 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+
+from resource_management import *
+from resource_management.libraries.functions.version import compare_versions
+from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
+
+class TezServiceCheck(Script):
+  def service_check(self, env):
+    import params
+    env.set_params(params)
+
+    if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.2') >= 0:
+      hdp_version = functions.get_hdp_version("hadoop-client")
+
+    path_to_tez_jar = format(params.path_to_tez_examples_jar)
+    copy_test_file_to_hdfs_cmd = format("fs -put {tmp_dir}/sample-tez-test /tmp/tezsmokeinput/")
+    create_input_dir_cmd = format("fs -mkdir /tmp/tezsmokeinput")
+    wordcount_command = format("jar {path_to_tez_jar} orderedwordcount "
+                               "/tmp/tezsmokeinput/sample-tez-test /tmp/tezsmokeoutput/")
+    test_command = format("fs -test -e /tmp/tezsmokeoutput/_SUCCESS")
+    remove_output_input_dirs_cmd = "fs -rm -r -f /tmp/tezsmokeinput /tmp/tezsmokeoutput"
+
+
+    ExecuteHadoop( remove_output_input_dirs_cmd,
+                   tries = 3,
+                   try_sleep = 5,
+                   user = params.smokeuser,
+                   conf_dir = params.hadoop_conf_dir,
+                   # for kinit run
+                   keytab = params.smoke_user_keytab,
+                   principal = params.smokeuser_principal,
+                   security_enabled = params.security_enabled,
+                   kinit_path_local = params.kinit_path_local,
+                   bin_dir = params.hadoop_bin_dir
+    )
+
+    ExecuteHadoop( create_input_dir_cmd,
+                   tries = 3,
+                   try_sleep = 5,
+                   user = params.smokeuser,
+                   conf_dir = params.hadoop_conf_dir,
+                   bin_dir = params.hadoop_bin_dir
+    )
+
+    File( format("{tmp_dir}/sample-tez-test"),
+          content = "foo\nbar\nfoo\nbar\nfoo",
+          mode = 0755
+    )
+
+    ExecuteHadoop( copy_test_file_to_hdfs_cmd,
+                   tries = 3,
+                   try_sleep = 5,
+                   user = params.smokeuser,
+                   conf_dir = params.hadoop_conf_dir,
+                   bin_dir = params.hadoop_bin_dir
+    )
+
+    ExecuteHadoop( wordcount_command,
+                   tries = 3,
+                   try_sleep = 5,
+                   user = params.smokeuser,
+                   conf_dir = params.hadoop_conf_dir,
+                   bin_dir = params.hadoop_bin_dir
+    )
+
+    ExecuteHadoop( test_command,
+                   tries = 10,
+                   try_sleep = 6,
+                   user = params.smokeuser,
+                   conf_dir = params.hadoop_conf_dir,
+                   bin_dir = params.hadoop_bin_dir
+    )
+
+
+
+
+if __name__ == "__main__":
+  TezServiceCheck().execute()
+

+ 0 - 0
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/install_jars.py → ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/install_jars.py


+ 11 - 0
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params.py

@@ -186,6 +186,17 @@ mapreduce_jobhistory_intermediate_done_dir = config['configurations']['mapred-si
 mapreduce_jobhistory_done_dir = config['configurations']['mapred-site']['mapreduce.jobhistory.done-dir']
 jobhistory_heapsize = default("/configurations/mapred-env/jobhistory_heapsize", "900")
 
+# Tez-related properties
+tez_user = config['configurations']['tez-env']['tez_user']
+
+# Tez jars
+tez_local_api_jars = '/usr/lib/tez/tez*.jar'
+tez_local_lib_jars = '/usr/lib/tez/lib/*.jar'
+app_dir_files = {tez_local_api_jars:None}
+
+# Tez libraries
+tez_lib_uris = default("/configurations/tez-site/tez.lib.uris", None)
+
 #for create_hdfs_directory
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']

+ 7 - 1
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py

@@ -21,10 +21,11 @@ Ambari Agent
 
 from resource_management import *
 from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
+from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
 from resource_management.libraries.functions.security_commons import build_expectations, \
   cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \
   FILE_TYPE_XML
-
+from install_jars import install_tez_jars
 from yarn import yarn
 from service import service
 
@@ -56,6 +57,11 @@ class Resourcemanager(Script):
 
     env.set_params(params)
     self.configure(env) # FOR SECURITY
+    if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.1') == 0:
+      install_tez_jars()
+    else:
+      # will work only for stack versions >=2.2
+      copy_tarballs_to_hdfs('tez', 'hadoop-yarn-resourcemanager', params.tez_user, params.hdfs_user, params.user_group)
     service('resourcemanager',
             action='start'
     )

+ 2 - 1
ambari-server/src/main/resources/stacks/HDP/2.1/role_command_order.json

@@ -10,7 +10,8 @@
     "STORM_REST_API-START" : ["NIMBUS-START", "STORM_UI_SERVER-START", "SUPERVISOR-START", "DRPC_SERVER-START"],
     "STORM_SERVICE_CHECK-SERVICE_CHECK": ["NIMBUS-START", "SUPERVISOR-START", "STORM_UI_SERVER-START",
         "DRPC_SERVER-START"],
-    "NIMBUS-STOP" : ["SUPERVISOR-STOP", "STORM_UI_SERVER-STOP", "DRPC_SERVER-STOP"]
+    "NIMBUS-STOP" : ["SUPERVISOR-STOP", "STORM_UI_SERVER-STOP", "DRPC_SERVER-STOP"],
+    "TEZ_SERVICE_CHECK-SERVICE_CHECK": ["NODEMANAGER-START", "RESOURCEMANAGER-START"]
   },
   "_comment" : "Dependencies that are used when GLUSTERFS is not present in cluster",
   "optional_no_glusterfs": {

+ 8 - 58
ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py

@@ -57,7 +57,6 @@ class TestHiveServer(RMFTestCase):
     )
 
     self.assert_configure_default()
-    self.printResources()
     self.assertResourceCalled('Execute', 'hive --config /etc/hive/conf.server --service metatool -updateLocation hdfs://c6401.ambari.apache.org:8020/apps/hive/warehouse ',
         environment = {'PATH' : "/bin:/usr/lib/hive/bin:/usr/bin"},
         user = 'hive',
@@ -183,56 +182,6 @@ class TestHiveServer(RMFTestCase):
     self.assertFalse(socket_mock.called)
 
   def assert_configure_default(self):
-    self.assertResourceCalled('HdfsDirectory', '/apps/tez/',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0755,
-                              owner = 'tez',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/apps/tez/lib/',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0755,
-                              owner = 'tez',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', None,
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              action = ['create'],
-                              bin_dir = '/usr/bin',
-                              )
-    self.assertResourceCalled('CopyFromLocal', '/usr/lib/tez/tez*.jar',
-                              hadoop_bin_dir = '/usr/bin',
-                              hdfs_user = 'hdfs',
-                              owner = 'tez',
-                              dest_file = None,
-                              kinnit_if_needed = '',
-                              dest_dir = '/apps/tez/',
-                              hadoop_conf_dir = '/etc/hadoop/conf',
-                              mode = 0755,
-                              )
-    self.assertResourceCalled('CopyFromLocal', '/usr/lib/tez/lib/*.jar',
-                              hadoop_conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              owner = 'tez',
-                              kinnit_if_needed = '',
-                              dest_dir = '/apps/tez/lib/',
-                              hadoop_bin_dir = '/usr/bin',
-                              mode = 0755,
-                              )
     self.assertResourceCalled('HdfsDirectory', '/apps/hive/warehouse',
                               security_enabled = False,
                               keytab = UnknownConfigurationMock(),
@@ -335,33 +284,34 @@ class TestHiveServer(RMFTestCase):
                               sudo = True,
                               )
     self.assertResourceCalled('File', '/usr/lib/hive/lib//mysql-connector-java.jar',
-        mode = 0644,
-    )
+                              mode = 0644,
+                              )
     self.assertResourceCalled('File', '/usr/lib/ambari-agent/DBConnectionVerification.jar',
-        content = DownloadSource('http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar'),
-    )
+                              content = DownloadSource('http://c6401.ambari.apache.org:8080/resources'
+                                                       '/DBConnectionVerification.jar'),
+                              )
     self.assertResourceCalled('File', '/tmp/start_hiveserver2_script',
                               content = Template('startHiveserver2.sh.j2'),
                               mode = 0755,
                               )
     self.assertResourceCalled('Directory', '/var/run/hive',
                               owner = 'hive',
-                              group = 'hadoop',
                               mode = 0755,
+                              group = 'hadoop',
                               recursive = True,
                               cd_access = 'a',
                               )
     self.assertResourceCalled('Directory', '/var/log/hive',
                               owner = 'hive',
-                              group = 'hadoop',
                               mode = 0755,
+                              group = 'hadoop',
                               recursive = True,
                               cd_access = 'a',
                               )
     self.assertResourceCalled('Directory', '/var/lib/hive',
                               owner = 'hive',
-                              group = 'hadoop',
                               mode = 0755,
+                              group = 'hadoop',
                               recursive = True,
                               cd_access = 'a',
                               )

+ 12 - 11
ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py

@@ -51,18 +51,19 @@ class TestResourceManager(RMFTestCase):
 
     self.assert_configure_default()
 
-    pid_check_cmd = 'ls /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid` >/dev/null 2>&1'
-
     self.assertResourceCalled('File', '/var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid',
-                              not_if=pid_check_cmd,
-                              action=['delete'])
+                              action = ['delete'],
+                              not_if = 'ls /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid` >/dev/null 2>&1',
+                              )
     self.assertResourceCalled('Execute', 'ulimit -c unlimited; export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop-yarn/sbin/yarn-daemon.sh --config /etc/hadoop/conf start resourcemanager',
-                              not_if=pid_check_cmd,
-                              user='yarn')
-    self.assertResourceCalled('Execute', pid_check_cmd,
-                              user='yarn',
-                              not_if=pid_check_cmd,
-                              initial_wait=5)
+                              not_if = 'ls /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid` >/dev/null 2>&1',
+                              user = 'yarn',
+                              )
+    self.assertResourceCalled('Execute', 'ls /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid` >/dev/null 2>&1',
+                              initial_wait = 5,
+                              not_if = 'ls /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid` >/dev/null 2>&1',
+                              user = 'yarn',
+                              )
     self.assertNoMoreResources()
 
   def test_stop_default(self):
@@ -571,4 +572,4 @@ class TestResourceManager(RMFTestCase):
                        hdp_stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
-    put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})
+    put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})

+ 81 - 0
ambari-server/src/test/python/stacks/2.1/TEZ/test_service_check.py

@@ -0,0 +1,81 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+from stacks.utils.RMFTestCase import *
+
+
+class TestTezServiceCheck(RMFTestCase):
+  COMMON_SERVICES_PACKAGE_DIR = "TEZ/0.4.0.2.1/package"
+  STACK_VERSION = "2.1"
+
+  def test_service_check(self):
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/service_check.py",
+                       classname="TezServiceCheck",
+                       command="service_check",
+                       config_file="default.json",
+                       hdp_stack_version = self.STACK_VERSION,
+                       target = RMFTestCase.TARGET_COMMON_SERVICES
+    )
+    self.assertResourceCalled('ExecuteHadoop', 'fs -rm -r -f /tmp/tezsmokeinput /tmp/tezsmokeoutput',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              try_sleep = 5,
+                              kinit_path_local = '/usr/bin/kinit',
+                              tries = 3,
+                              user = 'ambari-qa',
+                              bin_dir = '/usr/bin',
+                              principal = UnknownConfigurationMock(),
+                              )
+    self.assertResourceCalled('ExecuteHadoop', 'fs -mkdir /tmp/tezsmokeinput',
+                              try_sleep = 5,
+                              tries = 3,
+                              bin_dir = '/usr/bin',
+                              user = 'ambari-qa',
+                              conf_dir = '/etc/hadoop/conf',
+                              )
+    self.assertResourceCalled('File', '/tmp/sample-tez-test',
+                              content = 'foo\nbar\nfoo\nbar\nfoo',
+                              mode = 0755,
+                              )
+    self.assertResourceCalled('ExecuteHadoop', 'fs -put /tmp/sample-tez-test /tmp/tezsmokeinput/',
+                              try_sleep = 5,
+                              tries = 3,
+                              bin_dir = '/usr/bin',
+                              user = 'ambari-qa',
+                              conf_dir = '/etc/hadoop/conf',
+                              )
+    self.assertResourceCalled('ExecuteHadoop', 'jar /usr/lib/tez/tez-mapreduce-examples*.jar orderedwordcount /tmp/tezsmokeinput/sample-tez-test /tmp/tezsmokeoutput/',
+                              try_sleep = 5,
+                              tries = 3,
+                              bin_dir = '/usr/bin',
+                              user = 'ambari-qa',
+                              conf_dir = '/etc/hadoop/conf',
+                              )
+    self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /tmp/tezsmokeoutput/_SUCCESS',
+                              try_sleep = 6,
+                              tries = 10,
+                              bin_dir = '/usr/bin',
+                              user = 'ambari-qa',
+                              conf_dir = '/etc/hadoop/conf',
+                              )
+    self.assertNoMoreResources()
+
+

+ 3 - 2
ambari-server/src/test/python/stacks/2.1/TEZ/test_tez_client.py

@@ -60,8 +60,8 @@ class TestTezClient(RMFTestCase):
 
     self.assertNoMoreResources()
 
-
-  def test_upgrade(self):
+  @patch("resource_management.libraries.functions.get_hdp_version")
+  def test_upgrade(self, get_hdp_version_mock):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/tez_client.py",
                        classname = "TezClient",
                        command = "restart",
@@ -69,6 +69,7 @@ class TestTezClient(RMFTestCase):
                        hdp_stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES)
 
+    get_hdp_version_mock.return_value = "2.2.1.0-2067"
     self.assertResourceCalled("Execute", "hdp-select set hadoop-client 2.2.1.0-2067")
 
     # for now, it's enough that hdp-select is confirmed