
AMBARI-11034. Implement HdfsResources and its usage (aonishuk)

Andrew Onishuk, 10 years ago
commit 6e8dce4433
100 changed files with 2630 additions and 2144 deletions
  1. 7 0
      ambari-agent/pom.xml
  2. 3 3
      ambari-agent/src/test/python/resource_management/TestContentSources.py
  3. 0 68
      ambari-agent/src/test/python/resource_management/TestCopyFromLocal.py
  4. 5 5
      ambari-agent/src/test/python/resource_management/TestPropertiesFileResource.py
  5. 3 3
      ambari-agent/src/test/python/resource_management/TestRepositoryResource.py
  6. 4 4
      ambari-agent/src/test/python/resource_management/TestXmlConfigResource.py
  7. 8 3
      ambari-common/src/main/python/resource_management/core/source.py
  8. 0 1
      ambari-common/src/main/python/resource_management/libraries/functions/__init__.py
  9. 28 1
      ambari-common/src/main/python/resource_management/libraries/functions/version.py
  10. 1 2
      ambari-common/src/main/python/resource_management/libraries/providers/__init__.py
  11. 0 94
      ambari-common/src/main/python/resource_management/libraries/providers/copy_from_local.py
  12. 0 112
      ambari-common/src/main/python/resource_management/libraries/providers/hdfs_directory.py
  13. 111 0
      ambari-common/src/main/python/resource_management/libraries/providers/hdfs_resource.py
  14. 1 2
      ambari-common/src/main/python/resource_management/libraries/resources/__init__.py
  15. 0 41
      ambari-common/src/main/python/resource_management/libraries/resources/copy_from_local.py
  16. 0 45
      ambari-common/src/main/python/resource_management/libraries/resources/hdfs_directory.py
  17. 76 0
      ambari-common/src/main/python/resource_management/libraries/resources/hdfs_resource.py
  18. 2 0
      ambari-common/src/main/python/resource_management/libraries/script/script.py
  19. 3 3
      ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo.py
  20. 9 5
      ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_configuration.py
  21. 10 9
      ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
  22. 8 6
      ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/hbase.py
  23. 8 8
      ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
  24. 13 10
      ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py
  25. 11 9
      ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py
  26. 8 6
      ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase.py
  27. 12 9
      ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
  28. 0 1
      ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/service_check.py
  29. BIN
      ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/files/fast-hdfs-resource.jar
  30. 5 0
      ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py
  31. 9 6
      ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py
  32. 0 1
      ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_nfsgateway.py
  33. 0 1
      ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_snamenode.py
  34. 11 9
      ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
  35. 14 39
      ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/service_check.py
  36. 5 17
      ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/files/templetonSmoke.sh
  37. 89 27
      ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
  38. 8 5
      ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
  39. 50 13
      ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
  40. 0 71
      ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
  41. 27 1
      ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service_check.py
  42. 24 0
      ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/templates/templeton_smoke.pig.j2
  43. 9 8
      ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
  44. 14 35
      ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/service_check.py
  45. 10 42
      ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/oozieSmoke2.sh
  46. 45 0
      ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/prepareOozieHdfsDirectories.sh
  47. 4 2
      ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
  48. 10 6
      ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server_upgrade.py
  49. 13 9
      ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
  50. 31 7
      ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/service_check.py
  51. 16 9
      ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py
  52. 35 32
      ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py
  53. 8 2
      ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/job_history_server.py
  54. 12 9
      ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
  55. 4 2
      ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/setup_spark.py
  56. 8 1
      ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_service.py
  57. 5 7
      ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/configuration/storm-env.xml
  58. 14 11
      ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
  59. 13 40
      ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/service_check.py
  60. 22 3
      ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py
  61. 30 38
      ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/install_jars.py
  62. 8 19
      ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapred_service_check.py
  63. 26 11
      ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
  64. 9 3
      ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py
  65. 1 1
      ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/service_check.py
  66. 19 19
      ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py
  67. 103 89
      ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
  68. 0 91
      ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py
  69. 6 0
      ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
  70. 6 0
      ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
  71. 234 214
      ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
  72. 6 0
      ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py
  73. 34 25
      ambari-server/src/test/python/stacks/2.0.6/HDFS/test_service_check.py
  74. 6 0
      ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py
  75. 12 0
      ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py
  76. 225 103
      ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
  77. 90 16
      ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
  78. 0 134
      ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
  79. 137 40
      ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
  80. 43 2
      ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_service_check.py
  81. 61 23
      ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_service_check.py
  82. 148 133
      ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
  83. 3 0
      ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
  84. 38 26
      ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_service_check.py
  85. 0 132
      ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
  86. 38 1
      ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
  87. 2 0
      ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py
  88. 11 2
      ambari-server/src/test/python/stacks/2.0.6/configs/default.json
  89. 12 1
      ambari-server/src/test/python/stacks/2.0.6/configs/secured.json
  90. 24 23
      ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
  91. 1 1
      ambari-server/src/test/python/stacks/2.1/FALCON/test_service_check.py
  92. 48 52
      ambari-server/src/test/python/stacks/2.1/TEZ/test_service_check.py
  93. 83 38
      ambari-server/src/test/python/stacks/2.2/PIG/test_pig_service_check.py
  94. 76 12
      ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py
  95. 12 1
      ambari-server/src/test/python/stacks/2.2/configs/default.json
  96. 12 1
      ambari-server/src/test/python/stacks/2.2/configs/secured.json
  97. 35 28
      ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py
  98. 42 0
      contrib/fast-hdfs-resource/dependency-reduced-pom.xml
  99. 86 0
      contrib/fast-hdfs-resource/pom.xml
  100. 57 0
      contrib/fast-hdfs-resource/resources/example.json

+ 7 - 0
ambari-agent/pom.xml

@@ -426,6 +426,12 @@
                </source>
              </sources>
            </mapping>
+            <mapping>
+              <directory>/var/lib/ambari-agent/lib</directory>
+              <filemode>755</filemode>
+              <username>root</username>
+              <groupname>root</groupname>
+            </mapping>
          </mappings>
        </configuration>
      </plugin>
@@ -551,6 +557,7 @@
                <path>/var/lib/${project.artifactId}/data/tmp</path>
                <path>/var/lib/${project.artifactId}/keys</path>
                <path>${package.log.dir}</path>
+                <path>/var/lib/${project.artifactId}/lib</path>
              </paths>
              <mapper>
                <type>perm</type>

+ 3 - 3
ambari-agent/src/test/python/resource_management/TestContentSources.py

@@ -221,7 +221,7 @@ class TestContentSources(TestCase):
      content = template.get_content()
    self.assertEqual(open_mock.call_count, 1)
 
 
-    self.assertEqual(u'test template content\n', content)
+    self.assertEqual(u'test template content', content)
    open_mock.assert_called_with('/absolute/path/test.j2', 'rb')
    self.assertEqual(getmtime_mock.call_count, 1)
    getmtime_mock.assert_called_with('/absolute/path/test.j2')
@@ -234,7 +234,7 @@ class TestContentSources(TestCase):
      template = InlineTemplate("{{test_arg1}} template content", [], test_arg1 = "test")
      content = template.get_content()
 
 
-    self.assertEqual(u'test template content\n', content)
+    self.assertEqual(u'test template content', content)
 
 
  def test_template_imports(self):
    """
@@ -250,4 +250,4 @@ class TestContentSources(TestCase):
    with Environment("/base") as env:
      template = InlineTemplate("{{test_arg1}} template content {{os.path.join(path[0],path[1])}}", [os], test_arg1 = "test", path = ["/one","two"])
      content = template.get_content()
-    self.assertEqual(u'test template content /one/two\n', content)
+    self.assertEqual(u'test template content /one/two', content)

+ 0 - 68
ambari-agent/src/test/python/resource_management/TestCopyFromLocal.py

@@ -1,68 +0,0 @@
-'''
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-'''
-
-from unittest import TestCase
-from mock.mock import patch, MagicMock
-from resource_management import *
-from resource_management.core import shell
-
-@patch.object(shell, "call", new = MagicMock(return_value=(1, "")))
-@patch.object(System, "os_family", new = 'redhat')
-class TestCopyFromLocal(TestCase):
-
-  @patch("resource_management.libraries.providers.execute_hadoop.ExecuteHadoopProvider")
-  def test_run_default_args(self, execute_hadoop_mock):
-    with Environment() as env:
-      CopyFromLocal('/user/testdir/*.files',
-        owner='user1',
-        dest_dir='/apps/test/',
-        kinnit_if_needed='',
-        hdfs_user='hdfs'
-      )
-      self.assertEqual(execute_hadoop_mock.call_count, 2)
-      call_arg_list = execute_hadoop_mock.call_args_list
-      self.assertEqual('fs -copyFromLocal /user/testdir/*.files /apps/test/',
-                       call_arg_list[0][0][0].command)
-      print call_arg_list[0][0][0].arguments
-      self.assertEquals({'not_if': "ambari-sudo.sh su user1 -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]PATH=$PATH:/usr/bin hadoop fs -ls /apps/test//*.files'", 'bin_dir': '/usr/bin', 'user': 'user1', 'conf_dir': '/etc/hadoop/conf'},
-                        call_arg_list[0][0][0].arguments)
-      self.assertEquals('fs -chown user1 /apps/test//*.files', call_arg_list[1][0][0].command)
-      self.assertEquals({'user': 'hdfs', 'bin_dir': '/usr/bin', 'conf_dir': '/etc/hadoop/conf'}, call_arg_list[1][0][0].arguments)
-
-
-  @patch("resource_management.libraries.providers.execute_hadoop.ExecuteHadoopProvider")
-  def test_run_with_chmod(self, execute_hadoop_mock):
-    with Environment() as env:
-      CopyFromLocal('/user/testdir/*.files',
-        mode=0655,
-        owner='user1',
-        group='hdfs',
-        dest_dir='/apps/test/',
-        kinnit_if_needed='',
-        hdfs_user='hdfs'
-      )
-      self.assertEqual(execute_hadoop_mock.call_count, 3)
-      call_arg_list = execute_hadoop_mock.call_args_list
-      self.assertEqual('fs -copyFromLocal /user/testdir/*.files /apps/test/',
-                       call_arg_list[0][0][0].command)
-      self.assertEquals({'not_if': "ambari-sudo.sh su user1 -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]PATH=$PATH:/usr/bin hadoop fs -ls /apps/test//*.files'", 'bin_dir': '/usr/bin', 'user': 'user1', 'conf_dir': '/etc/hadoop/conf'},
-                        call_arg_list[0][0][0].arguments)
-      self.assertEquals('fs -chown user1:hdfs /apps/test//*.files', call_arg_list[1][0][0].command)
-      self.assertEquals({'user': 'hdfs', 'bin_dir': '/usr/bin', 'conf_dir': '/etc/hadoop/conf'}, call_arg_list[1][0][0].arguments)
-
-

+ 5 - 5
ambari-agent/src/test/python/resource_management/TestPropertiesFileResource.py

@@ -65,7 +65,7 @@ class TestPropertiesFIleResource(TestCase):
                     properties={}
      )
 
 
-    create_file_mock.assert_called_with('/somewhere_in_system/one_file.properties', u'# Generated by Apache Ambari. Today is Wednesday\n    \n    \n', encoding=None)
+    create_file_mock.assert_called_with('/somewhere_in_system/one_file.properties', u'# Generated by Apache Ambari. Today is Wednesday\n    \n    ', encoding=None)
    ensure_mock.assert_called()
 
 
 
 
@@ -98,7 +98,7 @@ class TestPropertiesFIleResource(TestCase):
                     properties={},
      )
 
 
-    create_file_mock.assert_called_with('/dir/and/dir/file.txt', u'# Generated by Apache Ambari. Some other day\n    \n    \n', encoding=None)
+    create_file_mock.assert_called_with('/dir/and/dir/file.txt', u'# Generated by Apache Ambari. Some other day\n    \n    ', encoding=None)
    ensure_mock.assert_called()
 
 
 
 
@@ -131,7 +131,7 @@ class TestPropertiesFIleResource(TestCase):
                     properties={'property1': 'value1'},
      )
 
 
-    create_file_mock.assert_called_with('/dir/new_file', u'# Generated by Apache Ambari. 777\n    \nproperty1=value1\n    \n', encoding=None)
+    create_file_mock.assert_called_with('/dir/new_file', u'# Generated by Apache Ambari. 777\n    \nproperty1=value1\n    ', encoding=None)
    ensure_mock.assert_called()
 
 
 
 
@@ -169,7 +169,7 @@ class TestPropertiesFIleResource(TestCase):
                     },
      )
 
 
-    create_file_mock.assert_called_with('/dir/new_file', u"# Generated by Apache Ambari. 777\n    \n=\nprop.1='.'yyyy-MM-dd-HH\nprop.2=INFO, openjpa\nprop.3=%d{ISO8601} %5p %c{1}:%L - %m%n\nprop.4=${oozie.log.dir}/oozie.log\nprop.empty=\n    \n", encoding=None)
+    create_file_mock.assert_called_with('/dir/new_file', u"# Generated by Apache Ambari. 777\n    \n=\nprop.1='.'yyyy-MM-dd-HH\nprop.2=INFO, openjpa\nprop.3=%d{ISO8601} %5p %c{1}:%L - %m%n\nprop.4=${oozie.log.dir}/oozie.log\nprop.empty=\n    ", encoding=None)
    ensure_mock.assert_called()
 
 
 
 
@@ -206,5 +206,5 @@ class TestPropertiesFIleResource(TestCase):
      )

    read_file_mock.assert_called()
-    create_file_mock.assert_called_with('/dir1/new_file', u'# Generated by Apache Ambari. 777\n    \nproperty_1=value1\n    \n', encoding=None)
+    create_file_mock.assert_called_with('/dir1/new_file', u'# Generated by Apache Ambari. 777\n    \nproperty_1=value1\n    ', encoding=None)
    ensure_mock.assert_called()

+ 3 - 3
ambari-agent/src/test/python/resource_management/TestRepositoryResource.py

@@ -164,7 +164,7 @@ class TestRepositoryResource(TestCase):
      template_content = call_content[1]['content']
      
      self.assertEquals(template_name, '/tmp/1.txt')
-      self.assertEquals(template_content, 'deb http://download.base_url.org/rpm/ a b c\n')
+      self.assertEquals(template_content, 'deb http://download.base_url.org/rpm/ a b c')
       
       
      copy_item = str(file_mock.call_args_list[1])
      self.assertEqual(copy_item, "call('/etc/apt/sources.list.d/HDP.list', content=StaticFile('/tmp/1.txt'))")
@@ -205,7 +205,7 @@ class TestRepositoryResource(TestCase):
      template_content = call_content[1]['content']

      self.assertEquals(template_name, '/tmp/1.txt')
-      self.assertEquals(template_content, 'deb http://download.base_url.org/rpm/ a b c\n')
+      self.assertEquals(template_content, 'deb http://download.base_url.org/rpm/ a b c')
 
 
      copy_item = str(file_mock.call_args_list[1])
      self.assertEqual(copy_item, "call('/etc/apt/sources.list.d/HDP.list', content=StaticFile('/tmp/1.txt'))")
@@ -239,7 +239,7 @@ class TestRepositoryResource(TestCase):
      template_content = call_content[1]['content']
      
      self.assertEquals(template_name, '/tmp/1.txt')
-      self.assertEquals(template_content, 'deb http://download.base_url.org/rpm/ a b c\n')
+      self.assertEquals(template_content, 'deb http://download.base_url.org/rpm/ a b c')
       
       
      self.assertEqual(file_mock.call_count, 1)
      self.assertEqual(execute_mock.call_count, 0)

+ 4 - 4
ambari-agent/src/test/python/resource_management/TestXmlConfigResource.py

@@ -62,7 +62,7 @@ class TestXmlConfigResource(TestCase):
                configuration_attributes={}
                )
 
 
-    create_file_mock.assert_called_with('/dir/conf/file.xml', u'<!--Wed 2014-02-->\n    <configuration>\n    \n  </configuration>\n', encoding='UTF-8')
+    create_file_mock.assert_called_with('/dir/conf/file.xml', u'<!--Wed 2014-02-->\n    <configuration>\n    \n  </configuration>', encoding='UTF-8')
 
 
 
 
   @patch("resource_management.core.providers.system._ensure_metadata")
   @patch("resource_management.core.providers.system._ensure_metadata")
@@ -91,7 +91,7 @@ class TestXmlConfigResource(TestCase):
                configuration_attributes={'attr': {'property1': 'attr_value'}}
                )
 
 
-    create_file_mock.assert_called_with('/dir/conf/file.xml', u'<!--Wed 2014-02-->\n    <configuration>\n    \n    <property>\n      <name>property1</name>\n      <value>value1</value>\n      <attr>attr_value</attr>\n    </property>\n    \n  </configuration>\n', encoding='UTF-8')
+    create_file_mock.assert_called_with('/dir/conf/file.xml', u'<!--Wed 2014-02-->\n    <configuration>\n    \n    <property>\n      <name>property1</name>\n      <value>value1</value>\n      <attr>attr_value</attr>\n    </property>\n    \n  </configuration>', encoding='UTF-8')
 
 
 
 
   @patch("resource_management.core.providers.system._ensure_metadata")
   @patch("resource_management.core.providers.system._ensure_metadata")
@@ -144,7 +144,7 @@ class TestXmlConfigResource(TestCase):
                    }
                })
 
 
-    create_file_mock.assert_called_with('/dir/conf/file.xml', u'<!--Wed 2014-02-->\n    <configuration>\n    \n    <property>\n      <name></name>\n      <value></value>\n    </property>\n    \n    <property>\n      <name>prop.1</name>\n      <value>&#39;.&#39;yyyy-MM-dd-HH</value>\n      <attr1>x</attr1>\n    </property>\n    \n    <property>\n      <name>prop.2</name>\n      <value>INFO, openjpa</value>\n    </property>\n    \n    <property>\n      <name>prop.3</name>\n      <value>%d{ISO8601} %5p %c{1}:%L - %m%n</value>\n      <attr2>value3</attr2>\n    </property>\n    \n    <property>\n      <name>prop.4</name>\n      <value>${oozie.log.dir}/oozie.log</value>\n      <attr_value_empty></attr_value_empty>\n      <attr2>value4</attr2>\n    </property>\n    \n    <property>\n      <name>prop.empty</name>\n      <value></value>\n      <attr_value_empty></attr_value_empty>\n    </property>\n    \n  </configuration>\n', encoding='UTF-8')
+    create_file_mock.assert_called_with('/dir/conf/file.xml', u'<!--Wed 2014-02-->\n    <configuration>\n    \n    <property>\n      <name></name>\n      <value></value>\n    </property>\n    \n    <property>\n      <name>prop.1</name>\n      <value>&#39;.&#39;yyyy-MM-dd-HH</value>\n      <attr1>x</attr1>\n    </property>\n    \n    <property>\n      <name>prop.2</name>\n      <value>INFO, openjpa</value>\n    </property>\n    \n    <property>\n      <name>prop.3</name>\n      <value>%d{ISO8601} %5p %c{1}:%L - %m%n</value>\n      <attr2>value3</attr2>\n    </property>\n    \n    <property>\n      <name>prop.4</name>\n      <value>${oozie.log.dir}/oozie.log</value>\n      <attr_value_empty></attr_value_empty>\n      <attr2>value4</attr2>\n    </property>\n    \n    <property>\n      <name>prop.empty</name>\n      <value></value>\n      <attr_value_empty></attr_value_empty>\n    </property>\n    \n  </configuration>', encoding='UTF-8')
 
 
   @patch("resource_management.core.providers.system._ensure_metadata")
   @patch("resource_management.core.providers.system._ensure_metadata")
   @patch.object(sudo, "create_file")
   @patch.object(sudo, "create_file")
@@ -177,7 +177,7 @@ class TestXmlConfigResource(TestCase):
                configuration_attributes={}
                )
 
 
-    create_file_mock.assert_called_with('/dir/conf/file.xml', u'<!--Wed 2014-02-->\n    <configuration>\n    \n    <property>\n      <name></name>\n      <value></value>\n    </property>\n    \n    <property>\n      <name>first</name>\n      <value>should be first</value>\n    </property>\n    \n    <property>\n      <name>second</name>\n      <value>should be second</value>\n    </property>\n    \n    <property>\n      <name>third</name>\n      <value>should be third</value>\n    </property>\n    \n    <property>\n      <name>z_last</name>\n      <value>should be last</value>\n    </property>\n    \n  </configuration>\n', encoding='UTF-8')
+    create_file_mock.assert_called_with('/dir/conf/file.xml', u'<!--Wed 2014-02-->\n    <configuration>\n    \n    <property>\n      <name></name>\n      <value></value>\n    </property>\n    \n    <property>\n      <name>first</name>\n      <value>should be first</value>\n    </property>\n    \n    <property>\n      <name>second</name>\n      <value>should be second</value>\n    </property>\n    \n    <property>\n      <name>third</name>\n      <value>should be third</value>\n    </property>\n    \n    <property>\n      <name>z_last</name>\n      <value>should be last</value>\n    </property>\n    \n  </configuration>', encoding='UTF-8')
 
 
   @patch("resource_management.libraries.providers.xml_config.File")
   @patch("resource_management.libraries.providers.xml_config.File")
   @patch.object(sudo, "path_exists")
   @patch.object(sudo, "path_exists")

+ 8 - 3
ambari-common/src/main/python/resource_management/core/source.py

@@ -141,7 +141,7 @@ else:
      self.context.update(variables)
      
      rendered = self.template.render(self.context)
-      return rendered + "\n" if not rendered.endswith('\n') else rendered
+      return rendered
     
     
  class InlineTemplate(Template):
    def __init__(self, name, extra_imports=[], **kwargs):
@@ -189,9 +189,14 @@ class DownloadSource(Source):
        opener = urllib2.build_opener()
      
      req = urllib2.Request(self.url)
-      web_file = opener.open(req)
+      
+      try:
+        web_file = opener.open(req)
+      except urllib2.HTTPError as ex:
+        raise Fail("Failed to download file from {0} due to HTTP error: {1}".format(self.url, str(ex)))
+      
      content = web_file.read()
-
+      
      if self.cache:
        with open(filepath, 'w') as fp:
          fp.write(content)

+ 0 - 1
ambari-common/src/main/python/resource_management/libraries/functions/__init__.py

@@ -40,7 +40,6 @@ from resource_management.libraries.functions.format_jvm_option import *
 from resource_management.libraries.functions.constants import *
 from resource_management.libraries.functions.get_hdp_version import *
 from resource_management.libraries.functions.get_lzo_packages import *
-from resource_management.libraries.functions.dynamic_variable_interpretation import *
 from resource_management.libraries.functions.setup_ranger_plugin import *

 IS_WINDOWS = platform.system() == "Windows"

+ 28 - 1
ambari-common/src/main/python/resource_management/libraries/functions/version.py

@@ -19,7 +19,13 @@ limitations under the License.
 Ambari Agent

 """
+import os
 import re
+from resource_management.core import shell
+from resource_management.core.exceptions import Fail
+from resource_management.libraries.script.config_dictionary import UnknownConfiguration
+
+__all__ = ["format_hdp_stack_version", "compare_versions", "get_hdp_build_version"]
 
 
 def _normalize(v, desired_segments=0):
   """
@@ -70,4 +76,25 @@ def compare_versions(version1, version2):
  :return: Returns -1 if version1 is before version2, 0 if they are equal, and 1 if version1 is after version2
  """
  max_segments = max(len(version1.split(".")), len(version2.split(".")))
-  return cmp(_normalize(version1, desired_segments=max_segments), _normalize(version2, desired_segments=max_segments))
+  return cmp(_normalize(version1, desired_segments=max_segments), _normalize(version2, desired_segments=max_segments))
+
+
+def get_hdp_build_version(hdp_stack_version):
+  """
+  Used to check hdp_stack_version for stacks >= 2.2
+  :param hdp_stack_version: version for stacks >= 2.2
+  :return: checked hdp_version (or UnknownConfiguration for stacks < 2.2)
+  """
+  HDP_SELECT = "/usr/bin/hdp-select"
+  if hdp_stack_version != "" and compare_versions(hdp_stack_version, "2.2.0.0") >= 0 and os.path.exists(HDP_SELECT):
+    code, out = shell.call('{0} status'.format(HDP_SELECT))
+
+    matches = re.findall(r"([\d\.]+\-\d+)", out)
+    hdp_version = matches[0] if matches and len(matches) > 0 else None
+
+    if not hdp_version:
+      raise Fail("Could not parse HDP version from output of hdp-select: %s" % str(out))
+
+    return hdp_version
+  else:
+    return UnknownConfiguration('hdp_version')
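
For context, a minimal usage sketch of the new get_hdp_build_version() helper as a params file might call it; the hdp-select output quoted in the comment is an assumed example of the "<component> - <version>-<build>" format the regex expects, not something taken from this commit.

# Hypothetical usage (not part of this diff):
# on an HDP >= 2.2 host, `/usr/bin/hdp-select status` prints lines such as
# "hadoop-client - 2.2.0.0-2041", from which the helper extracts "2.2.0.0-2041".
from resource_management.libraries.functions.version import get_hdp_build_version

hdp_version = get_hdp_build_version("2.2.0.0")
# Returns the parsed build version string, or UnknownConfiguration('hdp_version')
# when the stack is older than 2.2 or /usr/bin/hdp-select does not exist on the host.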

+ 1 - 2
ambari-common/src/main/python/resource_management/libraries/providers/__init__.py

@@ -42,8 +42,7 @@ PROVIDERS = dict(
    XmlConfig="resource_management.libraries.providers.xml_config.XmlConfigProvider",
    PropertiesFile="resource_management.libraries.providers.properties_file.PropertiesFileProvider",
    MonitorWebserver="resource_management.libraries.providers.monitor_webserver.MonitorWebserverProvider",
-    HdfsDirectory="resource_management.libraries.providers.hdfs_directory.HdfsDirectoryProvider",
-    CopyFromLocal="resource_management.libraries.providers.copy_from_local.CopyFromLocalProvider",
+    HdfsResource="resource_management.libraries.providers.hdfs_resource.HdfsResourceProvider",
    ModifyPropertiesFile="resource_management.libraries.providers.modify_properties_file.ModifyPropertiesFileProvider"
  ),
 )

+ 0 - 94
ambari-common/src/main/python/resource_management/libraries/providers/copy_from_local.py

@@ -1,94 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-
-import os
-from resource_management.libraries.resources.execute_hadoop import ExecuteHadoop
-from resource_management.core.providers import Provider
-from resource_management.libraries.functions.format import format
-from resource_management.core.shell import as_user
-from resource_management.core.resources.system import Execute
-
-class CopyFromLocalProvider(Provider):
-  def action_run(self):
-
-    path = self.resource.path
-    dest_dir = self.resource.dest_dir
-    dest_file = self.resource.dest_file
-    kinnit_if_needed = self.resource.kinnit_if_needed
-    user = self.resource.user   # user to perform commands as. If not provided, default to the owner
-    owner = self.resource.owner
-    group = self.resource.group
-    mode = self.resource.mode
-    hdfs_usr=self.resource.hdfs_user
-    hadoop_conf_path = self.resource.hadoop_conf_dir
-    bin_dir = self.resource.hadoop_bin_dir
-
-
-    if dest_file:
-      copy_cmd = format("fs -copyFromLocal {path} {dest_dir}/{dest_file}")
-      dest_path = dest_dir + dest_file if dest_dir.endswith(os.sep) else dest_dir + os.sep + dest_file
-    else:
-      dest_file_name = os.path.split(path)[1]
-      copy_cmd = format("fs -copyFromLocal {path} {dest_dir}")
-      dest_path = dest_dir + os.sep + dest_file_name
-    # Need to run unless as resource user
-    
-    if kinnit_if_needed:
-      Execute(kinnit_if_needed, 
-              user=user if user else owner,
-      )
-    
-    unless_cmd = as_user(format("PATH=$PATH:{bin_dir} hadoop fs -ls {dest_path}"), user if user else owner)
-
-    ExecuteHadoop(copy_cmd,
-                  not_if=unless_cmd,
-                  user=user if user else owner,
-                  bin_dir=bin_dir,
-                  conf_dir=hadoop_conf_path
-                  )
-
-    if not owner:
-      chown = None
-    else:
-      if not group:
-        chown = owner
-      else:
-        chown = format('{owner}:{group}')
-
-    if chown:
-      chown_cmd = format("fs -chown {chown} {dest_path}")
-
-      ExecuteHadoop(chown_cmd,
-                    user=hdfs_usr,
-                    bin_dir=bin_dir,
-                    conf_dir=hadoop_conf_path)
-    pass
-
-    if mode:
-      dir_mode = oct(mode)[1:]
-      chmod_cmd = format('fs -chmod {dir_mode} {dest_path}')
-
-      ExecuteHadoop(chmod_cmd,
-                    user=hdfs_usr,
-                    bin_dir=bin_dir,
-                    conf_dir=hadoop_conf_path)
-    pass

+ 0 - 112
ambari-common/src/main/python/resource_management/libraries/providers/hdfs_directory.py

@@ -1,112 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-import os
-
-from resource_management import *
-directories_list = [] #direcotries list for mkdir
-chmod_map = {} #(mode,recursive):dir_list map
-chown_map = {} #(owner,group,recursive):dir_list map
-class HdfsDirectoryProvider(Provider):
-  def action_create_delayed(self):
-    global delayed_directories
-    global chmod_map
-    global chown_map
-
-    if not self.resource.dir_name:
-      return
-
-    dir_name = self.resource.dir_name
-    dir_owner = self.resource.owner
-    dir_group = self.resource.group
-    dir_mode = oct(self.resource.mode)[1:] if self.resource.mode else None
-    directories_list.append(self.resource.dir_name)
-
-    recursive_chown_str = "-R" if self.resource.recursive_chown else ""
-    recursive_chmod_str = "-R" if self.resource.recursive_chmod else ""
-    # grouping directories by mode/owner/group to modify them in one 'chXXX' call
-    if dir_mode:
-      chmod_key = (dir_mode,recursive_chmod_str)
-      if chmod_map.has_key(chmod_key):
-        chmod_map[chmod_key].append(dir_name)
-      else:
-        chmod_map[chmod_key] = [dir_name]
-
-    if dir_owner:
-      owner_key = (dir_owner,dir_group,recursive_chown_str)
-      if chown_map.has_key(owner_key):
-        chown_map[owner_key].append(dir_name)
-      else:
-        chown_map[owner_key] = [dir_name]
-
-  def action_create(self):
-    global delayed_directories
-    global chmod_map
-    global chown_map
-
-    self.action_create_delayed()
-
-    hdp_conf_dir = self.resource.conf_dir
-    hdp_hdfs_user = self.resource.hdfs_user
-    secured = self.resource.security_enabled
-    keytab_file = self.resource.keytab
-    kinit_path = self.resource.kinit_path_local
-    bin_dir = self.resource.bin_dir
-
-    chmod_commands = []
-    chown_commands = []
-
-    for chmod_key, chmod_dirs in chmod_map.items():
-      mode = chmod_key[0]
-      recursive = chmod_key[1]
-      chmod_dirs_str = ' '.join(chmod_dirs)
-      chmod_commands.append(format("hadoop --config {hdp_conf_dir} fs -chmod {recursive} {mode} {chmod_dirs_str}"))
-
-    for chown_key, chown_dirs in chown_map.items():
-      owner = chown_key[0]
-      group = chown_key[1]
-      recursive = chown_key[2]
-      chown_dirs_str = ' '.join(chown_dirs)
-      if owner:
-        chown = owner
-        if group:
-          chown = format("{owner}:{group}")
-        chown_commands.append(format("hadoop --config {hdp_conf_dir} fs -chown {recursive} {chown} {chown_dirs_str}"))
-
-    if secured:
-        Execute(format("{kinit_path} -kt {keytab_file} {hdfs_principal_name}"),
-                user=hdp_hdfs_user)
-    #create all directories in one 'mkdir' call
-    dir_list_str = ' '.join(directories_list)
-    #for hadoop 2 we need to specify -p to create directories recursively
-    parent_flag = '-p'
-
-    Execute(format('hadoop --config {hdp_conf_dir} fs -mkdir {parent_flag} {dir_list_str} && {chmod_cmd} && {chown_cmd}',
-                   chmod_cmd=' && '.join(chmod_commands),
-                   chown_cmd=' && '.join(chown_commands)),
-            user=hdp_hdfs_user,
-            path=bin_dir,
-            not_if=as_user(format("hadoop --config {hdp_conf_dir} fs -ls {dir_list_str}"), hdp_hdfs_user)
-    )
-
-    directories_list[:] = []
-    chmod_map.clear()
-    chown_map.clear()

+ 111 - 0
ambari-common/src/main/python/resource_management/libraries/providers/hdfs_resource.py

@@ -0,0 +1,111 @@
+# !/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+import json
+from resource_management import *
+
+JSON_PATH = '/var/lib/ambari-agent/data/hdfs_resources.json'
+JAR_PATH = '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar'
+
+RESOURCE_TO_JSON_FIELDS = {
+  'target': 'target',
+  'type': 'type',
+  'action': 'action',
+  'source': 'source',
+  'owner': 'owner',
+  'group': 'group',
+  'mode': 'mode',
+  'recursive_chown': 'recursiveChown',
+  'recursive_chmod': 'recursiveChmod',
+  'change_permissions_for_parents': 'changePermissionforParents'
+}
+
+
+class HdfsResourceProvider(Provider):
+  def action_delayed(self, action_name):
+    resource = {}
+    env = Environment.get_instance()
+    if not 'hdfs_files' in env.config:
+      env.config['hdfs_files'] = []
+
+    # Check required parameters
+    if not self.resource.type or not self.resource.action:
+      raise Fail("Resource parameter type or action is not set.")
+
+    # Put values in dictionary-resource
+    for field_name, json_field_name in RESOURCE_TO_JSON_FIELDS.iteritems():
+      if field_name == 'action':
+        resource[json_field_name] = action_name
+      elif field_name == 'mode' and self.resource.mode:
+        resource[json_field_name] = oct(self.resource.mode)[1:]
+      elif getattr(self.resource, field_name):
+        resource[json_field_name] = getattr(self.resource, field_name)
+
+    # Add resource to create
+    env.config['hdfs_files'].append(resource)
+
+  def action_create_on_execute(self):
+    self.action_delayed("create")
+
+  def action_delete_on_execute(self):
+    self.action_delayed("delete")
+
+  def action_execute(self):
+    env = Environment.get_instance()
+
+    # Check required parameters
+    if not self.resource.user:
+      raise Fail("Resource parameter 'user' is not set.")
+
+    if not 'hdfs_files' in env.config or not env.config['hdfs_files']:
+      raise Fail("No resources to create. Please perform create_delayed"
+                 " or delete_delayed before doing execute action.")
+
+    hadoop_bin_dir = self.resource.hadoop_bin_dir
+    hadoop_conf_dir = self.resource.hadoop_conf_dir
+    user = self.resource.user
+    security_enabled = self.resource.security_enabled
+    keytab_file = self.resource.keytab
+    kinit_path = self.resource.kinit_path_local
+    logoutput = self.resource.logoutput
+    jar_path=JAR_PATH
+    json_path=JSON_PATH
+
+    if security_enabled:
+      Execute(format("{kinit_path} -kt {keytab_file} {hdfs_principal_name}"),
+              user=user
+      )
+
+    # Write json file to disk
+    File(JSON_PATH,
+         owner = user,
+         content = json.dumps(env.config['hdfs_files'])
+    )
+
+    # Execute jar to create/delete resources in hadoop
+    Execute(format("hadoop --config {hadoop_conf_dir} jar {jar_path} {json_path}"),
+            user=user,
+            path=[hadoop_bin_dir],
+            logoutput=logoutput,
+    )
+
+    # Clean
+    env.config['hdfs_files'] = []
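
To make the mechanism above concrete: each delayed call is translated into a plain dictionary via RESOURCE_TO_JSON_FIELDS and appended to env.config['hdfs_files']; action_execute() then dumps the accumulated list to /var/lib/ambari-agent/data/hdfs_resources.json and hands it to fast-hdfs-resource.jar in a single hadoop invocation. A rough sketch of one queued entry follows; the concrete path, owner and mode are illustrative placeholders, not values from this commit.

# Illustrative only: shape of one entry appended by action_delayed("create").
# Fields that are unset (or False) on the resource are omitted, and mode is
# serialized as oct(mode)[1:], e.g. 0755 -> "755".
queued_entry = {
  "target": "/user/test",     # hypothetical HDFS path
  "type": "directory",
  "action": "create",
  "owner": "hdfs",
  "mode": "755"
}
# action_execute() writes json.dumps(env.config['hdfs_files']) to
# /var/lib/ambari-agent/data/hdfs_resources.json and then runs, roughly:
#   hadoop --config <hadoop_conf_dir> jar /var/lib/ambari-agent/lib/fast-hdfs-resource.jar /var/lib/ambari-agent/data/hdfs_resources.json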

+ 1 - 2
ambari-common/src/main/python/resource_management/libraries/resources/__init__.py

@@ -26,7 +26,6 @@ from resource_management.libraries.resources.xml_config import *
 from resource_management.libraries.resources.properties_file import *
 from resource_management.libraries.resources.repository import *
 from resource_management.libraries.resources.monitor_webserver import *
-from resource_management.libraries.resources.hdfs_directory import *
-from resource_management.libraries.resources.copy_from_local import *
+from resource_management.libraries.resources.hdfs_resource import *
 from resource_management.libraries.resources.msi import *
 from resource_management.libraries.resources.modify_properties_file import *

+ 0 - 41
ambari-common/src/main/python/resource_management/libraries/resources/copy_from_local.py

@@ -1,41 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-
-_all__ = ["CopyFromLocal"]
-from resource_management.core.base import Resource, ForcedListArgument, ResourceArgument, BooleanArgument
-
-class CopyFromLocal(Resource):
-  action = ForcedListArgument(default="run")
-
-  path = ResourceArgument(default=lambda obj: obj.name)
-  dest_dir = ResourceArgument(required=True)
-  dest_file = ResourceArgument()
-  owner = ResourceArgument(required=True)  # file user owner
-  group = ResourceArgument()               # file group user
-  mode = ResourceArgument()                # file ACL mode
-  kinnit_if_needed = ResourceArgument(default='')
-  user = ResourceArgument()                # user to perform commands as. If not provided, default to the owner
-  hadoop_conf_dir = ResourceArgument(default='/etc/hadoop/conf')
-  hdfs_user = ResourceArgument(default='hdfs')
-  hadoop_bin_dir = ResourceArgument(default='/usr/bin')
-
-  actions = Resource.actions + ["run"]

+ 0 - 45
ambari-common/src/main/python/resource_management/libraries/resources/hdfs_directory.py

@@ -1,45 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-
-_all__ = ["HdfsDirectory"]
-from resource_management.core.base import Resource, ForcedListArgument, ResourceArgument, BooleanArgument
-
-class HdfsDirectory(Resource):
-  action = ForcedListArgument()
-
-  dir_name = ResourceArgument(default=lambda obj: obj.name)
-  owner = ResourceArgument()
-  group = ResourceArgument()
-  mode = ResourceArgument()
-  recursive_chown = BooleanArgument(default=False)
-  recursive_chmod = BooleanArgument(default=False)
-
-  conf_dir = ResourceArgument()
-  security_enabled = BooleanArgument(default=False)
-  keytab = ResourceArgument()
-  kinit_path_local = ResourceArgument()
-  hdfs_user = ResourceArgument()
-  bin_dir = ResourceArgument(default="")
-
-  #action 'create' immediately creates all pending directory in efficient manner
-  #action 'create_delayed' add directory to list of pending directories
-  actions = Resource.actions + ["create","create_delayed"]

+ 76 - 0
ambari-common/src/main/python/resource_management/libraries/resources/hdfs_resource.py

@@ -0,0 +1,76 @@
+# !/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+
+_all__ = ["HdfsResource"]
+from resource_management.core.base import Resource, ForcedListArgument, ResourceArgument, BooleanArgument
+
+"""
+Calling a lot of hadoop commands takes too much time.
+The cause is that for every call new connection initialized, with datanodes, namenode.
+
+While this resource can gather the dicteroies/files to create/delete/copyFromLocal.
+And after just with one call create all that.
+
+action = create_delayed / delete_delayed. Are for gathering information  about what you want
+to create.
+
+After everything is gathered you should execute action = execute. To perform delayed actions
+
+The resource is a replacement for the following operations:
+  1) hadoop fs -rmr
+  2) hadoop fs -copyFromLocal
+  3) hadoop fs -put
+  4) hadoop fs -mkdir
+  5) hadoop fs -touchz
+  6) hadoop fs -chmod
+  7) hadoop fs -chown
+"""
+
+
+class HdfsResource(Resource):
+  # Required: {target, type, action}
+  # path to hadoop file/directory
+  target = ResourceArgument(default=lambda obj: obj.name)
+  # "directory" or "file"
+  type = ResourceArgument()
+  # "create_delayed" or "delete_delayed" or "execute"
+  action = ForcedListArgument()
+  # if present - copies file/directory from local path {source} to hadoop path - {target}
+  source = ResourceArgument()
+  owner = ResourceArgument()
+  group = ResourceArgument()
+  mode = ResourceArgument()
+  logoutput = ResourceArgument()
+  recursive_chown = BooleanArgument(default=False)
+  recursive_chmod = BooleanArgument(default=False)
+  change_permissions_for_parents = BooleanArgument(default=False)
+
+  security_enabled = BooleanArgument(default=False)
+  keytab = ResourceArgument()
+  kinit_path_local = ResourceArgument()
+  user = ResourceArgument()
+  hadoop_bin_dir = ResourceArgument()
+  hadoop_conf_dir = ResourceArgument()
+
+  #action 'execute' immediately creates all pending files/directories in efficient manner
+  #action 'create_delayed/delete_delayed' adds file/directory to list of pending directories
+  actions = Resource.actions + ["create_on_execute", "delete_on_execute", "execute"]
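
A minimal usage sketch of the new resource, following the pattern the service scripts in this commit adopt (see the accumulo_configuration.py hunk further down): definitions are queued with create_on_execute/delete_on_execute and flushed with a single execute call. The paths, users and modes below are placeholders, and params.HdfsResource is assumed to be the functools.partial wrapper that the updated params files define.

# Hypothetical service-script snippet, not part of this diff.
params.HdfsResource(format("/user/{params.service_user}"),
                    type="directory",
                    action="create_on_execute",
                    owner=params.service_user,
                    mode=0700
)
params.HdfsResource(params.app_hdfs_dir,
                    type="directory",
                    action="create_on_execute",
                    owner=params.service_user,
                    mode=0751
)
# Nothing has touched HDFS yet; this single call materializes every queued entry
# with one fast-hdfs-resource.jar invocation.
params.HdfsResource(None, action="execute")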

+ 2 - 0
ambari-common/src/main/python/resource_management/libraries/script/script.py

@@ -297,6 +297,7 @@ class Script(object):
       return None

     stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
+
     if stack_version_unformatted is None or stack_version_unformatted == '':
       return None
 
 
@@ -325,6 +326,7 @@ class Script(object):
     :return: True if the command's stack is less than the specified version
     """
     hdp_stack_version = Script.get_hdp_stack_version()
+
     if hdp_stack_version is None:
       return False
 
 

+ 3 - 3
ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo.py

@@ -75,13 +75,13 @@ def accumulo(name=None # 'master' or 'tserver' or 'client'
       owner=params.accumulo_user
     )
 
 
-  if name in ["master","tserver"]:
+  if name == "master":
     params.HdfsDirectory(format("{params.accumulo_hdfs_root_dir}"),
-                         action="create_delayed",
+                         action="create_on_execute",
                          owner=params.accumulo_user,
     )
     params.HdfsDirectory(format("{params.accumulo_hdfs_stage_dir}"),
-                         action="create_delayed",
+                         action="create_on_execute",
                          owner=params.accumulo_user,
                          mode=0751
     )

+ 9 - 5
ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_configuration.py

@@ -160,17 +160,19 @@ def setup_conf_dir(name=None): # 'master' or 'tserver' or 'monitor' or 'gc' or '
 
 
   # other server setup
   if name == 'master':
-    params.HdfsDirectory(format("/user/{params.accumulo_user}"),
-                         action="create_delayed",
+    params.HdfsResource(format("/user/{params.accumulo_user}"),
+                         type="directory",
+                         action="create_on_execute",
                          owner=params.accumulo_user,
                          mode=0700
     )
-    params.HdfsDirectory(format("{params.parent_dir}"),
-                         action="create_delayed",
+    params.HdfsResource(format("{params.parent_dir}"),
+                         type="directory",
+                         action="create_on_execute",
                          owner=params.accumulo_user,
                          mode=0700
     )
-    params.HdfsDirectory(None, action="create")
+    params.HdfsResource(None, action="execute")
     if params.security_enabled and params.has_secure_user_auth:
       Execute( format("{params.kinit_cmd} "
                       "{params.daemon_script} init "
@@ -184,6 +186,7 @@ def setup_conf_dir(name=None): # 'master' or 'tserver' or 'monitor' or 'gc' or '
                                      "{params.hadoop_conf_dir} fs -stat "
                                      "{params.hadoop_conf_dir} fs -stat "
                                      "{params.instance_volumes}"),
                                      "{params.instance_volumes}"),
                               params.accumulo_user),
                               params.accumulo_user),
+               logoutput=True,
                user=params.accumulo_user)
                user=params.accumulo_user)
     else:
     else:
       passfile = format("{params.exec_tmp_dir}/pass")
       passfile = format("{params.exec_tmp_dir}/pass")
@@ -205,6 +208,7 @@ def setup_conf_dir(name=None): # 'master' or 'tserver' or 'monitor' or 'gc' or '
                                        "{params.hadoop_conf_dir} fs -stat "
                                        "{params.hadoop_conf_dir} fs -stat "
                                        "{params.instance_volumes}"),
                                        "{params.instance_volumes}"),
                                 params.accumulo_user),
                                 params.accumulo_user),
+                 logoutput=True,
                  user=params.accumulo_user)
                  user=params.accumulo_user)
       finally:
       finally:
         os.remove(passfile)
         os.remove(passfile)

+ 10 - 9
ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py

@@ -16,13 +16,14 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
 
-"""
+"""HBASE/0.96.0.2.0/package/scripts/params_linux.py
 from resource_management.libraries.functions import conf_select
+from resource_management import *
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.resources.hdfs_directory import HdfsDirectory
+
 
 import status_params
 
@@ -144,14 +145,14 @@ hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 import functools
-#create partial functions with common arguments for every HdfsDirectory call
-#to create hdfs directory we need to call params.HdfsDirectory in code
-HdfsDirectory = functools.partial(
-  HdfsDirectory,
-  conf_dir=hadoop_conf_dir,
-  hdfs_user=hdfs_user,
+#create partial functions with common arguments for every HdfsResource call
+#to create/delete hdfs directory/file/copyfromlocal we need to call params.HdfsResource in code
+HdfsResource = functools.partial(
  HdfsResource,
+  user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  bin_dir = hadoop_bin_dir
+  hadoop_bin_dir = hadoop_bin_dir,
+  hadoop_conf_dir = hadoop_conf_dir
 )

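Note: the params.py hunk above (and the analogous ones below for AMBARI_METRICS, FALCON, HBASE, HDFS, HIVE and MAHOUT) binds the shared user/keytab/kinit/bin-dir arguments into a functools.partial, so service code only passes per-path arguments. A minimal sketch of the resulting call pattern, condensed from the call sites in this commit; the path and owner below are illustrative placeholders, not values taken from the commit:

# queue one or more HDFS operations, then apply them in a single batch
params.HdfsResource("/user/exampleuser",         # hypothetical path
                    type="directory",
                    action="create_on_execute",  # queued only, nothing touches HDFS yet
                    owner="exampleuser",
                    mode=0755)
params.HdfsResource(None, action="execute")      # flushes everything queued above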
+ 8 - 6
ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/hbase.py

@@ -167,23 +167,25 @@ def hbase(name=None # 'master' or 'regionserver' or 'client'
    hbase_TemplateConfig( format("hbase_client_jaas.conf"), user=params.hbase_user)
    hbase_TemplateConfig( format("ams_zookeeper_jaas.conf"), user=params.hbase_user)
 
-  if name in ["master","regionserver"]:
+  if name == "master":
 
    if params.is_hbase_distributed:
 
-      params.HdfsDirectory(params.hbase_root_dir,
-                           action="create_delayed",
+      params.HdfsResource(params.hbase_root_dir,
+                           type="directory",
+                           action="create_on_execute",
                           owner=params.hbase_user,
                           mode=0775
      )
 
-      params.HdfsDirectory(params.hbase_staging_dir,
-                           action="create_delayed",
+      params.HdfsResource(params.hbase_staging_dir,
+                           type="directory",
+                           action="create_on_execute",
                           owner=params.hbase_user,
                           mode=0711
      )
 
-      params.HdfsDirectory(None, action="create")
+      params.HdfsResource(None, action="execute")
 
    else:
 

+ 8 - 8
ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py

@@ -185,17 +185,17 @@ hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_nam
 kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
 
 import functools
-# create partial functions with common arguments for every HdfsDirectory call
-# to create hdfs directory we need to call params.HdfsDirectory in code
-HdfsDirectory = functools.partial(
-  HdfsDirectory,
-  conf_dir=hadoop_conf_dir,
-  hdfs_user=hdfs_user,
+#create partial functions with common arguments for every HdfsResource call
+#to create/delete hdfs directory/file/copyfromlocal we need to call params.HdfsResource in code
+HdfsResource = functools.partial(
+  HdfsResource,
+  user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  bin_dir = hadoop_bin_dir
-)
+  hadoop_bin_dir = hadoop_bin_dir,
+  hadoop_conf_dir = hadoop_conf_dir
+ )
 
 
 

+ 13 - 10
ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py

@@ -91,33 +91,36 @@ def falcon(type, action = None):
  if type == 'server':
    if action == 'config':
      if params.store_uri[0:4] == "hdfs":
-        params.HdfsDirectory(params.store_uri,
-                             action="create_delayed",
+        params.HdfsResource(params.store_uri,
+                             type="directory",
+                             action="create_on_execute",
                             owner=params.falcon_user,
                             mode=0755
        )
-      if params.store_uri[0:4] == "file":
+      elif params.store_uri[0:4] == "file":
        Directory(params.store_uri[7:],
                  owner=params.falcon_user,
                  recursive=True
        )
-      params.HdfsDirectory(params.flacon_apps_dir,
-                           action="create_delayed",
+      params.HdfsResource(params.flacon_apps_dir,
+                           type="directory",
+                           action="create_on_execute",
                           owner=params.falcon_user,
-                           mode=0777#TODO change to proper mode
+                           mode=0777 #TODO change to proper mode
      )
      if params.falcon_store_uri[0:4] == "hdfs":
-        params.HdfsDirectory(params.falcon_store_uri,
-                             action="create_delayed",
+        params.HdfsResource(params.falcon_store_uri,
+                             type="directory",
+                             action="create_on_execute",
                             owner=params.falcon_user,
                             mode=0755
        )
-      if params.falcon_store_uri[0:4] == "file":
+      elif params.falcon_store_uri[0:4] == "file":
        Directory(params.falcon_store_uri[7:],
                  owner=params.falcon_user,
                  recursive=True
        )
-      params.HdfsDirectory(None, action="create")
+      params.HdfsResource(None, action="execute")
      Directory(params.falcon_local_dir,
                owner=params.falcon_user,
                recursive=True,

+ 11 - 9
ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py

@@ -18,12 +18,13 @@ limitations under the License.
 """
 import status_params
 
+from resource_management import *
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.resources.hdfs_directory import HdfsDirectory
+
 
 config = Script.get_config()
 
@@ -100,14 +101,15 @@ hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_nam
 smokeuser_principal =  config['configurations']['cluster-env']['smokeuser_principal_name']
 kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
 import functools
-#create partial functions with common arguments for every HdfsDirectory call
-#to create hdfs directory we need to call params.HdfsDirectory in code
-HdfsDirectory = functools.partial(
-  HdfsDirectory,
-  conf_dir=hadoop_conf_dir,
-  hdfs_user=hdfs_user,
+#create partial functions with common arguments for every HdfsResource call
+#to create/delete hdfs directory/file/copyfromlocal we need to call params.HdfsResource in code
+HdfsResource = functools.partial(
+  HdfsResource,
+  user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  bin_dir = hadoop_bin_dir
-)
+  hadoop_bin_dir = hadoop_bin_dir,
+  hadoop_conf_dir = hadoop_conf_dir
+ )
+

+ 8 - 6
ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase.py

@@ -163,17 +163,19 @@ def hbase(name=None):
      group=params.user_group,
      owner=params.hbase_user
    )
-  if name in ["master","regionserver"]:
-    params.HdfsDirectory(params.hbase_hdfs_root_dir,
-                         action="create_delayed",
+  if name == "master":
+    params.HdfsResource(params.hbase_hdfs_root_dir,
+                         type="directory",
+                         action="create_on_execute",
                         owner=params.hbase_user
    )
-    params.HdfsDirectory(params.hbase_staging_dir,
-                         action="create_delayed",
+    params.HdfsResource(params.hbase_staging_dir,
+                         type="directory",
+                         action="create_on_execute",
                         owner=params.hbase_user,
                         mode=0711
    )
-    params.HdfsDirectory(None, action="create")
+    params.HdfsResource(None, action="execute")
 
 def hbase_TemplateConfig(name, tag=None):
   import params

+ 12 - 9
ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py

@@ -24,6 +24,7 @@ from functions import calc_xmn_from_xms
 
 from ambari_commons.constants import AMBARI_SUDO_BINARY
 
+from resource_management import *
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions.version import format_hdp_stack_version
@@ -31,7 +32,7 @@ from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.functions import get_unique_id_and_date
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.resources.hdfs_directory import HdfsDirectory
+
 from resource_management.libraries.functions.substitute_vars import substitute_vars
 
 # server configurations
@@ -140,7 +141,9 @@ if security_enabled:
  _hostname_lowercase = config['hostname'].lower()
  master_jaas_princ = config['configurations']['hbase-site']['hbase.master.kerberos.principal'].replace('_HOST',_hostname_lowercase)
  regionserver_jaas_princ = config['configurations']['hbase-site']['hbase.regionserver.kerberos.principal'].replace('_HOST',_hostname_lowercase)
-  queryserver_jaas_princ = config['configurations']['hbase-site']['phoenix.queryserver.kerberos.principal'].replace('_HOST',_hostname_lowercase)
+  _queryserver_jaas_princ = config['configurations']['hbase-site']['phoenix.queryserver.kerberos.principal']
+  if not is_empty(_queryserver_jaas_princ):
+    queryserver_jaas_princ =_queryserver_jaas_princ.replace('_HOST',_hostname_lowercase)
 
 master_keytab_path = config['configurations']['hbase-site']['hbase.master.keytab.file']
 regionserver_keytab_path = config['configurations']['hbase-site']['hbase.regionserver.keytab.file']
@@ -169,16 +172,16 @@ hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 import functools
-#create partial functions with common arguments for every HdfsDirectory call
-#to create hdfs directory we need to call params.HdfsDirectory in code
-HdfsDirectory = functools.partial(
-  HdfsDirectory,
-  conf_dir=hadoop_conf_dir,
-  hdfs_user=hdfs_user,
+#create partial functions with common arguments for every HdfsResource call
+#to create/delete hdfs directory/file/copyfromlocal we need to call params.HdfsResource in code
+HdfsResource = functools.partial(
+  HdfsResource,
+  user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  bin_dir = hadoop_bin_dir
+  hadoop_bin_dir = hadoop_bin_dir,
+  hadoop_conf_dir = hadoop_conf_dir
 )
 
 # ranger host

+ 0 - 1
ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/service_check.py

@@ -46,7 +46,6 @@ class HbaseServiceCheckDefault(HbaseServiceCheck):
    env.set_params(params)
    
    output_file = "/apps/hbase/data/ambarismoketest"
-    test_cmd = format("fs -test -e {output_file}")
    smokeuser_kinit_cmd = format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser_principal};") if params.security_enabled else ""
    hbase_servicecheck_file = format("{exec_tmp_dir}/hbase-smoke.sh")
  

BIN
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/files/fast-hdfs-resource.jar


+ 5 - 0
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py

@@ -98,6 +98,11 @@ def hdfs(name=None):
       owner=tc_owner,
       content=Template("slaves.j2")
  )
+
+  # for source-code of jar goto contrib/fast-hdfs-resource
+  File(format("{ambari_libs_dir}/fast-hdfs-resource.jar"),
+       content=StaticFile("fast-hdfs-resource.jar")
+  )
  
  if params.lzo_enabled and len(params.lzo_packages) > 0:
      Package(params.lzo_packages)

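The fast-hdfs-resource.jar installed above (source under contrib/fast-hdfs-resource, per the in-diff comment) is presumably what lets HdfsResource batch its work: instead of one "hadoop fs" invocation per path, the queued requests can be handed to the jar in a single call. A hedged sketch of such an invocation, reusing ambari_libs_dir from the HDFS params change below; the json_requests_file name is an assumption for illustration, not taken from this commit:

# Assumed mechanics: serialize the queued requests, then run the jar once over the whole list.
Execute(format("hadoop --config {hadoop_conf_dir} jar {ambari_libs_dir}/fast-hdfs-resource.jar {json_requests_file}"),
        user=params.hdfs_user)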
+ 9 - 6
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py

@@ -168,18 +168,21 @@ def create_name_dirs(directories):
 def create_hdfs_directories(check):
   import params
 
-  params.HdfsDirectory("/tmp",
-                       action="create_delayed",
+  params.HdfsResource("/tmp",
+                       type="directory",
+                       action="create_on_execute",
                       owner=params.hdfs_user,
                       mode=0777
  )
-  params.HdfsDirectory(params.smoke_hdfs_user_dir,
-                       action="create_delayed",
+  params.HdfsResource(params.smoke_hdfs_user_dir,
+                       type="directory",
+                       action="create_on_execute",
                       owner=params.smoke_user,
                       mode=params.smoke_hdfs_user_mode
  )
-  params.HdfsDirectory(None, action="create",
-                       only_if=check #skip creation when HA not active
+  params.HdfsResource(None, 
+                      action="execute",
+                      only_if=check #skip creation when HA not active
  )
 
 def format_namenode(force=None):

+ 0 - 1
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_nfsgateway.py

@@ -22,7 +22,6 @@ from resource_management.core.logger import Logger
 from resource_management.core.resources import Directory
 from resource_management.core import shell
 from utils import service
-from utils import hdfs_directory
 import subprocess,os
 
 # NFS GATEWAY is always started by root using jsvc due to rpcbind bugs

+ 0 - 1
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_snamenode.py

@@ -19,7 +19,6 @@ limitations under the License.
 
 from resource_management import *
 from utils import service
-from utils import hdfs_directory
 from ambari_commons.os_family_impl import OsFamilyImpl, OsFamilyFuncImpl
 from ambari_commons import OSConst
 

+ 11 - 9
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py

@@ -32,7 +32,8 @@ from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import get_klist_path
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.resources.hdfs_directory import HdfsDirectory
+from resource_management.libraries.resources.hdfs_resource import HdfsResource
+
 from resource_management.libraries.functions.format_jvm_option import format_jvm_option
 from resource_management.libraries.functions.get_lzo_packages import get_lzo_packages
 
@@ -96,7 +97,7 @@ if Script.is_hdp_stack_greater_or_equal("2.2"):
      hadoop_secure_dn_user = '""'
 
 
-
+ambari_libs_dir = "/var/lib/ambari-agent/lib"
 limits_conf_dir = "/etc/security/limits.d"
 
 if Script.is_hdp_stack_greater_or_equal("2.0") and Script.is_hdp_stack_less_than("2.1") and not OSCheck.is_suse_family():
@@ -284,18 +285,19 @@ else:
  jn_kinit_cmd = ""
 
 import functools
-#create partial functions with common arguments for every HdfsDirectory call
-#to create hdfs directory we need to call params.HdfsDirectory in code
-HdfsDirectory = functools.partial(
-  HdfsDirectory,
-  conf_dir=hadoop_conf_dir,
-  hdfs_user=hdfs_user,
+#create partial functions with common arguments for every HdfsResource call
+#to create/delete/copyfromlocal hdfs directories/files we need to call params.HdfsResource in code
+HdfsResource = functools.partial(
+  HdfsResource,
+  user=hdfs_user,
  security_enabled = security_enabled,
  keytab = hdfs_user_keytab,
  kinit_path_local = kinit_path_local,
-  bin_dir = hadoop_bin_dir
+  hadoop_bin_dir = hadoop_bin_dir,
+  hadoop_conf_dir = hadoop_conf_dir
 )
 
+
 # The logic for LZO also exists in OOZIE's params.py
 io_compression_codecs = default("/configurations/core-site/io.compression.codecs", None)
 lzo_enabled = io_compression_codecs is not None and "com.hadoop.compression.lzo" in io_compression_codecs.lower()

+ 14 - 39
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/service_check.py

@@ -37,15 +37,6 @@ class HdfsServiceCheckDefault(HdfsServiceCheck):
 
    safemode_command = format("dfsadmin -fs {namenode_address} -safemode get | grep OFF")
 
-    create_dir_cmd = format("fs -mkdir {dir}")
-    chmod_command = format("fs -chmod 777 {dir}")
-    test_dir_exists = as_user(format("{hadoop_bin_dir}/hadoop --config {hadoop_conf_dir} fs -test -e {dir}"), params.hdfs_user)
-    cleanup_cmd = format("fs -rm {tmp_file}")
-    #cleanup put below to handle retries; if retrying there wil be a stale file
-    #that needs cleanup; exit code is fn of second command
-    create_file_cmd = format(
-      "{cleanup_cmd}; hadoop --config {hadoop_conf_dir} fs -put /etc/passwd {tmp_file}")
-    test_cmd = format("fs -test -e {tmp_file}")
    if params.security_enabled:
      Execute(format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_principal_name}"),
        user=params.hdfs_user
@@ -58,39 +49,23 @@ class HdfsServiceCheckDefault(HdfsServiceCheck):
                  tries=20,
                  bin_dir=params.hadoop_bin_dir
    )
-    ExecuteHadoop(create_dir_cmd,
-                  user=params.hdfs_user,
-                  logoutput=True,
-                  not_if=test_dir_exists,
-                  conf_dir=params.hadoop_conf_dir,
-                  try_sleep=3,
-                  tries=5,
-                  bin_dir=params.hadoop_bin_dir
+    params.HdfsResource(dir,
+                        type="directory",
+                        action="create_on_execute",
+                        mode=0777
    )
-    ExecuteHadoop(chmod_command,
-                  user=params.hdfs_user,
-                  logoutput=True,
-                  conf_dir=params.hadoop_conf_dir,
-                  try_sleep=3,
-                  tries=5,
-                  bin_dir=params.hadoop_bin_dir
+    params.HdfsResource(tmp_file,
+                        type="file",
+                        action="delete_on_execute",
    )
-    ExecuteHadoop(create_file_cmd,
-                  user=params.hdfs_user,
-                  logoutput=True,
-                  conf_dir=params.hadoop_conf_dir,
-                  try_sleep=3,
-                  tries=5,
-                  bin_dir=params.hadoop_bin_dir
-    )
-    ExecuteHadoop(test_cmd,
-                  user=params.hdfs_user,
-                  logoutput=True,
-                  conf_dir=params.hadoop_conf_dir,
-                  try_sleep=3,
-                  tries=5,
-                  bin_dir=params.hadoop_bin_dir
+
+    params.HdfsResource(tmp_file,
+                        type="file",
+                        source="/etc/passwd",
+                        action="create_on_execute"
    )
+    params.HdfsResource(None, action="execute")
+
    if params.has_journalnode_hosts:
      journalnode_port = params.journalnode_port
      checkWebUIFileName = "checkWebUI.py"

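The rewritten HDFS service check above also illustrates the delete-then-create pattern: queuing delete_on_execute and create_on_execute for the same tmp_file replaces the old "fs -rm; fs -put" retry handling, since both queued operations are resolved in order by the final execute. Condensed from the hunk above (dir and tmp_file are whatever the surrounding check defines):

params.HdfsResource(dir, type="directory", action="create_on_execute", mode=0777)
params.HdfsResource(tmp_file, type="file", action="delete_on_execute")                        # drop any stale file left by a previous retry
params.HdfsResource(tmp_file, type="file", source="/etc/passwd", action="create_on_execute")
params.HdfsResource(None, action="execute")                                                   # apply all queued operations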
+ 5 - 17
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/files/templetonSmoke.sh

@@ -23,10 +23,11 @@
 export ttonhost=$1
 export smoke_test_user=$2
 export templeton_port=$3
-export smoke_user_keytab=$4
-export security_enabled=$5
-export kinit_path_local=$6
-export smokeuser_principal=$7
+export ttonTestScript=$4
+export smoke_user_keytab=$5
+export security_enabled=$6
+export kinit_path_local=$7
+export smokeuser_principal=$8
 export ttonurl="http://${ttonhost}:${templeton_port}/templeton/v1"
 
 if [[ $security_enabled == "true" ]]; then
@@ -67,21 +68,8 @@ if [[ $security_enabled == "true" ]]; then
 fi
 
 #try pig query
-outname=${smoke_test_user}.`date +"%M%d%y"`.$$;
-ttonTestOutput="/tmp/idtest.${outname}.out";
-ttonTestInput="/tmp/idtest.${outname}.in";
 ttonTestScript="idtest.${outname}.pig"
 
-echo "A = load '$ttonTestInput' using PigStorage(':');"  > /tmp/$ttonTestScript
-echo "B = foreach A generate \$0 as id; " >> /tmp/$ttonTestScript
-echo "store B into '$ttonTestOutput';" >> /tmp/$ttonTestScript
-
-#copy pig script to hdfs
-/var/lib/ambari-agent/ambari-sudo.sh su ${smoke_test_user} -s /bin/bash - -c "hadoop dfs -copyFromLocal /tmp/$ttonTestScript /tmp/$ttonTestScript"
-
-#copy input file to hdfs
-/var/lib/ambari-agent/ambari-sudo.sh su ${smoke_test_user} -s /bin/bash - -c "hadoop dfs -copyFromLocal /etc/passwd $ttonTestInput"
-
 #create, copy post args file
 echo -n "user.name=${smoke_test_user}&file=/tmp/$ttonTestScript" > /tmp/pig_post.txt
 

+ 89 - 27
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py

@@ -22,6 +22,7 @@ from resource_management import *
 from resource_management.libraries import functions
 import sys
 import os
+import glob
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
 from ambari_commons import OSConst
 from urlparse import urlparse
@@ -81,18 +82,96 @@ def hive(name=None):
 
  if name == 'hiveserver2':
 
-    params.HdfsDirectory(params.hive_apps_whs_dir,
-                         action="create_delayed",
-                         owner=params.hive_user,
-                         mode=0777
+    if params.hdp_stack_version_major != "" and compare_versions(params.hdp_stack_version_major, '2.2') >=0:
+      params.HdfsResource(InlineTemplate(params.mapreduce_tar_destination).get_content(),
+                          type="file",
+                          action="create_on_execute",
+                          source=params.mapreduce_tar_source,
+                          group=params.user_group,
+                          mode=params.tarballs_mode
+      )
+        
+    if params.hdp_stack_version_major != "" and compare_versions(params.hdp_stack_version_major, "2.2.0.0") < 0:
+      params.HdfsResource(params.webhcat_apps_dir,
+                           type="directory",
+                           action="create_on_execute",
+                           owner=params.webhcat_user,
+                           mode=0755
+      )
+  
+    if params.hcat_hdfs_user_dir != params.webhcat_hdfs_user_dir:
+      params.HdfsResource(params.hcat_hdfs_user_dir,
+                           type="directory",
+                           action="create_on_execute",
+                           owner=params.hcat_user,
+                           mode=params.hcat_hdfs_user_mode
+      )
+    params.HdfsResource(params.webhcat_hdfs_user_dir,
+                         type="directory",
+                         action="create_on_execute",
+                         owner=params.webhcat_user,
+                         mode=params.webhcat_hdfs_user_mode
+    )
+  
+    for src_filepath in glob.glob(params.hadoop_streaming_tar_source):
+      src_filename = os.path.basename(src_filepath)
+      params.HdfsResource(InlineTemplate(params.hadoop_streaming_tar_destination_dir).get_content() + '/' + src_filename,
+                          type="file",
+                          action="create_on_execute",
+                          source=src_filepath,
+                          group=params.user_group,
+                          mode=params.tarballs_mode
+      )
+  
+    if (os.path.isfile(params.pig_tar_source)):
+      params.HdfsResource(InlineTemplate(params.pig_tar_destination).get_content(),
+                          type="file",
+                          action="create_on_execute",
+                          source=params.pig_tar_source,
+                          group=params.user_group,
+                          mode=params.tarballs_mode
+      )
+  
+    params.HdfsResource(InlineTemplate(params.hive_tar_destination).get_content(),
+                        type="file",
+                        action="create_on_execute",
+                        source=params.hive_tar_source,
+                        group=params.user_group,
+                        mode=params.tarballs_mode
+    )
+ 
+    for src_filepath in glob.glob(params.sqoop_tar_source):
+      src_filename = os.path.basename(src_filepath)
+      params.HdfsResource(InlineTemplate(params.sqoop_tar_destination_dir).get_content() + '/' + src_filename,
+                          type="file",
+                          action="create_on_execute",
+                          source=src_filepath,
+                          group=params.user_group,
+                          mode=params.tarballs_mode
+      )
+      
+    params.HdfsResource(params.hive_apps_whs_dir,
+                         type="directory",
+                          action="create_on_execute",
+                          owner=params.hive_user,
+                          mode=0777
    )
-    params.HdfsDirectory(params.hive_hdfs_user_dir,
-                         action="create_delayed",
-                         owner=params.hive_user,
-                         mode=params.hive_hdfs_user_mode
+    params.HdfsResource(params.hive_hdfs_user_dir,
+                         type="directory",
+                          action="create_on_execute",
+                          owner=params.hive_user,
+                          mode=params.hive_hdfs_user_mode
    )
-    setup_custom_scratchdir()
-    params.HdfsDirectory(None, action="create")
+    
+    if not is_empty(params.hive_exec_scratchdir) and not urlparse(params.hive_exec_scratchdir).path.startswith("/tmp"):
+      params.HdfsResource(params.hive_exec_scratchdir,
+                           type="directory",
+                           action="create_on_execute",
+                           owner=params.hive_user,
+                           group=params.hdfs_user,
+                           mode=0777) # Hive expects this dir to be writeable by everyone as it is used as a temp dir
+      
+    params.HdfsResource(None, action="execute")
 
  Directory(params.hive_etc_dir_prefix,
            mode=0755
@@ -284,20 +363,3 @@ def jdbc_connector():
  File(params.target,
       mode = 0644,
  )
-
-# In case Hive has a custom path for its HDFS temporary directory,
-# recursive directory creation will be a prerequisite as 'hive' user cannot write on the root of the HDFS
-def setup_custom_scratchdir():
-  import params
-  # If this property is custom and not a variation of the writable temp dir
-  if is_empty(params.hive_exec_scratchdir):
-    return
-  parsed = urlparse(params.hive_exec_scratchdir)
-  if parsed.path.startswith("/tmp"):
-    return
-  params.HdfsDirectory(params.hive_exec_scratchdir,
-                       action="create_delayed",
-                       owner=params.hive_user,
-                       group=params.hdfs_user,
-                       mode=0777) # Hive expects this dir to be writeable by everyone as it is used as a temp dir
-

+ 8 - 5
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py

@@ -23,7 +23,6 @@ from resource_management import *
 from hive import hive
 from hive_service import hive_service
 from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
 from resource_management.libraries.functions.security_commons import build_expectations, \
   cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \
   FILE_TYPE_XML
@@ -71,8 +70,6 @@ class HiveServerDefault(HiveServer):
    env.set_params(params)
    self.configure(env) # FOR SECURITY
 
-    # This function is needed in HDP 2.2, but it is safe to call in earlier versions.
-    copy_tarballs_to_hdfs('mapreduce', 'hive-server2', params.tez_user, params.hdfs_user, params.user_group)
    setup_ranger_hive()    
    hive_service( 'hiveserver2', action = 'start',
      rolling_restart=rolling_restart )
@@ -102,8 +99,14 @@ class HiveServerDefault(HiveServer):
    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
      conf_select.select(params.stack_name, "hive", params.version)
      Execute(format("hdp-select set hive-server2 {version}"))
-      copy_tarballs_to_hdfs('mapreduce', 'hive-server2', params.tez_user, params.hdfs_user, params.user_group)
-      copy_tarballs_to_hdfs('tez', 'hive-server2', params.tez_user, params.hdfs_user, params.user_group)
+      params.HdfsResource(InlineTemplate(params.mapreduce_tar_destination).get_content(),
+                          type="file",
+                          action="create_on_execute",
+                          source=params.mapreduce_tar_source,
+                          group=params.user_group,
+                          mode=params.tarballs_mode
+      )
+      params.HdfsResource(None, action="execute")
 
  def security_status(self, env):
    import status_params

+ 50 - 13
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py

@@ -25,13 +25,14 @@ import os
 from ambari_commons.constants import AMBARI_SUDO_BINARY
 from ambari_commons.os_check import OSCheck
 
+from resource_management import *
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.functions.get_port_from_url import get_port_from_url
-from resource_management.libraries.resources.hdfs_directory import HdfsDirectory
+
 
 # server configurations
 config = Script.get_config()
@@ -45,9 +46,12 @@ hostname = config["hostname"]
 
 # This is expected to be of the form #.#.#.#
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
+hdp_stack_version_major = format_hdp_stack_version(stack_version_unformatted)
 stack_is_hdp21 = Script.is_hdp_stack_greater_or_equal("2.0") and Script.is_hdp_stack_less_than("2.2")
 
+# this is not avaliable on INSTALL action because hdp-select is not available
+hdp_stack_version = version.get_hdp_build_version(hdp_stack_version_major)
+
 # New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
 version = default("/commandParams/version", None)
 
@@ -95,6 +99,41 @@ if Script.is_hdp_stack_greater_or_equal("2.2"):
   # there are no client versions of these, use server versions directly
   hcat_lib = '/usr/hdp/current/hive-webhcat/share/hcatalog'
   webhcat_bin_dir = '/usr/hdp/current/hive-webhcat/sbin'
+  
+  # --- Tarballs ---
+
+  hive_tar_source = config['configurations']['cluster-env']['hive_tar_source']
+  pig_tar_source = config['configurations']['cluster-env']['pig_tar_source']
+  hadoop_streaming_tar_source = config['configurations']['cluster-env']['hadoop-streaming_tar_source']
+  sqoop_tar_source = config['configurations']['cluster-env']['sqoop_tar_source']
+  mapreduce_tar_source = config['configurations']['cluster-env']['mapreduce_tar_source']
+  tez_tar_source = config['configurations']['cluster-env']['tez_tar_source']
+  
+  hive_tar_destination = config['configurations']['cluster-env']['hive_tar_destination_folder']  + "/" + os.path.basename(hive_tar_source)
+  pig_tar_destination = config['configurations']['cluster-env']['pig_tar_destination_folder'] + "/" + os.path.basename(pig_tar_source)
+  hadoop_streaming_tar_destination_dir = config['configurations']['cluster-env']['hadoop-streaming_tar_destination_folder']
+  sqoop_tar_destination = config['configurations']['cluster-env']['sqoop_tar_destination_folder'] + "/" + os.path.basename(sqoop_tar_source)
+  mapreduce_tar_destination = config['configurations']['cluster-env']['mapreduce_tar_destination_folder'] + "/" + os.path.basename(mapreduce_tar_source)
+  tez_tar_destination = config['configurations']['cluster-env']['tez_tar_destination_folder'] + "/" + os.path.basename(tez_tar_source)
+
+  tarballs_mode = 0444
+else:
+  # --- Tarballs ---
+  hive_tar_source = hive_tar_file
+  pig_tar_source = pig_tar_file
+  hadoop_streaming_tar_source = hadoop_streeming_jars
+  sqoop_tar_source = sqoop_tar_file
+
+  webhcat_apps_dir = "/apps/webhcat"
+  
+  hive_tar_destination = webhcat_apps_dir + "/" + os.path.basename(hive_tar_source)
+  pig_tar_destination = webhcat_apps_dir + "/" + os.path.basename(pig_tar_source)
+  hadoop_streaming_tar_destination_dir = webhcat_apps_dir
+  sqoop_tar_destination_dir = webhcat_apps_dir
+
+  tarballs_mode = 0755
+
+
 
 
 execute_path = os.environ['PATH'] + os.pathsep + hive_bin + os.pathsep + hadoop_bin_dir
@@ -276,7 +315,6 @@ tez_user = config['configurations']['tez-env']['tez_user']
 # Tez jars
 tez_local_api_jars = '/usr/lib/tez/tez*.jar'
 tez_local_lib_jars = '/usr/lib/tez/lib/*.jar'
-app_dir_files = {tez_local_api_jars:None}
 
 # Tez libraries
 tez_lib_uris = default("/configurations/tez-site/tez.lib.uris", None)
@@ -320,8 +358,6 @@ templeton_jar = config['configurations']['webhcat-site']['templeton.jar']
 
 webhcat_server_host = config['clusterHostInfo']['webhcat_server_host']
 
-webhcat_apps_dir = "/apps/webhcat"
-
 hcat_hdfs_user_dir = format("/user/{hcat_user}")
 hcat_hdfs_user_mode = 0755
 webhcat_hdfs_user_dir = format("/user/{webhcat_user}")
@@ -330,17 +366,18 @@ webhcat_hdfs_user_mode = 0755
 security_param = "true" if security_enabled else "false"
 
 import functools
-#create partial functions with common arguments for every HdfsDirectory call
-#to create hdfs directory we need to call params.HdfsDirectory in code
-HdfsDirectory = functools.partial(
-  HdfsDirectory,
-  conf_dir = hadoop_conf_dir,
-  hdfs_user = hdfs_user,
+#create partial functions with common arguments for every HdfsResource call
+#to create hdfs directory we need to call params.HdfsResource in code
+HdfsResource = functools.partial(
+ HdfsResource,
+  user = hdfs_principal_name if security_enabled else hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  bin_dir = hadoop_bin_dir
-)
+  hadoop_bin_dir = hadoop_bin_dir,
+  hadoop_conf_dir = hadoop_conf_dir
+ )
+
 
 # ranger host
 ranger_admin_hosts = default("/clusterHostInfo/ranger_admin_hosts", [])

+ 0 - 71
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py

@@ -20,11 +20,9 @@ Ambari Agent
 """
 """
 import sys
 import sys
 import os.path
 import os.path
-import glob
 from resource_management import *
 from resource_management.core.resources.system import Execute
 from resource_management.libraries.functions.version import compare_versions
-from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
 from ambari_commons import OSConst
 
@@ -46,26 +44,6 @@ def webhcat():
 def webhcat():
   import params
 
-  if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, "2.2.0.0") < 0:
-    params.HdfsDirectory(params.webhcat_apps_dir,
-                         action="create_delayed",
-                         owner=params.webhcat_user,
-                         mode=0755
-    )
-  
-  if params.hcat_hdfs_user_dir != params.webhcat_hdfs_user_dir:
-    params.HdfsDirectory(params.hcat_hdfs_user_dir,
-                         action="create_delayed",
-                         owner=params.hcat_user,
-                         mode=params.hcat_hdfs_user_mode
-    )
-  params.HdfsDirectory(params.webhcat_hdfs_user_dir,
-                       action="create_delayed",
-                       owner=params.webhcat_user,
-                       mode=params.webhcat_hdfs_user_mode
-  )
-  params.HdfsDirectory(None, action="create")
-
   Directory(params.templeton_pid_dir,
             owner=params.webhcat_user,
             mode=0755,
@@ -94,55 +72,6 @@ def webhcat():
             path='/bin'
     )
 
-  # TODO, these checks that are specific to HDP 2.2 and greater should really be in a script specific to that stack.
-  if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, "2.2.0.0") >= 0:
-    copy_tarballs_to_hdfs('hive', 'hive-webhcat', params.webhcat_user, params.hdfs_user, params.user_group)
-    copy_tarballs_to_hdfs('pig', 'hive-webhcat', params.webhcat_user, params.hdfs_user, params.user_group)
-    copy_tarballs_to_hdfs('hadoop-streaming', 'hive-webhcat', params.webhcat_user, params.hdfs_user, params.user_group)
-    copy_tarballs_to_hdfs('sqoop', 'hive-webhcat', params.webhcat_user, params.hdfs_user, params.user_group)
-  else:
-    CopyFromLocal(params.hadoop_streeming_jars,
-                  owner=params.webhcat_user,
-                  mode=0755,
-                  dest_dir=params.webhcat_apps_dir,
-                  kinnit_if_needed=kinit_if_needed,
-                  hdfs_user=params.hdfs_user,
-                  hadoop_bin_dir=params.hadoop_bin_dir,
-                  hadoop_conf_dir=params.hadoop_conf_dir
-    )
-
-    if (os.path.isfile(params.pig_tar_file)):
-      CopyFromLocal(params.pig_tar_file,
-                    owner=params.webhcat_user,
-                    mode=0755,
-                    dest_dir=params.webhcat_apps_dir,
-                    kinnit_if_needed=kinit_if_needed,
-                    hdfs_user=params.hdfs_user,
-                    hadoop_bin_dir=params.hadoop_bin_dir,
-                    hadoop_conf_dir=params.hadoop_conf_dir
-      )
-
-    CopyFromLocal(params.hive_tar_file,
-                  owner=params.webhcat_user,
-                  mode=0755,
-                  dest_dir=params.webhcat_apps_dir,
-                  kinnit_if_needed=kinit_if_needed,
-                  hdfs_user=params.hdfs_user,
-                  hadoop_bin_dir=params.hadoop_bin_dir,
-                  hadoop_conf_dir=params.hadoop_conf_dir
-    )
-
-    if (len(glob.glob(params.sqoop_tar_file)) > 0):
-      CopyFromLocal(params.sqoop_tar_file,
-                    owner=params.webhcat_user,
-                    mode=0755,
-                    dest_dir=params.webhcat_apps_dir,
-                    kinnit_if_needed=kinit_if_needed,
-                    hdfs_user=params.hdfs_user,
-                    hadoop_bin_dir=params.hadoop_bin_dir,
-                    hadoop_conf_dir=params.hadoop_conf_dir
-      )
-
   # Replace _HOST with hostname in relevant principal-related properties
   webhcat_site = params.config['configurations']['webhcat-site'].copy()
   for prop_name in ['templeton.hive.properties', 'templeton.kerberos.principal']:

+ 27 - 1
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service_check.py

@@ -21,6 +21,7 @@ limitations under the License.
 from resource_management import *
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
 from ambari_commons import OSConst
+import time
 
 @OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
 def webhcat_service_check():
@@ -44,8 +45,33 @@ def webhcat_service_check():
   else:
     smokeuser_keytab= "no_keytab"
     smoke_user_principal="no_principal"
+    
+  unique_name = format("{smokeuser}.{timestamp}", timestamp = time.time())
+  templeton_test_script = format("idtest.{unique_name}.pig")
+  templeton_test_input = format("/tmp/idtest.{unique_name}.in")
+  templeton_test_output = format("/tmp/idtest.{unique_name}.out")
 
-  cmd = format("{tmp_dir}/templetonSmoke.sh {webhcat_server_host[0]} {smokeuser} {templeton_port} {smokeuser_keytab}"
+  File(format("{tmp_dir}/{templeton_test_script}"),
+       content = Template("templeton_smoke.pig.j2", templeton_test_input=templeton_test_input, templeton_test_output=templeton_test_output),
+  )
+  
+  params.HdfsResource(format("/tmp/{templeton_test_script}"),
+                      action = "create_on_execute",
+                      type = "file",
+                      source = format("{tmp_dir}/{templeton_test_script}"),
+                      owner = params.smokeuser
+  )
+  
+  params.HdfsResource(templeton_test_input,
+                      action = "create_on_execute",
+                      type = "file",
+                      source = "/etc/passwd",
+                      owner = params.smokeuser
+  )
+  
+  params.HdfsResource(None, action = "execute")
+
+  cmd = format("{tmp_dir}/templetonSmoke.sh {webhcat_server_host[0]} {smokeuser} {templeton_port} {templeton_test_script} {smokeuser_keytab}"
                " {security_param} {kinit_path_local} {smoke_user_principal}")
                " {security_param} {kinit_path_local} {smoke_user_principal}")
 
 
   Execute(cmd,
   Execute(cmd,

+ 24 - 0
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/templates/templeton_smoke.pig.j2

@@ -0,0 +1,24 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+A = load '{{templeton_test_input}}' using PigStorage(':');
+B = foreach A generate \$0 as id; 
+store B into '{{templeton_test_output}}';

+ 9 - 8
ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py

@@ -18,13 +18,14 @@ limitations under the License.
 Ambari Agent
 
 """
+from resource_management import *
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.resources.hdfs_directory import HdfsDirectory
+
 
 
 # server configurations
 # server configurations
 config = Script.get_config()
 config = Script.get_config()
@@ -68,14 +69,14 @@ java64_home = config['hostLevelParams']['java_home']
 log4j_props = config['configurations']['mahout-log4j']['content']
 log4j_props = config['configurations']['mahout-log4j']['content']
 
 
 import functools
 import functools
-#create partial functions with common arguments for every HdfsDirectory call
-#to create hdfs directory we need to call params.HdfsDirectory in code
-HdfsDirectory = functools.partial(
-  HdfsDirectory,
-  conf_dir=hadoop_conf_dir,
-  hdfs_user=hdfs_user,
+#create partial functions with common arguments for every HdfsResource call
+#to create/delete hdfs directory/file/copyfromlocal we need to call params.HdfsResource in code
+HdfsResource = functools.partial(
+  HdfsResource,
+  user=hdfs_user,
   security_enabled = security_enabled,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
   kinit_path_local = kinit_path_local,
-  bin_dir = hadoop_bin_dir
+  hadoop_bin_dir = hadoop_bin_dir,
+  hadoop_conf_dir = hadoop_conf_dir
 )
 )
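The functools.partial wrapper above pre-binds the cluster-wide keyword arguments once, so service code only supplies the per-call ones. A tiny self-contained illustration of the mechanism, with a dummy function and made-up values:

import functools

def hdfs_resource(path, user, hadoop_bin_dir, hadoop_conf_dir, **kwargs):
  # dummy stand-in; shows which arguments arrive pre-bound vs. per call
  print("path=%s user=%s per-call=%s" % (path, user, kwargs))

HdfsResource = functools.partial(
  hdfs_resource,
  user="hdfs",                                           # made-up values
  hadoop_bin_dir="/usr/hdp/current/hadoop-client/bin",
  hadoop_conf_dir="/etc/hadoop/conf"
)

HdfsResource("/user/mahout", type="directory", action="create_on_execute")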

+ 14 - 35
ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/service_check.py

@@ -20,55 +20,34 @@ Ambari Agent
 """
 """
 
 
 from resource_management import *
 from resource_management import *
-from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
 
 
 class MahoutServiceCheck(Script):
 class MahoutServiceCheck(Script):
   def service_check(self, env):
   def service_check(self, env):
     import params
     import params
     env.set_params(params)
     env.set_params(params)
 
 
-    create_input_dir_cmd = format("fs -mkdir /user/{smokeuser}/mahoutsmokeinput")
-    copy_test_file_to_hdfs_cmd = format("fs -put {tmp_dir}/sample-mahout-test.txt /user/{smokeuser}/mahoutsmokeinput/")
     mahout_command = format("mahout seqdirectory --input /user/{smokeuser}/mahoutsmokeinput/sample-mahout-test.txt "
     mahout_command = format("mahout seqdirectory --input /user/{smokeuser}/mahoutsmokeinput/sample-mahout-test.txt "
                             "--output /user/{smokeuser}/mahoutsmokeoutput/ --charset utf-8")
                             "--output /user/{smokeuser}/mahoutsmokeoutput/ --charset utf-8")
     test_command = format("fs -test -e /user/{smokeuser}/mahoutsmokeoutput/_SUCCESS")
     test_command = format("fs -test -e /user/{smokeuser}/mahoutsmokeoutput/_SUCCESS")
-    remove_output_input_dirs_cmd = format("fs -rm -r -f /user/{smokeuser}/mahoutsmokeoutput "
-                                          "/user/{smokeuser}/mahoutsmokeinput")
-
-    ExecuteHadoop( remove_output_input_dirs_cmd,
-                   tries = 3,
-                   try_sleep = 5,
-                   user = params.smokeuser,
-                   conf_dir = params.hadoop_conf_dir,
-                   # for kinit run
-                   keytab = params.smoke_user_keytab,
-                   principal = params.smokeuser_principal,
-                   security_enabled = params.security_enabled,
-                   kinit_path_local = params.kinit_path_local,
-                   bin_dir = params.hadoop_bin_dir
-                   )
-
-    ExecuteHadoop( create_input_dir_cmd,
-                 tries = 3,
-                 try_sleep = 5,
-                 user = params.smokeuser,
-                 conf_dir = params.hadoop_conf_dir,
-                 bin_dir = params.hadoop_bin_dir
-    )
-
+    
     File( format("{tmp_dir}/sample-mahout-test.txt"),
     File( format("{tmp_dir}/sample-mahout-test.txt"),
         content = "Test text which will be converted to sequence file.",
         content = "Test text which will be converted to sequence file.",
         mode = 0755
         mode = 0755
     )
     )
-
-    ExecuteHadoop( copy_test_file_to_hdfs_cmd,
-                   tries = 3,
-                   try_sleep = 5,
-                   user = params.smokeuser,
-                   conf_dir = params.hadoop_conf_dir,
-                   bin_dir = params.hadoop_bin_dir
+    
+    params.HdfsResource(format("/user/{smokeuser}/mahoutsmokeinput"),
+                        action="create_on_execute",
+                        type="directory",
+                        owner=params.smokeuser,
     )
     )
-
+    params.HdfsResource(format("/user/{smokeuser}/mahoutsmokeinput/sample-mahout-test.txt"),
+                        action="create_on_execute",
+                        type="file",
+                        owner=params.smokeuser,
+                        source=format("{tmp_dir}/sample-mahout-test.txt")
+    )
+    params.HdfsResource(None, action="execute")
+    
     Execute( mahout_command,
     Execute( mahout_command,
              tries = 3,
              tries = 3,
              try_sleep = 5,
              try_sleep = 5,

+ 10 - 42
ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/oozieSmoke2.sh

@@ -24,13 +24,14 @@ export os_family=$1
 export oozie_lib_dir=$2
 export oozie_lib_dir=$2
 export oozie_conf_dir=$3
 export oozie_conf_dir=$3
 export oozie_bin_dir=$4
 export oozie_bin_dir=$4
-export hadoop_conf_dir=$5
-export hadoop_bin_dir=$6
-export smoke_test_user=$7
-export security_enabled=$8
-export smoke_user_keytab=$9
-export kinit_path_local=${10}
-export smokeuser_principal=${11}
+export oozie_examples_dir=$5
+export hadoop_conf_dir=$6
+export hadoop_bin_dir=$7
+export smoke_test_user=$8
+export security_enabled=$9
+export smoke_user_keytab=${10}
+export kinit_path_local=${11}
+export smokeuser_principal=${12}
 
 
 function getValueFromField {
 function getValueFromField {
   xmllint $1 | grep "<name>$2</name>" -C 2 | grep '<value>' | cut -d ">" -f2 | cut -d "<" -f1
   xmllint $1 | grep "<name>$2</name>" -C 2 | grep '<value>' | cut -d ">" -f2 | cut -d "<" -f1
@@ -67,37 +68,9 @@ function checkOozieJobStatus {
 }
 }
 
 
 export OOZIE_EXIT_CODE=0
 export OOZIE_EXIT_CODE=0
-export JOBTRACKER=`getValueFromField ${hadoop_conf_dir}/yarn-site.xml yarn.resourcemanager.address`
-export NAMENODE=`getValueFromField ${hadoop_conf_dir}/core-site.xml fs.defaultFS`
 export OOZIE_SERVER=`getValueFromField ${oozie_conf_dir}/oozie-site.xml oozie.base.url | tr '[:upper:]' '[:lower:]'`
 export OOZIE_SERVER=`getValueFromField ${oozie_conf_dir}/oozie-site.xml oozie.base.url | tr '[:upper:]' '[:lower:]'`
 
 
-# search for the oozie examples JAR and, if found, store the directory name
-export OOZIE_EXAMPLES_DIR=`find "${oozie_lib_dir}/" -name "oozie-examples.tar.gz" | xargs dirname`
-if [[ -z "$OOZIE_EXAMPLES_DIR" ]] ; then
-  if [ "$os_family" == "ubuntu" ] ; then
-    LIST_PACKAGE_FILES_CMD='dpkg-query -L'
-  else
-    LIST_PACKAGE_FILES_CMD='rpm -ql'
-  fi
-  export OOZIE_EXAMPLES_DIR=`$LIST_PACKAGE_FILES_CMD oozie-client | grep 'oozie-examples.tar.gz$' | xargs dirname`
-fi
-if [[ -z "$OOZIE_EXAMPLES_DIR" ]] ; then
-  export OOZIE_EXAMPLES_DIR='/usr/hdp/current/oozie-client/doc/'
-else
-  echo "Located Oozie examples JAR at $OOZIE_EXAMPLES_DIR"
-fi
-
-cd $OOZIE_EXAMPLES_DIR
-
-/var/lib/ambari-agent/ambari-sudo.sh tar -zxf oozie-examples.tar.gz
-/var/lib/ambari-agent/ambari-sudo.sh chmod -R o+rx examples
-
-/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|nameNode=hdfs://localhost:8020|nameNode=$NAMENODE|g"  examples/apps/map-reduce/job.properties
-/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|nameNode=hdfs://localhost:9000|nameNode=$NAMENODE|g"  examples/apps/map-reduce/job.properties
-/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|jobTracker=localhost:8021|jobTracker=$JOBTRACKER|g" examples/apps/map-reduce/job.properties
-/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|jobTracker=localhost:9001|jobTracker=$JOBTRACKER|g" examples/apps/map-reduce/job.properties
-/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|jobTracker=localhost:8032|jobTracker=$JOBTRACKER|g" examples/apps/map-reduce/job.properties
-/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|oozie.wf.application.path=hdfs://localhost:9000|oozie.wf.application.path=$NAMENODE|g" examples/apps/map-reduce/job.properties
+cd $oozie_examples_dir
 
 
 if [[ $security_enabled == "True" ]]; then
 if [[ $security_enabled == "True" ]]; then
   kinitcmd="${kinit_path_local} -kt ${smoke_user_keytab} ${smokeuser_principal}; "
   kinitcmd="${kinit_path_local} -kt ${smoke_user_keytab} ${smokeuser_principal}; "
@@ -105,12 +78,7 @@ else
   kinitcmd=""
   kinitcmd=""
 fi
 fi
 
 
-/var/lib/ambari-agent/ambari-sudo.sh su ${smoke_test_user} -s /bin/bash - -c "${hadoop_bin_dir}/hdfs --config ${hadoop_conf_dir} dfs -rm -r examples"
-/var/lib/ambari-agent/ambari-sudo.sh su ${smoke_test_user} -s /bin/bash - -c "${hadoop_bin_dir}/hdfs --config ${hadoop_conf_dir} dfs -rm -r input-data"
-/var/lib/ambari-agent/ambari-sudo.sh su ${smoke_test_user} -s /bin/bash - -c "${hadoop_bin_dir}/hdfs --config ${hadoop_conf_dir} dfs -copyFromLocal $OOZIE_EXAMPLES_DIR/examples examples"
-/var/lib/ambari-agent/ambari-sudo.sh su ${smoke_test_user} -s /bin/bash - -c "${hadoop_bin_dir}/hdfs --config ${hadoop_conf_dir} dfs -copyFromLocal $OOZIE_EXAMPLES_DIR/examples/input-data input-data"
-
-cmd="${kinitcmd}source ${oozie_conf_dir}/oozie-env.sh ; ${oozie_bin_dir}/oozie -Doozie.auth.token.cache=false job -oozie $OOZIE_SERVER -config $OOZIE_EXAMPLES_DIR/examples/apps/map-reduce/job.properties  -run"
+cmd="${kinitcmd}source ${oozie_conf_dir}/oozie-env.sh ; ${oozie_bin_dir}/oozie -Doozie.auth.token.cache=false job -oozie $OOZIE_SERVER -config $oozie_examples_dir/examples/apps/map-reduce/job.properties  -run"
 echo $cmd
 echo $cmd
 job_info=`/var/lib/ambari-agent/ambari-sudo.sh su ${smoke_test_user} -s /bin/bash - -c "$cmd" | grep "job:"`
 job_info=`/var/lib/ambari-agent/ambari-sudo.sh su ${smoke_test_user} -s /bin/bash - -c "$cmd" | grep "job:"`
 job_id="`echo $job_info | cut -d':' -f2`"
 job_id="`echo $job_info | cut -d':' -f2`"

+ 45 - 0
ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/prepareOozieHdfsDirectories.sh

@@ -0,0 +1,45 @@
+#!/usr/bin/env bash
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+export oozie_conf_dir=$1
+export oozie_examples_dir=$2
+export hadoop_conf_dir=$3
+
+function getValueFromField {
+  xmllint $1 | grep "<name>$2</name>" -C 2 | grep '<value>' | cut -d ">" -f2 | cut -d "<" -f1
+  return $?
+}
+
+export JOBTRACKER=`getValueFromField ${hadoop_conf_dir}/yarn-site.xml yarn.resourcemanager.address`
+export NAMENODE=`getValueFromField ${hadoop_conf_dir}/core-site.xml fs.defaultFS`
+
+cd $oozie_examples_dir
+
+/var/lib/ambari-agent/ambari-sudo.sh tar -zxf oozie-examples.tar.gz
+/var/lib/ambari-agent/ambari-sudo.sh chmod -R o+rx examples
+
+/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|nameNode=hdfs://localhost:8020|nameNode=$NAMENODE|g"  examples/apps/map-reduce/job.properties
+/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|nameNode=hdfs://localhost:9000|nameNode=$NAMENODE|g"  examples/apps/map-reduce/job.properties
+/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|jobTracker=localhost:8021|jobTracker=$JOBTRACKER|g" examples/apps/map-reduce/job.properties
+/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|jobTracker=localhost:9001|jobTracker=$JOBTRACKER|g" examples/apps/map-reduce/job.properties
+/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|jobTracker=localhost:8032|jobTracker=$JOBTRACKER|g" examples/apps/map-reduce/job.properties
+/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|oozie.wf.application.path=hdfs://localhost:9000|oozie.wf.application.path=$NAMENODE|g" examples/apps/map-reduce/job.properties

+ 4 - 2
ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py

@@ -85,11 +85,13 @@ def oozie(is_server=False):
   import params
   import params
 
 
   if is_server:
   if is_server:
-    params.HdfsDirectory(params.oozie_hdfs_user_dir,
-                         action="create",
+    params.HdfsResource(params.oozie_hdfs_user_dir,
+                         type="directory",
+                         action="create_on_execute",
                          owner=params.oozie_user,
                          owner=params.oozie_user,
                          mode=params.oozie_hdfs_user_mode
                          mode=params.oozie_hdfs_user_mode
     )
     )
+    params.HdfsResource(None, action="execute")
   Directory(params.conf_dir,
   Directory(params.conf_dir,
              recursive = True,
              recursive = True,
              owner = params.oozie_user,
              owner = params.oozie_user,

+ 10 - 6
ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server_upgrade.py

@@ -185,12 +185,16 @@ def upgrade_oozie():
     command = format("{kinit_path_local} -kt {oozie_keytab} {oozie_principal_with_host}")
     command = format("{kinit_path_local} -kt {oozie_keytab} {oozie_principal_with_host}")
     Execute(command, user=params.oozie_user)
     Execute(command, user=params.oozie_user)
 
 
-  # ensure that HDFS is prepared to receive the new sharelib
-  command = format("hdfs dfs -chown oozie:hadoop {oozie_hdfs_user_dir}/share")
-  Execute(command, user=params.oozie_user)
-
-  command = format("hdfs dfs -chmod -R 755 {oozie_hdfs_user_dir}/share")
-  Execute(command, user=params.oozie_user)
+  
+  params.HdfsResource(format("{oozie_hdfs_user_dir}/share"),
+                      action = "create_on_execute",
+                      type = "directory",
+                      owner = "oozie",
+                      group = "hadoop",
+                      mode = 0755,
+                      recursive_chmod = True
+  )
+  params.HdfsResource(None, action = "execute")
 
 
   # upgrade oozie DB
   # upgrade oozie DB
   command = format("{oozie_home}/bin/ooziedb.sh upgrade -run")
   command = format("{oozie_home}/bin/ooziedb.sh upgrade -run")

+ 13 - 9
ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py

@@ -17,6 +17,7 @@ See the License for the specific language governing permissions and
 limitations under the License.
 limitations under the License.
 
 
 """
 """
+from resource_management import *
 from ambari_commons.constants import AMBARI_SUDO_BINARY
 from ambari_commons.constants import AMBARI_SUDO_BINARY
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import conf_select
@@ -25,7 +26,7 @@ from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.functions import get_port_from_url
 from resource_management.libraries.functions import get_port_from_url
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.resources.hdfs_directory import HdfsDirectory
+
 from resource_management.libraries.functions.get_lzo_packages import get_lzo_packages
 from resource_management.libraries.functions.get_lzo_packages import get_lzo_packages
 
 
 import status_params
 import status_params
@@ -68,6 +69,7 @@ if Script.is_hdp_stack_greater_or_equal("2.2"):
   oozie_shared_lib = format("/usr/hdp/current/{oozie_root}/share")
   oozie_shared_lib = format("/usr/hdp/current/{oozie_root}/share")
   oozie_home = format("/usr/hdp/current/{oozie_root}")
   oozie_home = format("/usr/hdp/current/{oozie_root}")
   oozie_bin_dir = format("/usr/hdp/current/{oozie_root}/bin")
   oozie_bin_dir = format("/usr/hdp/current/{oozie_root}/bin")
+  oozie_examples_regex = format("/usr/hdp/current/{oozie_root}/doc")
   falcon_home = '/usr/hdp/current/falcon-client'
   falcon_home = '/usr/hdp/current/falcon-client'
 
 
   conf_dir = format("/usr/hdp/current/{oozie_root}/conf")
   conf_dir = format("/usr/hdp/current/{oozie_root}/conf")
@@ -88,6 +90,7 @@ else:
   falcon_home = '/usr/lib/falcon'
   falcon_home = '/usr/lib/falcon'
   conf_dir = "/etc/oozie/conf"
   conf_dir = "/etc/oozie/conf"
   hive_conf_dir = "/etc/oozie/conf/action-conf/hive"
   hive_conf_dir = "/etc/oozie/conf/action-conf/hive"
+  oozie_examples_regex = "/usr/share/doc/oozie-*"
 
 
 execute_path = oozie_bin_dir + os.pathsep + hadoop_bin_dir
 execute_path = oozie_bin_dir + os.pathsep + hadoop_bin_dir
 
 
@@ -195,17 +198,18 @@ hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 import functools
 import functools
-#create partial functions with common arguments for every HdfsDirectory call
-#to create hdfs directory we need to call params.HdfsDirectory in code
-HdfsDirectory = functools.partial(
-  HdfsDirectory,
-  conf_dir=hadoop_conf_dir,
-  hdfs_user=hdfs_user,
+#create partial functions with common arguments for every HdfsResource call
+#to create hdfs directory we need to call params.HdfsResource in code
+HdfsResource = functools.partial(
+  HdfsResource,
+  user=hdfs_user,
   security_enabled = security_enabled,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
   kinit_path_local = kinit_path_local,
-  bin_dir = hadoop_bin_dir
-)
+  hadoop_bin_dir = hadoop_bin_dir,
+  hadoop_conf_dir = hadoop_conf_dir
+ )
+
 
 
 # The logic for LZO also exists in HDFS' params.py
 # The logic for LZO also exists in HDFS' params.py
 io_compression_codecs = default("/configurations/core-site/io.compression.codecs", None)
 io_compression_codecs = default("/configurations/core-site/io.compression.codecs", None)

+ 31 - 7
ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/service_check.py

@@ -26,6 +26,7 @@ from resource_management.libraries.script import Script
 from ambari_commons.os_family_impl import OsFamilyImpl
 from ambari_commons.os_family_impl import OsFamilyImpl
 from ambari_commons import OSConst
 from ambari_commons import OSConst
 import os
 import os
+import glob
 
 
 
 
 class OozieServiceCheck(Script):
 class OozieServiceCheck(Script):
@@ -39,30 +40,53 @@ class OozieServiceCheckDefault(OozieServiceCheck):
     env.set_params(params)
     env.set_params(params)
 
 
     # on HDP1 this file is different
     # on HDP1 this file is different
+    prepare_hdfs_file_name = 'prepareOozieHdfsDirectories.sh'
     smoke_test_file_name = 'oozieSmoke2.sh'
     smoke_test_file_name = 'oozieSmoke2.sh'
 
 
-    OozieServiceCheckDefault.oozie_smoke_shell_file(smoke_test_file_name)
+    OozieServiceCheckDefault.oozie_smoke_shell_file(smoke_test_file_name, prepare_hdfs_file_name)
 
 
   @staticmethod
   @staticmethod
-  def oozie_smoke_shell_file(file_name):
+  def oozie_smoke_shell_file(file_name, prepare_hdfs_file_name):
     import params
     import params
 
 
     File(format("{tmp_dir}/{file_name}"),
     File(format("{tmp_dir}/{file_name}"),
          content=StaticFile(file_name),
          content=StaticFile(file_name),
          mode=0755
          mode=0755
     )
     )
+    File(format("{tmp_dir}/{prepare_hdfs_file_name}"),
+         content=StaticFile(prepare_hdfs_file_name),
+         mode=0755
+    )
 
 
     os_family = System.get_instance().os_family
     os_family = System.get_instance().os_family
+    oozie_examples_dir = glob.glob(params.oozie_examples_regex)[0]
+    
+    Execute(format("{tmp_dir}/{prepare_hdfs_file_name} {conf_dir} {oozie_examples_dir} {hadoop_conf_dir} "),
+            tries=3,
+            try_sleep=5,
+            logoutput=True
+    )
+    
+    params.HdfsResource(format('/user/{smokeuser}/examples'),
+      action = "create_on_execute",
+      type = "directory",
+      source = format("{oozie_examples_dir}/examples"),
+    )
+    params.HdfsResource(format('/user/{smokeuser}/input-data'),
+      action = "create_on_execute",
+      type = "directory",
+      source = format("{oozie_examples_dir}/examples/input-data"),
+    )
+    params.HdfsResource(None, action="execute")
 
 
     if params.security_enabled:
     if params.security_enabled:
       sh_cmd = format(
       sh_cmd = format(
-        "{tmp_dir}/{file_name} {os_family} {oozie_lib_dir} {conf_dir} {oozie_bin_dir} {hadoop_conf_dir} {hadoop_bin_dir} {smokeuser} {security_enabled} {smokeuser_keytab} {kinit_path_local} {smokeuser_principal}")
+        "{tmp_dir}/{file_name} {os_family} {oozie_lib_dir} {conf_dir} {oozie_bin_dir} {oozie_examples_dir} {hadoop_conf_dir} {hadoop_bin_dir} {smokeuser} {security_enabled} {smokeuser_keytab} {kinit_path_local} {smokeuser_principal}")
     else:
     else:
       sh_cmd = format(
       sh_cmd = format(
-        "{tmp_dir}/{file_name} {os_family} {oozie_lib_dir} {conf_dir} {oozie_bin_dir} {hadoop_conf_dir} {hadoop_bin_dir} {smokeuser} {security_enabled}")
-
-    Execute(format("{tmp_dir}/{file_name}"),
-            command=sh_cmd,
+        "{tmp_dir}/{file_name} {os_family} {oozie_lib_dir} {conf_dir} {oozie_bin_dir} {oozie_examples_dir} {hadoop_conf_dir} {hadoop_bin_dir} {smokeuser} {security_enabled}")
+    
+    Execute(sh_cmd,
             path=params.execute_path,
             path=params.execute_path,
             tries=3,
             tries=3,
             try_sleep=5,
             try_sleep=5,
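glob.glob() above turns oozie_examples_regex into a concrete directory before it is interpolated into the shell commands; the expansion itself is plain Python, roughly like this standalone sketch (note the service check takes the first match without a fallback):

import glob

matches = glob.glob("/usr/share/doc/oozie-*")   # same pattern the non-HDP-2.2 branch uses
if matches:
    oozie_examples_dir = matches[0]             # first hit wins, as in the check above
    print(oozie_examples_dir)
else:
    print("no oozie examples package found")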

+ 16 - 9
ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py

@@ -18,13 +18,15 @@ limitations under the License.
 Ambari Agent
 Ambari Agent
 
 
 """
 """
+from resource_management import *
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.resources.hdfs_directory import HdfsDirectory
+
+import os
 
 
 # server configurations
 # server configurations
 config = Script.get_config()
 config = Script.get_config()
@@ -51,6 +53,10 @@ if Script.is_hdp_stack_greater_or_equal("2.2"):
   hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
   hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
   hadoop_home = '/usr/hdp/current/hadoop-client'
   hadoop_home = '/usr/hdp/current/hadoop-client'
   pig_bin_dir = '/usr/hdp/current/pig-client/bin'
   pig_bin_dir = '/usr/hdp/current/pig-client/bin'
+  
+  tez_tar_source = config['configurations']['cluster-env']['tez_tar_source']
+  tez_tar_destination = config['configurations']['cluster-env']['tez_tar_destination_folder'] + "/" + os.path.basename(tez_tar_source)
+
 
 
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
@@ -71,14 +77,15 @@ pig_properties = config['configurations']['pig-properties']['content']
 log4j_props = config['configurations']['pig-log4j']['content']
 log4j_props = config['configurations']['pig-log4j']['content']
 
 
 import functools
 import functools
-#create partial functions with common arguments for every HdfsDirectory call
-#to create hdfs directory we need to call params.HdfsDirectory in code
-HdfsDirectory = functools.partial(
-  HdfsDirectory,
-  conf_dir=hadoop_conf_dir,
-  hdfs_user=hdfs_user,
+#create partial functions with common arguments for every HdfsResource call
+#to create hdfs directory we need to call params.HdfsResource in code
+HdfsResource = functools.partial(
+  HdfsResource,
+  user=hdfs_principal_name if security_enabled else hdfs_user,
   security_enabled = security_enabled,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
   kinit_path_local = kinit_path_local,
-  bin_dir = hadoop_bin_dir
-)
+  hadoop_bin_dir = hadoop_bin_dir,
+  hadoop_conf_dir = hadoop_conf_dir
+ )
+
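tez_tar_destination above is just the configured destination folder joined with the tarball's file name; for example (both values below are hypothetical, the real ones come from cluster-env):

import os

tez_tar_source = "/usr/hdp/current/tez-client/lib/tez.tar.gz"           # hypothetical
tez_tar_destination_folder = "/hdp/apps/{{ hdp_stack_version }}/tez"    # hypothetical
tez_tar_destination = tez_tar_destination_folder + "/" + os.path.basename(tez_tar_source)
print(tez_tar_destination)   # /hdp/apps/{{ hdp_stack_version }}/tez/tez.tar.gz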

+ 35 - 32
ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py

@@ -20,7 +20,6 @@ Ambari Agent
 """
 """
 
 
 from resource_management import *
 from resource_management import *
-from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
 from resource_management.libraries import functions
 from resource_management.libraries import functions
 from ambari_commons import OSConst
 from ambari_commons import OSConst
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
@@ -34,26 +33,23 @@ class PigServiceCheckLinux(PigServiceCheck):
     import params
     import params
     env.set_params(params)
     env.set_params(params)
 
 
-    input_file = 'passwd'
-    output_file = "pigsmoke.out"
-
-    cleanup_cmd = format("dfs -rmr {output_file} {input_file}")
-    #cleanup put below to handle retries; if retrying there wil be a stale file that needs cleanup; exit code is fn of second command
-    create_file_cmd = format("{cleanup_cmd}; hadoop --config {hadoop_conf_dir} dfs -put /etc/passwd {input_file} ") #TODO: inconsistent that second command needs hadoop
-    test_cmd = format("fs -test -e {output_file}")
-
-    ExecuteHadoop( create_file_cmd,
-      tries     = 3,
-      try_sleep = 5,
-      user      = params.smokeuser,
-      conf_dir = params.hadoop_conf_dir,
-      # for kinit run
-      keytab = params.smoke_user_keytab,
-      principal = params.smokeuser_principal,
-      security_enabled = params.security_enabled,
-      kinit_path_local = params.kinit_path_local,
-      bin_dir = params.hadoop_bin_dir
+    input_file = format('/user/{smokeuser}/passwd')
+    output_dir = format('/user/{smokeuser}/pigsmoke.out')
+
+    params.HdfsResource(output_dir,
+                        type="directory",
+                        action="delete_on_execute",
+                        user=params.smokeuser,
+                        )
+    params.HdfsResource(input_file,
+                        type="file",
+                        source="/etc/passwd",
+                        action="create_on_execute",
+                        user=params.smokeuser,
     )
     )
+    params.HdfsResource(None, action="execute")
+ 
+
 
 
     File( format("{tmp_dir}/pigSmoke.sh"),
     File( format("{tmp_dir}/pigSmoke.sh"),
       content = StaticFile("pigSmoke.sh"),
       content = StaticFile("pigSmoke.sh"),
@@ -68,6 +64,7 @@ class PigServiceCheckLinux(PigServiceCheck):
       user      = params.smokeuser
       user      = params.smokeuser
     )
     )
 
 
+    test_cmd = format("fs -test -e {output_dir}")
     ExecuteHadoop( test_cmd,
     ExecuteHadoop( test_cmd,
       user      = params.smokeuser,
       user      = params.smokeuser,
       conf_dir = params.hadoop_conf_dir,
       conf_dir = params.hadoop_conf_dir,
@@ -76,21 +73,27 @@ class PigServiceCheckLinux(PigServiceCheck):
 
 
     if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.2') >= 0:
     if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.2') >= 0:
       # cleanup results from previous test
       # cleanup results from previous test
-      ExecuteHadoop( create_file_cmd,
-        tries     = 3,
-        try_sleep = 5,
-        user      = params.smokeuser,
-        conf_dir = params.hadoop_conf_dir,
-        # for kinit run
-        keytab = params.smoke_user_keytab,
-        principal = params.smokeuser_principal,
-        security_enabled = params.security_enabled,
-        kinit_path_local = params.kinit_path_local,
-        bin_dir = params.hadoop_bin_dir
+      params.HdfsResource(output_dir,
+                          type="directory",
+                          action="delete_on_execute",
+                          user=params.smokeuser,
+      )
+      params.HdfsResource(input_file,
+                          type="file",
+                          source="/etc/passwd",
+                          action="create_on_execute",
+                          user=params.smokeuser,
       )
       )
 
 
       # Check for Pig-on-Tez
       # Check for Pig-on-Tez
-      copy_tarballs_to_hdfs('tez', 'hadoop-client', params.smokeuser, params.hdfs_user, params.user_group)
+      params.HdfsResource(InlineTemplate(params.tez_tar_destination).get_content(),
+                          type="file",
+                          action="create_on_execute",
+                          source=params.tez_tar_source,
+                          group=params.user_group,
+                          owner=params.hdfs_user
+      )
+      params.HdfsResource(None, action="execute")
 
 
       if params.security_enabled:
       if params.security_enabled:
         kinit_cmd = format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser_principal};")
         kinit_cmd = format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser_principal};")

+ 8 - 2
ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/job_history_server.py

@@ -22,7 +22,6 @@ import sys
 import os
 import os
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
 from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
-from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.check_process_status import check_process_status
 from resource_management.libraries.functions.check_process_status import check_process_status
 from resource_management.core.resources import Execute
 from resource_management.core.resources import Execute
@@ -77,7 +76,14 @@ class JobHistoryServer(Script):
     if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
     if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
       conf_select.select(params.stack_name, "spark", params.version)
       conf_select.select(params.stack_name, "spark", params.version)
       Execute(format("hdp-select set spark-historyserver {version}"))
       Execute(format("hdp-select set spark-historyserver {version}"))
-      copy_tarballs_to_hdfs('tez', 'spark-historyserver', params.spark_user, params.hdfs_user, params.user_group)
+      params.HdfsResource(InlineTemplate(params.tez_tar_destination).get_content(),
+                          type="file",
+                          action="create_on_execute",
+                          source=params.tez_tar_source,
+                          group=params.user_group,
+                          owner=params.hdfs_user
+      )
+      params.HdfsResource(None, action="execute")
 
 
 if __name__ == "__main__":
 if __name__ == "__main__":
   JobHistoryServer().execute()
   JobHistoryServer().execute()

+ 12 - 9
ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py

@@ -23,6 +23,7 @@ import status_params
 
 
 from setup_spark import *
 from setup_spark import *
 
 
+from resource_management import *
 import resource_management.libraries.functions
 import resource_management.libraries.functions
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions import format
@@ -30,7 +31,7 @@ from resource_management.libraries.functions.version import format_hdp_stack_ver
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.resources.hdfs_directory import HdfsDirectory
+
 
 
 # a map of the Ambari role to the component name
 # a map of the Ambari role to the component name
 # for use with /usr/hdp/current/<component>
 # for use with /usr/hdp/current/<component>
@@ -69,6 +70,8 @@ if Script.is_hdp_stack_greater_or_equal("2.2"):
   spark_log_dir = config['configurations']['spark-env']['spark_log_dir']
   spark_log_dir = config['configurations']['spark-env']['spark_log_dir']
   spark_pid_dir = status_params.spark_pid_dir
   spark_pid_dir = status_params.spark_pid_dir
   spark_home = format("/usr/hdp/current/{component_directory}")
   spark_home = format("/usr/hdp/current/{component_directory}")
+  tez_tar_source = config['configurations']['cluster-env']['tez_tar_source']
+  tez_tar_destination = config['configurations']['cluster-env']['tez_tar_destination_folder'] + "/" + os.path.basename(tez_tar_source)
 
 
 
 
 java_home = config['hostLevelParams']['java_home']
 java_home = config['hostLevelParams']['java_home']
@@ -155,14 +158,14 @@ if security_enabled:
 
 
 
 
 import functools
 import functools
-#create partial functions with common arguments for every HdfsDirectory call
-#to create hdfs directory we need to call params.HdfsDirectory in code
-HdfsDirectory = functools.partial(
-  HdfsDirectory,
-  conf_dir=hadoop_conf_dir,
-  hdfs_user=hdfs_user,
+#create partial functions with common arguments for every HdfsResource call
+#to create/delete hdfs directory/file/copyfromlocal we need to call params.HdfsResource in code
+HdfsResource = functools.partial(
+  HdfsResource,
+  user=hdfs_user,
   security_enabled = security_enabled,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
   kinit_path_local = kinit_path_local,
-  bin_dir = hadoop_bin_dir
-)
+  hadoop_bin_dir = hadoop_bin_dir,
+  hadoop_conf_dir = hadoop_conf_dir
+ )

+ 4 - 2
ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/setup_spark.py

@@ -37,11 +37,13 @@ def setup_spark(env, type, action = None):
             recursive=True
             recursive=True
   )
   )
   if type == 'server' and action == 'config':
   if type == 'server' and action == 'config':
-    params.HdfsDirectory(params.spark_hdfs_user_dir,
-                       action="create",
+    params.HdfsResource(params.spark_hdfs_user_dir,
+                       type="directory",
+                       action="create_on_execute",
                        owner=params.spark_user,
                        owner=params.spark_user,
                        mode=0775
                        mode=0775
     )
     )
+    params.HdfsResource(None, action="execute")
     
     
   PropertiesFile(format("{spark_conf}/spark-defaults.conf"),
   PropertiesFile(format("{spark_conf}/spark-defaults.conf"),
     properties = params.config['configurations']['spark-defaults'],
     properties = params.config['configurations']['spark-defaults'],

+ 8 - 1
ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_service.py

@@ -27,7 +27,14 @@ def spark_service(action):
       spark_kinit_cmd = format("{kinit_path_local} -kt {spark_kerberos_keytab} {spark_principal}; ")
       spark_kinit_cmd = format("{kinit_path_local} -kt {spark_kerberos_keytab} {spark_principal}; ")
       Execute(spark_kinit_cmd, user=params.spark_user)
       Execute(spark_kinit_cmd, user=params.spark_user)
 
 
-    copy_tarballs_to_hdfs('tez', 'spark-historyserver', params.spark_user, params.hdfs_user, params.user_group)
+    params.HdfsResource(InlineTemplate(params.tez_tar_destination).get_content(),
+                        type="file",
+                        action="create_on_execute",
+                        source=params.tez_tar_source,
+                        group=params.user_group,
+                        owner=params.hdfs_user
+    )
+    params.HdfsResource(None, action="execute")
 
 
     no_op_test = format(
     no_op_test = format(
       'ls {spark_history_server_pid_file} >/dev/null 2>&1 && ps -p `cat {spark_history_server_pid_file}` >/dev/null 2>&1')
       'ls {spark_history_server_pid_file} >/dev/null 2>&1 && ps -p `cat {spark_history_server_pid_file}` >/dev/null 2>&1')

+ 5 - 7
ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/configuration/storm-env.xml

@@ -62,12 +62,10 @@ export STORM_LOG_DIR={{log_dir}}
     </value>
     </value>
   </property>
   </property>
 
 
-  <configuration>
-    <property>
-      <name>nimbus_seeds_supported</name>
-      <value>false</value>
-      <description></description>
-    </property>
-  </configuration>
+   <property>
+     <name>nimbus_seeds_supported</name>
+     <value>false</value>
+     <description></description>
+   </property>
 
 
 </configuration>
 </configuration>

+ 14 - 11
ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py

@@ -19,13 +19,14 @@ limitations under the License.
 """
 """
 import os
 import os
 
 
+from resource_management import *
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.resources.hdfs_directory import HdfsDirectory
+
 
 
 # server configurations
 # server configurations
 config = Script.get_config()
 config = Script.get_config()
@@ -75,15 +76,17 @@ user_group = config['configurations']['cluster-env']['user_group']
 tez_env_sh_template = config['configurations']['tez-env']['content']
 tez_env_sh_template = config['configurations']['tez-env']['content']
 
 
 import functools
 import functools
-# Create partial functions with common arguments for every HdfsDirectory call
-# to create hdfs directory we need to call params.HdfsDirectory in code
-HdfsDirectory = functools.partial(
-  HdfsDirectory,
-  conf_dir=hadoop_conf_dir,
-  hdfs_user=hdfs_user,
-  security_enabled=security_enabled,
-  keytab=hdfs_user_keytab,
-  kinit_path_local=kinit_path_local,
-  bin_dir=hadoop_bin_dir
+#create partial functions with common arguments for every HdfsResource call
+#to create/delete/copyfromlocal hdfs directories/files we need to call params.HdfsResource in code
+HdfsResource = functools.partial(
+  HdfsResource,
+  user=hdfs_user,
+  security_enabled = security_enabled,
+  keytab = hdfs_user_keytab,
+  kinit_path_local = kinit_path_local,
+  hadoop_bin_dir = hadoop_bin_dir,
+  hadoop_conf_dir = hadoop_conf_dir
 )
 )
 
 
+
+

+ 13 - 40
ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/service_check.py

@@ -21,7 +21,6 @@ Ambari Agent
 
 
 from resource_management import *
 from resource_management import *
 from resource_management.libraries.functions.version import compare_versions
 from resource_management.libraries.functions.version import compare_versions
-from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
 from ambari_commons import OSConst
 from ambari_commons import OSConst
 from ambari_commons.os_family_impl import OsFamilyImpl
 from ambari_commons.os_family_impl import OsFamilyImpl
 
 
@@ -38,53 +37,27 @@ class TezServiceCheckLinux(TezServiceCheck):
       hdp_version = functions.get_hdp_version("hadoop-client")
       hdp_version = functions.get_hdp_version("hadoop-client")
 
 
     path_to_tez_jar = format(params.path_to_tez_examples_jar)
     path_to_tez_jar = format(params.path_to_tez_examples_jar)
-    copy_test_file_to_hdfs_cmd =  format("fs -put {tmp_dir}/sample-tez-test /tmp/tezsmokeinput/")
-    create_input_dir_cmd = format("fs -mkdir /tmp/tezsmokeinput")
     wordcount_command = format("jar {path_to_tez_jar} orderedwordcount "
     wordcount_command = format("jar {path_to_tez_jar} orderedwordcount "
                                "/tmp/tezsmokeinput/sample-tez-test /tmp/tezsmokeoutput/")
                                "/tmp/tezsmokeinput/sample-tez-test /tmp/tezsmokeoutput/")
     test_command = format("fs -test -e /tmp/tezsmokeoutput/_SUCCESS")
     test_command = format("fs -test -e /tmp/tezsmokeoutput/_SUCCESS")
-    remove_output_input_dirs_cmd = "fs -rm -r -f /tmp/tezsmokeinput /tmp/tezsmokeoutput"
-
-
-    ExecuteHadoop( remove_output_input_dirs_cmd,
-                   tries = 3,
-                   try_sleep = 5,
-                   user = params.smokeuser,
-                   conf_dir = params.hadoop_conf_dir,
-                   # for kinit run
-                   keytab = params.smoke_user_keytab,
-                   principal = params.smokeuser_principal,
-                   security_enabled = params.security_enabled,
-                   kinit_path_local = params.kinit_path_local,
-                   bin_dir = params.hadoop_bin_dir
-    )
-
-    params.HdfsDirectory("/tmp",
-                         action="create",
-                         owner=params.hdfs_user,
-                         mode=0777
-    )
-
-    ExecuteHadoop( create_input_dir_cmd,
-                   tries = 3,
-                   try_sleep = 5,
-                   user = params.smokeuser,
-                   conf_dir = params.hadoop_conf_dir,
-                   bin_dir = params.hadoop_bin_dir
-    )
-
+    
     File( format("{tmp_dir}/sample-tez-test"),
     File( format("{tmp_dir}/sample-tez-test"),
           content = "foo\nbar\nfoo\nbar\nfoo",
           content = "foo\nbar\nfoo\nbar\nfoo",
           mode = 0755
           mode = 0755
     )
     )
-
-    ExecuteHadoop( copy_test_file_to_hdfs_cmd,
-                   tries = 3,
-                   try_sleep = 5,
-                   user = params.smokeuser,
-                   conf_dir = params.hadoop_conf_dir,
-                   bin_dir = params.hadoop_bin_dir
+    
+    params.HdfsResource("/tmp/tezsmokeinput",
+                        action="create_on_execute",
+                        type="directory",
+                        owner=params.smokeuser,
+    )
+    params.HdfsResource("/tmp/tezsmokeinput/sample-tez-test",
+                        action="create_on_execute",
+                        type="file",
+                        owner=params.smokeuser,
+                        source=format("{tmp_dir}/sample-tez-test"),
     )
     )
+    params.HdfsResource(None, action="execute")
 
 
     ExecuteHadoop( wordcount_command,
     ExecuteHadoop( wordcount_command,
                    tries = 3,
                    tries = 3,

+ 22 - 3
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py

@@ -21,7 +21,6 @@ Ambari Agent
 
 
 from resource_management import *
 from resource_management import *
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
 from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
 from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.security_commons import build_expectations, \
 from resource_management.libraries.functions.security_commons import build_expectations, \
@@ -73,13 +72,33 @@ class HistoryServerDefault(HistoryServer):
     if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
     if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
       conf_select.select(params.stack_name, "hadoop", params.version)
       conf_select.select(params.stack_name, "hadoop", params.version)
       Execute(format("hdp-select set hadoop-mapreduce-historyserver {version}"))
       Execute(format("hdp-select set hadoop-mapreduce-historyserver {version}"))
-      copy_tarballs_to_hdfs('mapreduce', 'hadoop-mapreduce-historyserver', params.mapred_user, params.hdfs_user, params.user_group)
+      params.HdfsResource(InlineTemplate(params.mapreduce_tar_destination).get_content(),
+                          type="file",
+                          action="create_on_execute",
+                          source=params.mapreduce_tar_source,
+                          owner=params.hdfs_user,
+                          group=params.user_group,
+                          mode=0444,
+      )
+      params.HdfsResource(None, action="execute")
+
 
 
   def start(self, env, rolling_restart=False):
   def start(self, env, rolling_restart=False):
     import params
     import params
     env.set_params(params)
     env.set_params(params)
     self.configure(env) # FOR SECURITY
     self.configure(env) # FOR SECURITY
-    copy_tarballs_to_hdfs('mapreduce', 'hadoop-mapreduce-historyserver', params.mapred_user, params.hdfs_user, params.user_group)
+    
+    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
+      params.HdfsResource(InlineTemplate(params.mapreduce_tar_destination).get_content(),
+                          type="file",
+                          action="create_on_execute",
+                          source=params.mapreduce_tar_source,
+                          owner=params.hdfs_user,
+                          group=params.user_group,
+                          mode=0444,
+      )
+      params.HdfsResource(None, action="execute")
+
     service('historyserver', action='start', serviceName='mapreduce')
     service('historyserver', action='start', serviceName='mapreduce')
 
 
   def status(self, env):
   def status(self, env):
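The tarball upload above only runs when the stack version is 2.2.0.0 or newer; a rough standalone approximation of that gate (the real code goes through format_hdp_stack_version and compare_versions):

def version_tuple(v):
    # crude dotted-version comparison, only meant to illustrate the gate
    return tuple(int(p) for p in v.split(".") if p.isdigit())

current_version = "2.3.0.0"                              # made-up stack version
if version_tuple(current_version) >= version_tuple("2.2.0.0"):
    print("would push the mapreduce tarball to HDFS")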

+ 30 - 38
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/install_jars.py

@@ -20,6 +20,7 @@ limitations under the License.
 
 
 from resource_management import *
 from resource_management import *
 import os
 import os
+import glob
 
 
 def install_tez_jars():
 def install_tez_jars():
   import params
   import params
@@ -29,25 +30,12 @@ def install_tez_jars():
   # If tez libraries are to be stored in hdfs
   # If tez libraries are to be stored in hdfs
   if destination_hdfs_dirs:
   if destination_hdfs_dirs:
     for hdfs_dir in destination_hdfs_dirs:
     for hdfs_dir in destination_hdfs_dirs:
-      params.HdfsDirectory(hdfs_dir,
-                           action="create_delayed",
+      params.HdfsResource(hdfs_dir,
+                           type="directory",
+                           action="create_on_execute",
                            owner=params.tez_user,
                            owner=params.tez_user,
                            mode=0755
                            mode=0755
       )
       )
-    pass
-    params.HdfsDirectory(None, action="create")
-
-    if params.security_enabled:
-      kinit_if_needed = format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_principal_name};")
-    else:
-      kinit_if_needed = ""
-
-    if kinit_if_needed:
-      Execute(kinit_if_needed,
-              user=params.tez_user,
-              path='/bin'
-      )
-    pass
 
 
     app_dir_path = None
     app_dir_path = None
     lib_dir_path = None
     lib_dir_path = None
@@ -62,30 +50,34 @@ def install_tez_jars():
       pass
       pass
     pass
     pass
 
 
+    tez_jars = {}
     if app_dir_path:
     if app_dir_path:
-      for scr_file, dest_file in params.app_dir_files.iteritems():
-        CopyFromLocal(scr_file,
-                      mode=0755,
-                      owner=params.tez_user,
-                      dest_dir=app_dir_path,
-                      dest_file=dest_file,
-                      kinnit_if_needed=kinit_if_needed,
-                      hdfs_user=params.hdfs_user,
-                      hadoop_bin_dir=params.hadoop_bin_dir,
-                      hadoop_conf_dir=params.hadoop_conf_dir
-        )
-
+      tez_jars[params.tez_local_api_jars] = app_dir_path
     if lib_dir_path:
     if lib_dir_path:
-      CopyFromLocal(params.tez_local_lib_jars,
-                    mode=0755,
-                    owner=params.tez_user,
-                    dest_dir=lib_dir_path,
-                    kinnit_if_needed=kinit_if_needed,
-                    hdfs_user=params.hdfs_user,
-                    hadoop_bin_dir=params.hadoop_bin_dir,
-                    hadoop_conf_dir=params.hadoop_conf_dir
-      )
-    pass
+      tez_jars[params.tez_local_lib_jars] = lib_dir_path
+
+    for src_file_regex, dest_dir in tez_jars.iteritems():
+      for src_filepath in glob.glob(src_file_regex):
+        src_filename = os.path.basename(src_filepath)
+        params.HdfsResource(format("{dest_dir}/{src_filename}"),
+                            type="file",
+                            action="create_on_execute",
+                            source=src_filepath,
+                            mode=0755,
+                            owner=params.tez_user
+         )
+        
+    params.HdfsResource(None, action="execute")
 
 
 
 
 def get_tez_hdfs_dir_paths(tez_lib_uris = None):
 def get_tez_hdfs_dir_paths(tez_lib_uris = None):
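The glob loop in the hunk above expands each local jar pattern and derives the HDFS file name with os.path.basename; the expansion itself is plain Python, e.g. (the pattern and target directory are placeholders):

import glob
import os

tez_jars = {"/usr/lib/tez/*.jar": "/apps/tez/lib"}      # placeholder mapping
for src_file_regex, dest_dir in tez_jars.items():
    for src_filepath in glob.glob(src_file_regex):
        src_filename = os.path.basename(src_filepath)
        print("%s -> %s/%s" % (src_filepath, dest_dir, src_filename))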

+ 8 - 19
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapred_service_check.py

@@ -117,34 +117,23 @@ class MapReduce2ServiceCheckDefault(MapReduce2ServiceCheck):
     input_file = format("/user/{smokeuser}/mapredsmokeinput")
     input_file = format("/user/{smokeuser}/mapredsmokeinput")
     output_file = format("/user/{smokeuser}/mapredsmokeoutput")
     output_file = format("/user/{smokeuser}/mapredsmokeoutput")
 
 
-    cleanup_cmd = format("fs -rm -r -f {output_file} {input_file}")
-    create_file_cmd = format("fs -put /etc/passwd {input_file}")
     test_cmd = format("fs -test -e {output_file}")
     test_cmd = format("fs -test -e {output_file}")
     run_wordcount_job = format("jar {jar_path} wordcount {input_file} {output_file}")
     run_wordcount_job = format("jar {jar_path} wordcount {input_file} {output_file}")
 
 
+    params.HdfsResource(input_file,
+                        action = "create_on_execute",
+                        type = "file",
+                        source = "/etc/passwd",
+    )
+    params.HdfsResource(None, action="execute")
+
     if params.security_enabled:
     if params.security_enabled:
       kinit_cmd = format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser_principal};")
       kinit_cmd = format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser_principal};")
 
 
       Execute(kinit_cmd,
       Execute(kinit_cmd,
               user=params.smokeuser
               user=params.smokeuser
       )
       )
-
-    ExecuteHadoop(cleanup_cmd,
-                  tries=1,
-                  try_sleep=5,
-                  user=params.smokeuser,
-                  bin_dir=params.execute_path,
-                  conf_dir=params.hadoop_conf_dir
-    )
-
-    ExecuteHadoop(create_file_cmd,
-                  tries=1,
-                  try_sleep=5,
-                  user=params.smokeuser,
-                  bin_dir=params.execute_path,
-                  conf_dir=params.hadoop_conf_dir
-    )
-
+      
     ExecuteHadoop(run_wordcount_job,
     ExecuteHadoop(run_wordcount_job,
                   tries=1,
                   tries=1,
                   try_sleep=5,
                   try_sleep=5,

+ 26 - 11
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py

@@ -20,13 +20,16 @@ Ambari Agent
 """
 """
 import os
 import os
 
 
+<<<<<<< HEAD
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import conf_select
+=======
+from resource_management import *
+>>>>>>> AMBARI-11034. Implement HdfsResouces and it's usage (aonishuk)
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.resources.hdfs_directory import HdfsDirectory
 
 
 import status_params
 import status_params
 
 
@@ -52,7 +55,8 @@ stack_name = default("/hostLevelParams/stack_name", None)
 
 # This is expected to be of the form #.#.#.#
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
+hdp_stack_version_major = format_hdp_stack_version(stack_version_unformatted)
+hdp_stack_version = version.get_hdp_build_version(hdp_stack_version_major)
 
 # New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
 version = default("/commandParams/version", None)
@@ -72,7 +76,6 @@ yarn_container_bin = "/usr/lib/hadoop-yarn/bin"
 
 # hadoop parameters for 2.2+
 if Script.is_hdp_stack_greater_or_equal("2.2"):
-
   # MapR directory root
   mapred_role_root = "hadoop-mapreduce-client"
   command_role = default("/role", "")
@@ -94,7 +97,19 @@ if Script.is_hdp_stack_greater_or_equal("2.2"):
   hadoop_yarn_home = format("/usr/hdp/current/{yarn_role_root}")
   yarn_bin = format("/usr/hdp/current/{yarn_role_root}/sbin")
   yarn_container_bin = format("/usr/hdp/current/{yarn_role_root}/bin")
+  
+  mapreduce_tar_source = config['configurations']['cluster-env']['mapreduce_tar_source']
+  mapreduce_tar_destination = config['configurations']['cluster-env']['mapreduce_tar_destination_folder'] + "/" + os.path.basename(mapreduce_tar_source)
+
+  # the configuration directory for HDFS/YARN/MapR is the hadoop config
+  # directory, which is symlinked by hadoop-client only
+  hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
+  tez_tar_source = config['configurations']['cluster-env']['tez_tar_source']
+  tez_tar_destination = config['configurations']['cluster-env']['tez_tar_destination_folder'] + "/" + os.path.basename(tez_tar_source)
 
 limits_conf_dir = "/etc/security/limits.d"
 execute_path = os.environ['PATH'] + os.pathsep + hadoop_bin_dir + os.pathsep + yarn_container_bin
@@ -221,17 +236,17 @@ tez_lib_uris = default("/configurations/tez-site/tez.lib.uris", None)
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 import functools
-#create partial functions with common arguments for every HdfsDirectory call
-#to create hdfs directory we need to call params.HdfsDirectory in code
-HdfsDirectory = functools.partial(
-  HdfsDirectory,
-  conf_dir=hadoop_conf_dir,
-  hdfs_user=hdfs_user,
+#create partial functions with common arguments for every HdfsResource call
+#to create/delete hdfs directory/file/copyfromlocal we need to call params.HdfsResource in code
+HdfsResource = functools.partial(
+  HdfsResource,
+  user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  bin_dir = hadoop_bin_dir
-)
+  hadoop_bin_dir = hadoop_bin_dir,
+  hadoop_conf_dir = hadoop_conf_dir
+ )
 update_exclude_file_only = default("/commandParams/update_exclude_file_only",False)
 
 mapred_tt_group = default("/configurations/mapred-site/mapreduce.tasktracker.group", user_group)
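
The functools.partial above pre-binds the cluster-wide arguments (user, keytab, kinit path, hadoop bin/conf dirs) so that call sites only pass what varies per path. A small runnable illustration of the same pattern with a stand-in function instead of the real HdfsResource class; the values below are examples, not taken from any cluster:

import functools

def hdfs_resource(path, user, keytab=None, hadoop_conf_dir=None, **kwargs):
  # Stand-in for the real HdfsResource; it only shows which arguments arrive.
  print(path, user, keytab, hadoop_conf_dir, kwargs)

# Bind the arguments that are identical for every call in this service.
HdfsResource = functools.partial(hdfs_resource,
                                 user="hdfs",
                                 keytab="/etc/security/keytabs/hdfs.headless.keytab",
                                 hadoop_conf_dir="/etc/hadoop/conf")

# Call sites now read like the service scripts: only per-path details remain.
HdfsResource("/mapred", type="directory", action="create_on_execute", owner="mapred")
HdfsResource(None, action="execute")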

+ 9 - 3
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py

@@ -22,7 +22,6 @@ Ambari Agent
 from resource_management import *
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
-from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
 from resource_management.libraries.functions.security_commons import build_expectations, \
   cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \
   FILE_TYPE_XML
@@ -104,11 +103,18 @@ class ResourcemanagerDefault(Resourcemanager):
     self.configure(env) # FOR SECURITY
     if params.is_supported_yarn_ranger:
       setup_ranger_yarn() #Ranger Yarn Plugin related calls
-    if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.1') == 0:
+    if not Script.is_hdp_stack_greater_or_equal("2.2"):
       install_tez_jars()
     else:
       # will work only for stack versions >=2.2
-      copy_tarballs_to_hdfs('tez', 'hadoop-yarn-resourcemanager', params.tez_user, params.hdfs_user, params.user_group)
+      params.HdfsResource(InlineTemplate(params.tez_tar_destination).get_content(),
+                          type="file",
+                          action="create_on_execute",
+                          source=params.tez_tar_source,
+                          group=params.user_group,
+                          owner=params.hdfs_user
+      )
+      params.HdfsResource(None, action="execute")
     service('resourcemanager', action='start')
 
   def status(self, env):
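
For stacks at 2.2 and above the tez tarball is no longer pushed by copy_tarballs_to_hdfs; the destination path (rendered through InlineTemplate because it can contain placeholders) is declared as an HdfsResource file whose source is the local tarball, and the upload happens on the execute flush. A hedged sketch of the same call, reusing only the parameter names that appear in this diff:

# Sketch of the resourcemanager.py change above; params values are assumed to
# come from cluster-env exactly as wired up in params_linux.py.
def copy_tez_tarball_to_hdfs(params):
  destination = InlineTemplate(params.tez_tar_destination).get_content()
  params.HdfsResource(destination,
                      type="file",
                      action="create_on_execute",     # queued
                      source=params.tez_tar_source,   # local tarball on the host
                      group=params.user_group,
                      owner=params.hdfs_user)
  params.HdfsResource(None, action="execute")         # upload happens here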

+ 1 - 1
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/service_check.py

@@ -84,7 +84,7 @@ class ServiceCheckDefault(ServiceCheck):
     import params
     env.set_params(params)
 
-    if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.2') >= 0:
+    if params.hdp_stack_version_major != "" and compare_versions(params.hdp_stack_version_major, '2.2') >= 0:
       path_to_distributed_shell_jar = "/usr/hdp/current/hadoop-yarn-client/hadoop-yarn-applications-distributedshell.jar"
     else:
       path_to_distributed_shell_jar = "/usr/lib/hadoop-yarn/hadoop-yarn-applications-distributedshell*.jar"

+ 19 - 19
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py

@@ -61,37 +61,35 @@ def yarn(name = None):
 @OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
 def yarn(name = None):
   import params
-  if name in ["nodemanager","historyserver"]:
+  if name == "historyserver":
     if params.yarn_log_aggregation_enabled:
-      params.HdfsDirectory(params.yarn_nm_app_log_dir,
-                           action="create_delayed",
+      params.HdfsResource(params.yarn_nm_app_log_dir,
+                           action="create_on_execute",
+                           type="directory",
                            owner=params.yarn_user,
                            group=params.user_group,
                            mode=0777,
                            recursive_chmod=True
       )
-    params.HdfsDirectory("/mapred",
-                         action="create_delayed",
+    params.HdfsResource("/mapred",
+                         type="directory",
+                         action="create_on_execute",
                          owner=params.mapred_user
     )
-    params.HdfsDirectory("/mapred/system",
-                         action="create_delayed",
+    params.HdfsResource("/mapred/system",
+                         type="directory",
+                         action="create_on_execute",
                          owner=params.hdfs_user
     )
-    params.HdfsDirectory(params.mapreduce_jobhistory_intermediate_done_dir,
-                         action="create_delayed",
+    params.HdfsResource(params.mapreduce_jobhistory_done_dir,
+                         type="directory",
+                         action="create_on_execute",
                          owner=params.mapred_user,
                          group=params.user_group,
+                         change_permissions_for_parents=True,
                          mode=0777
     )
-
-    params.HdfsDirectory(params.mapreduce_jobhistory_done_dir,
-                         action="create_delayed",
-                         owner=params.mapred_user,
-                         group=params.user_group,
-                         mode=01777
-    )
-    params.HdfsDirectory(None, action="create")
+    params.HdfsResource(None, action="execute")
 
   if name == "nodemanager":
     Directory(params.nm_local_dirs.split(',') + params.nm_log_dirs.split(','),
@@ -178,12 +176,14 @@ def yarn(name = None):
        group=params.user_group
     )
     if not is_empty(params.node_label_enable) and params.node_label_enable or is_empty(params.node_label_enable) and params.node_labels_dir:
-      params.HdfsDirectory(params.node_labels_dir,
-                           action="create",
+      params.HdfsResource(params.node_labels_dir,
+                           type="directory",
+                           action="create_on_execute",
                            owner=params.yarn_user,
                            group=params.user_group,
                            mode=0700
       )
+      params.HdfsResource(None, action="execute")
   elif name == 'apptimelineserver':
     Directory(params.ats_leveldb_dir,
        owner=params.yarn_user,
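
With HdfsResource the historyserver branch above queues all of its HDFS directories first and creates them with a single execute, instead of the per-directory create_delayed/create sequence that HdfsDirectory needed. A condensed sketch of that flow, restricted to arguments shown in the diff (the loop and helper name are illustrative):

# Condensed, illustrative version of the historyserver branch above.
def create_historyserver_hdfs_layout(params):
  dirs = [("/mapred", params.mapred_user, None),
          ("/mapred/system", params.hdfs_user, None),
          (params.mapreduce_jobhistory_done_dir, params.mapred_user, 0777)]
  for path, owner, mode in dirs:
    kwargs = dict(type="directory", action="create_on_execute", owner=owner)
    if mode is not None:
      kwargs["mode"] = mode                    # only the done dir gets an explicit mode
    params.HdfsResource(path, **kwargs)
  params.HdfsResource(None, action="execute")  # one batched run for every directory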

+ 103 - 89
ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py

@@ -280,36 +280,42 @@ class TestHBaseMaster(RMFTestCase):
                              owner='hbase',
                              content='log4jproperties\nline2'
     )
-    self.assertResourceCalled('HdfsDirectory', 'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              owner = 'hbase',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              mode = 0711,
-                              owner = 'hbase',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', None,
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              bin_dir = '/usr/bin',
-                              action = ['create'],
-                              )
+
+    self.assertResourceCalled('HdfsResource', 'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'hbase',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+    )
+    self.assertResourceCalled('HdfsResource', '/apps/hbase/staging',
+        security_enabled = False,
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        keytab = UnknownConfigurationMock(),
+        
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'hbase',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'directory',
+        action = ['create_on_execute'],
+        mode = 0711,
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
+    )
 
   def assert_configure_secured(self):
     self.assertResourceCalled('Directory', '/etc/hbase',
@@ -401,36 +407,41 @@ class TestHBaseMaster(RMFTestCase):
                              owner='hbase',
                              content='log4jproperties\nline2'
     )
-    self.assertResourceCalled('HdfsDirectory', 'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              owner = 'hbase',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0711,
-                              owner = 'hbase',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', None,
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              bin_dir = '/usr/bin',
-                              action = ['create'],
-                              )
+    self.assertResourceCalled('HdfsResource', 'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'hbase',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+    )
+    self.assertResourceCalled('HdfsResource', '/apps/hbase/staging',
+        security_enabled = True,
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'hbase',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'directory',
+        action = ['create_on_execute'],
+        mode = 0711,
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
+    )
 
   def test_start_default_22(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_master.py",
@@ -528,35 +539,38 @@ class TestHBaseMaster(RMFTestCase):
                              owner='hbase',
                              content='log4jproperties\nline2')
 
-    self.assertResourceCalled('HdfsDirectory', 'hdfs://nn1/apps/hbase/data',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/usr/hdp/current/hadoop-client/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              owner = 'hbase',
-                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
-                              action = ['create_delayed'])
-
-    self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/usr/hdp/current/hadoop-client/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              mode = 0711,
-                              owner = 'hbase',
-                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
-                              action = ['create_delayed'])
-
-    self.assertResourceCalled('HdfsDirectory', None,
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/usr/hdp/current/hadoop-client/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
-                              action = ['create'])
+    self.assertResourceCalled('HdfsResource', 'hdfs://nn1/apps/hbase/data',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'hbase',
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+    )
+    self.assertResourceCalled('HdfsResource', '/apps/hbase/staging',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'hbase',
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+        mode = 0711,
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+    )
 
     self.assertResourceCalled('Execute', '/usr/hdp/current/hbase-master/bin/hbase-daemon.sh --config /usr/hdp/current/hbase-master/conf start master',
       not_if = 'ls /var/run/hbase/hbase-hbase-master.pid >/dev/null 2>&1 && ps -p `cat /var/run/hbase/hbase-hbase-master.pid` >/dev/null 2>&1',

+ 0 - 91
ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py

@@ -207,36 +207,6 @@ class TestHbaseRegionServer(RMFTestCase):
                              owner='hbase',
                              content='log4jproperties\nline2'
     )
-    self.assertResourceCalled('HdfsDirectory', 'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              owner = 'hbase',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0711,
-                              owner = 'hbase',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', None,
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              bin_dir = '/usr/bin',
-                              action = ['create'],
-                              )
 
 
   def assert_configure_secured(self):
     self.assertResourceCalled('Directory', '/etc/hbase',
@@ -328,36 +298,6 @@ class TestHbaseRegionServer(RMFTestCase):
                              owner='hbase',
                              content='log4jproperties\nline2'
     )
-    self.assertResourceCalled('HdfsDirectory', 'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              owner = 'hbase',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0711,
-                              owner = 'hbase',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', None,
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              bin_dir = '/usr/bin',
-                              action = ['create'],
-                              )
 
 
   def test_start_default_22(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_regionserver.py",
@@ -453,37 +393,6 @@ class TestHbaseRegionServer(RMFTestCase):
                              group='hadoop',
                              owner='hbase',
                              content='log4jproperties\nline2')
-
-    self.assertResourceCalled('HdfsDirectory', 'hdfs://nn1/apps/hbase/data',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/usr/hdp/current/hadoop-client/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              owner = 'hbase',
-                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
-                              action = ['create_delayed'])
-
-    self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/usr/hdp/current/hadoop-client/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              mode = 0711,
-                              owner = 'hbase',
-                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
-                              action = ['create_delayed'])
-
-    self.assertResourceCalled('HdfsDirectory', None,
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/usr/hdp/current/hadoop-client/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
-                              action = ['create'])
-
     self.assertResourceCalled('Execute', '/usr/hdp/current/hbase-regionserver/bin/hbase-daemon.sh --config /usr/hdp/current/hbase-regionserver/conf start regionserver',
       not_if = 'ls /var/run/hbase/hbase-hbase-regionserver.pid >/dev/null 2>&1 && ps -p `cat /var/run/hbase/hbase-hbase-regionserver.pid` >/dev/null 2>&1',
       user = 'hbase')

+ 6 - 0
ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py

@@ -380,6 +380,9 @@ class TestDatanode(RMFTestCase):
                              content = Template('slaves.j2'),
                              owner = 'hdfs',
                              )
+    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
+        content = StaticFile('fast-hdfs-resource.jar'),
+    )
     self.assertResourceCalled('Directory', '/var/lib/hadoop-hdfs',
                              owner = 'hdfs',
                              group = 'hadoop',
@@ -431,6 +434,9 @@ class TestDatanode(RMFTestCase):
                              content = Template('slaves.j2'),
                              owner = 'root',
                              )
+    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
+        content = StaticFile('fast-hdfs-resource.jar'),
+    )
     self.assertResourceCalled('Directory', '/var/lib/hadoop-hdfs',
                              owner = 'hdfs',
                              group = 'hadoop',

+ 6 - 0
ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py

@@ -209,6 +209,9 @@ class TestJournalnode(RMFTestCase):
                              content = Template('slaves.j2'),
                              owner = 'hdfs',
                              )
+    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
+        content = StaticFile('fast-hdfs-resource.jar'),
+    )
 
   def assert_configure_secured(self):
     self.assertResourceCalled('Directory', '/grid/0/hdfs/journal',
@@ -247,6 +250,9 @@ class TestJournalnode(RMFTestCase):
                              content = Template('slaves.j2'),
                              owner = 'root',
                              )
+    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
+        content = StaticFile('fast-hdfs-resource.jar'),
+    )
 
 
   @patch('time.sleep')

+ 234 - 214
ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py

@@ -100,38 +100,40 @@ class TestNamenode(RMFTestCase):
                              user = 'hdfs',
                              try_sleep = 10,
                              )
-    self.assertResourceCalled('HdfsDirectory', '/tmp',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0777,
-                              owner = 'hdfs',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0770,
-                              owner = 'ambari-qa',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', None,
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              action = ['create'],
-                              bin_dir = '/usr/bin',
-                              only_if = None,
-                              )
+    self.assertResourceCalled('HdfsResource', '/tmp',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'hdfs',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+        mode = 0777,
+    )
+    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'ambari-qa',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+        mode = 0770,
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = False,
+        only_if = None,
+        keytab = UnknownConfigurationMock(),
+        hadoop_bin_dir = '/usr/bin',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
+    )
     self.assertNoMoreResources()
     pass
 
 
@@ -205,38 +207,40 @@ class TestNamenode(RMFTestCase):
         user = 'hdfs',
         try_sleep = 10,
     )
-    self.assertResourceCalled('HdfsDirectory', '/tmp',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0777,
-                              owner = 'hdfs',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0770,
-                              owner = 'ambari-qa',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', None,
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              action = ['create'],
-                              bin_dir = '/usr/bin',
-                              only_if = None,
-                              )
+    self.assertResourceCalled('HdfsResource', '/tmp',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'hdfs',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+        mode = 0777,
+    )
+    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'ambari-qa',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+        mode = 0770,
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = False,
+        only_if = None,
+        keytab = UnknownConfigurationMock(),
+        hadoop_bin_dir = '/usr/bin',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
+    )
     self.assertNoMoreResources()
 
   def test_stop_default(self):
@@ -332,38 +336,40 @@ class TestNamenode(RMFTestCase):
         user = 'hdfs',
         try_sleep = 10,
     )
-    self.assertResourceCalled('HdfsDirectory', '/tmp',
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0777,
-                              owner = 'hdfs',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0770,
-                              owner = 'ambari-qa',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', None,
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              action = ['create'],
-                              bin_dir = '/usr/bin',
-                              only_if = None,
-                              )
+    self.assertResourceCalled('HdfsResource', '/tmp',
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'hdfs',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+        mode = 0777,
+    )
+    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'ambari-qa',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+        mode = 0770,
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = True,
+        only_if = None,
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        hadoop_bin_dir = '/usr/bin',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
+    )
     self.assertNoMoreResources()
 
   def test_stop_secured(self):
@@ -429,37 +435,39 @@ class TestNamenode(RMFTestCase):
         user = 'hdfs',
         try_sleep = 10,
     )
-    self.assertResourceCalled('HdfsDirectory', '/tmp',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0777,
-                              owner = 'hdfs',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0770,
-                              owner = 'ambari-qa',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', None,
+    self.assertResourceCalled('HdfsResource', '/tmp',
         security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
         keytab = UnknownConfigurationMock(),
-        conf_dir = '/etc/hadoop/conf',
-        hdfs_user = 'hdfs',
         kinit_path_local = '/usr/bin/kinit',
-        action = ['create'],
-        bin_dir = '/usr/bin',
+        user = 'hdfs',
+        owner = 'hdfs',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+        mode = 0777,
+    )
+    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'ambari-qa',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+        mode = 0770,
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = False,
         only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
+        keytab = UnknownConfigurationMock(),
+        hadoop_bin_dir = '/usr/bin',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertNoMoreResources()
 
 
@@ -508,37 +516,39 @@ class TestNamenode(RMFTestCase):
         user = 'hdfs',
         try_sleep = 10,
     )
-    self.assertResourceCalled('HdfsDirectory', '/tmp',
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0777,
-                              owner = 'hdfs',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0770,
-                              owner = 'ambari-qa',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', None,
+    self.assertResourceCalled('HdfsResource', '/tmp',
         security_enabled = True,
+        hadoop_bin_dir = '/usr/bin',
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        conf_dir = '/etc/hadoop/conf',
-        hdfs_user = 'hdfs',
         kinit_path_local = '/usr/bin/kinit',
-        action = ['create'],
-        bin_dir = '/usr/bin',
+        user = 'hdfs',
+        owner = 'hdfs',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+        mode = 0777,
+    )
+    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'ambari-qa',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+        mode = 0770,
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = True,
         only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        hadoop_bin_dir = '/usr/bin',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertNoMoreResources()
 
 
@@ -596,38 +606,40 @@ class TestNamenode(RMFTestCase):
                              user = 'hdfs',
                              try_sleep = 10,
                              )
-    self.assertResourceCalled('HdfsDirectory', '/tmp',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0777,
-                              owner = 'hdfs',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0770,
-                              owner = 'ambari-qa',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', None,
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              action = ['create'],
-                              bin_dir = '/usr/bin',
-                              only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
-                              )
+    self.assertResourceCalled('HdfsResource', '/tmp',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'hdfs',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+        mode = 0777,
+    )
+    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'ambari-qa',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+        mode = 0770,
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = False,
+        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
+        keytab = UnknownConfigurationMock(),
+        hadoop_bin_dir = '/usr/bin',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
+    )
     self.assertNoMoreResources()
 
   # tests namenode start command when NameNode HA is enabled, and
@@ -681,38 +693,40 @@ class TestNamenode(RMFTestCase):
                              user = 'hdfs',
                              try_sleep = 10,
                              )
-    self.assertResourceCalled('HdfsDirectory', '/tmp',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0777,
-                              owner = 'hdfs',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0770,
-                              owner = 'ambari-qa',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', None,
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              action = ['create'],
-                              bin_dir = '/usr/bin',
-                              only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'",
-                              )
+    self.assertResourceCalled('HdfsResource', '/tmp',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'hdfs',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+        mode = 0777,
+    )
+    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'ambari-qa',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+        mode = 0770,
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = False,
+        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'",
+        keytab = UnknownConfigurationMock(),
+        hadoop_bin_dir = '/usr/bin',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
+    )
     self.assertNoMoreResources()

   def test_decommission_default(self):
@@ -829,6 +843,9 @@ class TestNamenode(RMFTestCase):
                              content = Template('slaves.j2'),
                              owner = 'hdfs',
                              )
+    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
+        content = StaticFile('fast-hdfs-resource.jar'),
+    )
     self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode',
                               owner = 'hdfs',
                               group = 'hadoop',
@@ -868,6 +885,9 @@ class TestNamenode(RMFTestCase):
                              content = Template('slaves.j2'),
                              owner = 'root',
                              )
+    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
+        content = StaticFile('fast-hdfs-resource.jar'),
+    )
     self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode',
                               owner = 'hdfs',
                               group = 'hadoop',

+ 6 - 0
ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py

@@ -221,6 +221,9 @@ class TestNFSGateway(RMFTestCase):
                              content = Template('slaves.j2'),
                              owner = 'hdfs',
                              )
+    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
+        content = StaticFile('fast-hdfs-resource.jar'),
+    )

   def assert_configure_secured(self):
     self.assertResourceCalled('Directory', '/etc/security/limits.d',
@@ -253,6 +256,9 @@ class TestNFSGateway(RMFTestCase):
                              content = Template('slaves.j2'),
                              owner = 'root',
                              )
+    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
+        content = StaticFile('fast-hdfs-resource.jar'),
+    )


   @patch("resource_management.libraries.functions.security_commons.build_expectations")

+ 34 - 25
ambari-server/src/test/python/stacks/2.0.6/HDFS/test_service_check.py

@@ -59,36 +59,45 @@ class TestServiceCheck(RMFTestCase):
         bin_dir = '/usr/bin',
         user = 'hdfs',
     )
-    self.assertResourceCalled('ExecuteHadoop', 'fs -mkdir /tmp',
-        conf_dir = '/etc/hadoop/conf',
-        logoutput = True,
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]/usr/bin/hadoop --config /etc/hadoop/conf fs -test -e /tmp'",
-        try_sleep = 3,
-        tries = 5,
-        bin_dir = '/usr/bin',
+    self.assertResourceCalled('HdfsResource', '/tmp',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
+        action = ['create_on_execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+        mode = 0777,
     )
-    self.assertResourceCalled('ExecuteHadoop', 'fs -chmod 777 /tmp',
-        conf_dir = '/etc/hadoop/conf',
-        logoutput = True,
-        try_sleep = 3,
-        tries = 5,
-        bin_dir = '/usr/bin',
+    self.assertResourceCalled('HdfsResource', '/tmp/',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
+        action = ['delete_on_execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'file',
     )
-    self.assertResourceCalled('ExecuteHadoop', 'fs -rm /tmp/; hadoop --config /etc/hadoop/conf fs -put /etc/passwd /tmp/',
-        logoutput = True,
-        tries = 5,
-        conf_dir = '/etc/hadoop/conf',
-        bin_dir = '/usr/bin',
-        try_sleep = 3,
+    self.assertResourceCalled('HdfsResource', '/tmp/',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        source = '/etc/passwd',
         user = 'hdfs',
+        action = ['create_on_execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'file',
     )
-    self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /tmp/',
-        logoutput = True,
-        tries = 5,
-        conf_dir = '/etc/hadoop/conf',
-        bin_dir = '/usr/bin',
-        try_sleep = 3,
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
     )
+    self.assertNoMoreResources()

+ 6 - 0
ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py

@@ -230,6 +230,9 @@ class TestSNamenode(RMFTestCase):
                              content = Template('slaves.j2'),
                              owner = 'hdfs',
                              )
+    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
+        content = StaticFile('fast-hdfs-resource.jar'),
+    )
     self.assertResourceCalled('Directory', '/hadoop/hdfs/namesecondary',
                               owner = 'hdfs',
                               group = 'hadoop',
@@ -276,6 +279,9 @@ class TestSNamenode(RMFTestCase):
                              content = Template('slaves.j2'),
                              owner = 'root',
                              )
+    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
+        content = StaticFile('fast-hdfs-resource.jar'),
+    )
     self.assertResourceCalled('Directory', '/hadoop/hdfs/namesecondary',
                               owner = 'hdfs',
                               group = 'hadoop',

+ 12 - 0
ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py

@@ -64,6 +64,9 @@ class TestZkfc(RMFTestCase):
                              content = Template('slaves.j2'),
                              owner = 'hdfs',
                              )
+    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
+        content = StaticFile('fast-hdfs-resource.jar'),
+    )
     self.assertResourceCalled('Directory', '/var/run/hadoop',
                               owner = 'hdfs',
                               group = 'hadoop',
@@ -155,6 +158,9 @@ class TestZkfc(RMFTestCase):
                              content = Template('slaves.j2'),
                              owner = 'root',
                              )
+    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
+        content = StaticFile('fast-hdfs-resource.jar'),
+    )
     self.assertResourceCalled('Directory', '/var/run/hadoop',
                               owner = 'hdfs',
                               group = 'hadoop',
@@ -245,6 +251,9 @@ class TestZkfc(RMFTestCase):
                              content = Template('slaves.j2'),
                              owner = 'hdfs',
                              )
+    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
+        content = StaticFile('fast-hdfs-resource.jar'),
+    )
     self.assertResourceCalled('Directory', '/var/run/hadoop',
                               owner = 'hdfs',
                               group = 'hadoop',
@@ -307,6 +316,9 @@ class TestZkfc(RMFTestCase):
                              content = Template('slaves.j2'),
                              owner = 'hdfs',
                              )
+    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
+        content = StaticFile('fast-hdfs-resource.jar'),
+    )
     self.assertResourceCalled('Directory', '/var/run/hadoop',
                               owner = 'hdfs',
                               group = 'hadoop',

+ 225 - 103
ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py

@@ -22,16 +22,19 @@ import socket
 import subprocess

 from mock.mock import MagicMock, patch
+from resource_management.libraries.functions import version
 from resource_management.core import shell
-from resource_management.libraries.functions import dynamic_variable_interpretation
+from resource_management.libraries.script.script import Script
 from stacks.utils.RMFTestCase import *

+
+@patch.object(version, "get_hdp_build_version", new = MagicMock(return_value="2.0.0.0-1234"))
 @patch("resource_management.libraries.functions.check_thrift_port_sasl", new=MagicMock())
 class TestHiveServer(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "HIVE/0.12.0.2.0/package"
   STACK_VERSION = "2.0.6"
   UPGRADE_STACK_VERSION = "2.2"
-
+  @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                        classname = "HiveServer",
@@ -44,7 +47,7 @@ class TestHiveServer(RMFTestCase):
     self.assertNoMoreResources()

   @patch("socket.socket")
-  @patch.object(dynamic_variable_interpretation, "copy_tarballs_to_hdfs", new=MagicMock())
+  @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
   def test_start_default(self, socket_mock):
     s = socket_mock.return_value

@@ -83,9 +86,8 @@ class TestHiveServer(RMFTestCase):
                               )
     self.assertNoMoreResources()

-
-  @patch.object(dynamic_variable_interpretation, "_get_tar_source_and_dest_folder")
-  def test_start_default_no_copy(self, get_tar_mock):
+  @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
+  def test_start_default_no_copy(self):

     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                        classname = "HiveServer",
@@ -95,7 +97,6 @@ class TestHiveServer(RMFTestCase):
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )

-    get_tar_mock.return_value = ("a", "b")
     self.assert_configure_default()

     self.assertResourceCalled('Execute', 'hive --config /etc/hive/conf.server --service metatool -updateLocation hdfs://c6401.ambari.apache.org:8020 OK.',
@@ -118,10 +119,9 @@ class TestHiveServer(RMFTestCase):
                               timeout = 30,
                               )
     self.assertNoMoreResources()
-    self.assertFalse(get_tar_mock.called)

-  @patch.object(dynamic_variable_interpretation, "_get_tar_source_and_dest_folder")
-  def test_start_default_alt_tmp(self, get_tar_mock):
+  @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
+  def test_start_default_alt_tmp(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                        classname = "HiveServer",
                        command = "start",
@@ -130,7 +130,6 @@ class TestHiveServer(RMFTestCase):
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )

-    get_tar_mock.return_value = ("a", "b")
     self.assert_configure_default(no_tmp=True)

     self.assertResourceCalled('Execute', 'hive --config /etc/hive/conf.server --service metatool -updateLocation hdfs://c6401.ambari.apache.org:8020 OK.',
@@ -153,11 +152,10 @@ class TestHiveServer(RMFTestCase):
                               timeout = 30,
                               )
     self.assertNoMoreResources()
-    self.assertFalse(get_tar_mock.called)


-  @patch.object(dynamic_variable_interpretation, "_get_tar_source_and_dest_folder")
-  def test_start_default_alt_nn_ha_tmp(self, get_tar_mock):
+  @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
+  def test_start_default_alt_nn_ha_tmp(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                        classname = "HiveServer",
                        command = "start",
@@ -166,7 +164,6 @@ class TestHiveServer(RMFTestCase):
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )

-    get_tar_mock.return_value = ("a", "b")
     self.assert_configure_default(no_tmp=True)

     self.assertResourceCalled('Execute', 'hive --config /etc/hive/conf.server --service metatool -updateLocation hdfs://c6401.ambari.apache.org:8020 OK.',
@@ -189,9 +186,8 @@ class TestHiveServer(RMFTestCase):
                               timeout = 30,
                               )
     self.assertNoMoreResources()
-    self.assertFalse(get_tar_mock.called)

-  @patch.object(dynamic_variable_interpretation, "copy_tarballs_to_hdfs", new=MagicMock())
+  @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
   def test_stop_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                        classname = "HiveServer",
@@ -217,7 +213,7 @@ class TestHiveServer(RMFTestCase):

     self.assertNoMoreResources()

-    
+  @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
   def test_configure_secured(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                        classname = "HiveServer",
@@ -231,6 +227,7 @@ class TestHiveServer(RMFTestCase):

   @patch("hive_service.check_fs_root")
   @patch("socket.socket")
+  @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
   def test_start_secured(self, socket_mock, check_fs_root_mock):
     s = socket_mock.return_value

@@ -276,6 +273,7 @@ class TestHiveServer(RMFTestCase):


   @patch("socket.socket")
+  @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
   def test_stop_secured(self, socket_mock):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                        classname = "HiveServer",
@@ -302,51 +300,89 @@ class TestHiveServer(RMFTestCase):
     self.assertNoMoreResources()

   def assert_configure_default(self, no_tmp = False):
-    self.assertResourceCalled('HdfsDirectory', '/apps/hive/warehouse',
-                              security_enabled=False,
-                              keytab=UnknownConfigurationMock(),
-                              conf_dir='/etc/hadoop/conf',
-                              hdfs_user='hdfs',
-                              kinit_path_local='/usr/bin/kinit',
-                              mode=0777,
-                              owner='hive',
-                              bin_dir='/usr/bin',
-                              action=['create_delayed'],
-    )
-    self.assertResourceCalled('HdfsDirectory', '/user/hive',
-                              security_enabled=False,
-                              keytab=UnknownConfigurationMock(),
-                              conf_dir='/etc/hadoop/conf',
-                              hdfs_user='hdfs',
-                              kinit_path_local='/usr/bin/kinit',
-                              mode=0700,
-                              owner='hive',
-                              bin_dir='/usr/bin',
-                              action=['create_delayed'],
+    self.assertResourceCalled('HdfsResource', '/apps/webhcat',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'hcat',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+        mode = 0755,
+    )
+    self.assertResourceCalled('HdfsResource', '/user/hcat',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'hcat',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+        mode = 0755,
+    )
+    self.assertResourceCalled('HdfsResource', '/apps/webhcat/hive.tar.gz',
+        security_enabled = False,
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        keytab = UnknownConfigurationMock(),
+        source = '/usr/share/HDP-webhcat/hive.tar.gz',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['create_on_execute'],
+        group = 'hadoop',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'file',
+        mode = 0755,
+    )
+    self.assertResourceCalled('HdfsResource', '/apps/hive/warehouse',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'hive',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+        mode = 0777,
+    )
+    self.assertResourceCalled('HdfsResource', '/user/hive',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'hive',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+        mode = 0700,
     )
-
     if not no_tmp:
-      self.assertResourceCalled('HdfsDirectory', '/custompath/tmp/hive',
-                                security_enabled=False,
-                                keytab=UnknownConfigurationMock(),
-                                conf_dir='/etc/hadoop/conf',
-                                hdfs_user='hdfs',
-                                kinit_path_local='/usr/bin/kinit',
-                                mode=0777,
-                                owner='hive',
-                                group='hdfs',
-                                action=['create_delayed'],
-                                bin_dir='/usr/bin',
+      self.assertResourceCalled('HdfsResource', '/custompath/tmp/hive',
+          security_enabled = False,
+          hadoop_conf_dir = '/etc/hadoop/conf',
+          keytab = UnknownConfigurationMock(),
+          kinit_path_local = '/usr/bin/kinit',
+          user = 'hdfs',
+          owner = 'hive',
+          group = 'hdfs',
+          hadoop_bin_dir = '/usr/bin',
+          type = 'directory',
+          action = ['create_on_execute'],
+          mode = 0777,
       )
-
-    self.assertResourceCalled('HdfsDirectory', None,
-                              security_enabled=False,
-                              keytab=UnknownConfigurationMock(),
-                              conf_dir='/etc/hadoop/conf',
-                              hdfs_user='hdfs',
-                              kinit_path_local='/usr/bin/kinit',
-                              action=['create'],
-                              bin_dir='/usr/bin',
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertResourceCalled('Directory', '/etc/hive',
                               mode=0755,
@@ -453,49 +489,88 @@ class TestHiveServer(RMFTestCase):
 
 
 
 
   def assert_configure_secured(self):
   def assert_configure_secured(self):
-    self.assertResourceCalled('HdfsDirectory', '/apps/hive/warehouse',
-                              security_enabled=True,
-                              keytab='/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir='/etc/hadoop/conf',
-                              hdfs_user='hdfs',
-                              kinit_path_local='/usr/bin/kinit',
-                              mode=0777,
-                              owner='hive',
-                              bin_dir='/usr/bin',
-                              action=['create_delayed'],
-    )
-    self.assertResourceCalled('HdfsDirectory', '/user/hive',
-                              security_enabled=True,
-                              keytab='/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir='/etc/hadoop/conf',
-                              hdfs_user='hdfs',
-                              kinit_path_local='/usr/bin/kinit',
-                              mode=0700,
-                              owner='hive',
-                              bin_dir='/usr/bin',
-                              action=['create_delayed'],
-    )
-    self.assertResourceCalled('HdfsDirectory', '/custompath/tmp/hive',
-                              security_enabled=True,
-                              keytab='/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir='/etc/hadoop/conf',
-                              hdfs_user='hdfs',
-                              kinit_path_local='/usr/bin/kinit',
-                              mode=0777,
-                              owner='hive',
-                              group='hdfs',
-                              action=['create_delayed'],
-                              bin_dir='/usr/bin',
-    )
-
-    self.assertResourceCalled('HdfsDirectory', None,
-                              security_enabled=True,
-                              keytab='/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir='/etc/hadoop/conf',
-                              hdfs_user='hdfs',
-                              kinit_path_local='/usr/bin/kinit',
-                              action=['create'],
-                              bin_dir='/usr/bin',
+    self.assertResourceCalled('HdfsResource', '/apps/webhcat',
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'hcat',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+        mode = 0755,
+    )
+    self.assertResourceCalled('HdfsResource', '/user/hcat',
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'hcat',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+        mode = 0755,
+    )
+    self.assertResourceCalled('HdfsResource', '/apps/webhcat/hive.tar.gz',
+        security_enabled = True,
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        source = '/usr/share/HDP-webhcat/hive.tar.gz',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['create_on_execute'],
+        group = 'hadoop',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'file',
+        mode = 0755,
+    )
+    self.assertResourceCalled('HdfsResource', '/apps/hive/warehouse',
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'hive',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+        mode = 0777,
+    )
+    self.assertResourceCalled('HdfsResource', '/user/hive',
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'hive',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+        mode = 0700,
+    )
+    self.assertResourceCalled('HdfsResource', '/custompath/tmp/hive',
+        security_enabled = True,
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'hive',
+        group = 'hdfs',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'directory',
+        action = ['create_on_execute'],
+        mode = 0777,
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertResourceCalled('Directory', '/etc/hive',
                               mode=0755,
@@ -624,6 +699,7 @@ class TestHiveServer(RMFTestCase):
       self.assert_configure_default()


+  @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=True))
   @patch("hive_server.HiveServer.pre_rolling_restart")
   @patch("hive_server.HiveServer.pre_rolling_restart")
   @patch("hive_server.HiveServer.start")
   @patch("hive_server.HiveServer.start")
   def test_stop_during_upgrade(self, hive_server_start_mock,
   def test_stop_during_upgrade(self, hive_server_start_mock,
@@ -771,6 +847,7 @@ class TestHiveServer(RMFTestCase):
     )
     put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})

+  @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=True))
   def test_pre_rolling_restart(self):
     config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/default.json"
     with open(config_file, "r") as f:
@@ -785,9 +862,32 @@ class TestHiveServer(RMFTestCase):
                        target = RMFTestCase.TARGET_COMMON_SERVICES)
     self.assertResourceCalled('Execute',
                               'hdp-select set hive-server2 %s' % version,)
+    self.assertResourceCalled('HdfsResource', 'hdfs:///hdp/apps/2.0.0.0-1234/mapreduce//mapreduce.tar.gz',
+        security_enabled = False,
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        keytab = UnknownConfigurationMock(),
+        source = '/usr/hdp/current/hadoop-client/mapreduce.tar.gz',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['create_on_execute'],
+        group = 'hadoop',
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        type = 'file',
+        mode = 0444,
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+    )
     self.assertNoMoreResources()

   @patch("resource_management.core.shell.call")
+  @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=True))
   def test_pre_rolling_restart_23(self, call_mock):
     config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/default.json"
     with open(config_file, "r") as f:
@@ -807,6 +907,28 @@ class TestHiveServer(RMFTestCase):

     self.assertResourceCalled('Execute',
                               'hdp-select set hive-server2 %s' % version,)
+    self.assertResourceCalled('HdfsResource', 'hdfs:///hdp/apps/2.0.0.0-1234/mapreduce//mapreduce.tar.gz',
+        security_enabled = False,
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        keytab = UnknownConfigurationMock(),
+        source = '/usr/hdp/current/hadoop-client/mapreduce.tar.gz',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['create_on_execute'],
+        group = 'hadoop',
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        type = 'file',
+        mode = 0444,
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+    )
     self.assertNoMoreResources()

     self.assertEquals(2, mocks_dict['call'].call_count)

+ 90 - 16
ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py

@@ -24,12 +24,13 @@ import datetime, sys, socket
 import  resource_management.libraries.functions
 @patch.object(resource_management.libraries.functions, "get_unique_id_and_date", new = MagicMock(return_value=''))
 @patch("socket.socket")
+@patch("time.time", new=MagicMock(return_value=1431110511.43))
 class TestServiceCheck(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "HIVE/0.12.0.2.0/package"
   STACK_VERSION = "2.0.6"

-  @patch("sys.exit")
-  def test_service_check_default(self, sys_exit_mock, socket_mock):
+
+  def test_service_check_default(self, socket_mock):

     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/service_check.py",
                         classname="HiveServiceCheck",
@@ -82,16 +83,52 @@ class TestServiceCheck(RMFTestCase):
                               content = StaticFile('templetonSmoke.sh'),
                               mode = 0755,
                               )
-    self.assertResourceCalled('Execute', '/tmp/templetonSmoke.sh c6402.ambari.apache.org ambari-qa 50111 no_keytab false /usr/bin/kinit no_principal',
-                              logoutput = True,
-                              path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
-                              tries = 3,
-                              try_sleep = 5,
-                              )
+    self.assertResourceCalled('File', '/tmp/idtest.ambari-qa.1431110511.43.pig',
+        content = Template('templeton_smoke.pig.j2', templeton_test_input='/tmp/idtest.ambari-qa.1431110511.43.in', templeton_test_output='/tmp/idtest.ambari-qa.1431110511.43.out'),
+    )
+    self.assertResourceCalled('HdfsResource', '/tmp/idtest.ambari-qa.1431110511.43.pig',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        source = '/tmp/idtest.ambari-qa.1431110511.43.pig',
+        user = 'hdfs',
+        owner = 'ambari-qa',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'file',
+        action = ['create_on_execute'],
+    )
+    self.assertResourceCalled('HdfsResource', '/tmp/idtest.ambari-qa.1431110511.43.in',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        source = '/etc/passwd',
+        user = 'hdfs',
+        owner = 'ambari-qa',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'file',
+        action = ['create_on_execute'],
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
+    )
+    self.assertResourceCalled('Execute', '/tmp/templetonSmoke.sh c6402.ambari.apache.org ambari-qa 50111 idtest.ambari-qa.1431110511.43.pig no_keytab false /usr/bin/kinit no_principal',
+        logoutput = True,
+        path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
+        tries = 3,
+        try_sleep = 5,
+    )
     self.assertNoMoreResources()

-  @patch("sys.exit")
-  def test_service_check_secured(self, sys_exit_mock, socket_mock):
+
+  def test_service_check_secured(self, socket_mock):

     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/service_check.py",
                         classname="HiveServiceCheck",
@@ -145,10 +182,47 @@ class TestServiceCheck(RMFTestCase):
                               content = StaticFile('templetonSmoke.sh'),
                               mode = 0755,
                               )
-    self.assertResourceCalled('Execute', '/tmp/templetonSmoke.sh c6402.ambari.apache.org ambari-qa 50111 /etc/security/keytabs/smokeuser.headless.keytab true /usr/bin/kinit ambari-qa@EXAMPLE.COM',
-                              logoutput = True,
-                              path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
-                              tries = 3,
-                              try_sleep = 5,
-                              )
+    
+    self.assertResourceCalled('File', '/tmp/idtest.ambari-qa.1431110511.43.pig',
+        content = Template('templeton_smoke.pig.j2', templeton_test_input='/tmp/idtest.ambari-qa.1431110511.43.in', templeton_test_output='/tmp/idtest.ambari-qa.1431110511.43.out'),
+    )
+    self.assertResourceCalled('HdfsResource', '/tmp/idtest.ambari-qa.1431110511.43.pig',
+        action = ['create_on_execute'],
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        kinit_path_local = '/usr/bin/kinit',
+        source = '/tmp/idtest.ambari-qa.1431110511.43.pig',
+        user = 'hdfs',
+        owner = 'ambari-qa',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'file',
+    )
+    self.assertResourceCalled('HdfsResource', '/tmp/idtest.ambari-qa.1431110511.43.in',
+        action = ['create_on_execute'],
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        kinit_path_local = '/usr/bin/kinit',
+        source = '/etc/passwd',
+        user = 'hdfs',
+        owner = 'ambari-qa',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'file',
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
+    )
+    self.assertResourceCalled('Execute', '/tmp/templetonSmoke.sh c6402.ambari.apache.org ambari-qa 50111 idtest.ambari-qa.1431110511.43.pig /etc/security/keytabs/smokeuser.headless.keytab true /usr/bin/kinit ambari-qa@EXAMPLE.COM',
+        logoutput = True,
+        path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
+        tries = 3,
+        try_sleep = 5,
+    )
     self.assertNoMoreResources()

+ 0 - 134
ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py

@@ -117,37 +117,6 @@ class TestWebHCatServer(RMFTestCase):
     self.assertNoMoreResources()

   def assert_configure_default(self):
-    self.assertResourceCalled('HdfsDirectory', '/apps/webhcat',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              mode = 0755,
-                              owner = 'hcat',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/user/hcat',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              mode = 0755,
-                              owner = 'hcat',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', None,
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              bin_dir = '/usr/bin',
-                              action = ['create'],
-                              )
     self.assertResourceCalled('Directory', '/var/run/webhcat',
                               owner = 'hcat',
                               group = 'hadoop',
@@ -165,42 +134,6 @@ class TestWebHCatServer(RMFTestCase):
                               group = 'hadoop',
                               recursive = True,
                               )
-    self.assertResourceCalled('CopyFromLocal', '/usr/lib/hadoop-mapreduce/hadoop-streaming-*.jar',
-                              owner='hcat',
-                              mode=0755,
-                              dest_dir='/apps/webhcat',
-                              kinnit_if_needed='',
-                              hadoop_conf_dir='/etc/hadoop/conf',
-                              hadoop_bin_dir='/usr/bin',
-                              hdfs_user='hdfs'
-    )
-    self.assertResourceCalled('CopyFromLocal', '/usr/share/HDP-webhcat/pig.tar.gz',
-                              owner='hcat',
-                              mode=0755,
-                              dest_dir='/apps/webhcat',
-                              kinnit_if_needed='',
-                              hadoop_conf_dir='/etc/hadoop/conf',
-                              hadoop_bin_dir='/usr/bin',
-                              hdfs_user='hdfs'
-    )
-    self.assertResourceCalled('CopyFromLocal', '/usr/share/HDP-webhcat/hive.tar.gz',
-                              owner='hcat',
-                              mode=0755,
-                              dest_dir='/apps/webhcat',
-                              kinnit_if_needed='',
-                              hadoop_bin_dir='/usr/bin',
-                              hadoop_conf_dir='/etc/hadoop/conf',
-                              hdfs_user='hdfs'
-    )
-    self.assertResourceCalled('CopyFromLocal', '/usr/share/HDP-webhcat/sqoop*.tar.gz',
-                              owner='hcat',
-                              mode=0755,
-                              dest_dir='/apps/webhcat',
-                              kinnit_if_needed='',
-                              hadoop_bin_dir='/usr/bin',
-                              hadoop_conf_dir='/etc/hadoop/conf',
-                              hdfs_user='hdfs'
-    )
     self.assertResourceCalled('XmlConfig', 'webhcat-site.xml',
                               owner = 'hcat',
                               group = 'hadoop',
@@ -225,37 +158,6 @@ class TestWebHCatServer(RMFTestCase):
                               )

   def assert_configure_secured(self):
-    self.assertResourceCalled('HdfsDirectory', '/apps/webhcat',
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0755,
-                              owner = 'hcat',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/user/hcat',
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0755,
-                              owner = 'hcat',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', None,
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              bin_dir = '/usr/bin',
-                              action = ['create'],
-                              )
     self.assertResourceCalled('Directory', '/var/run/webhcat',
                               owner = 'hcat',
                               group = 'hadoop',
@@ -277,42 +179,6 @@ class TestWebHCatServer(RMFTestCase):
                               path = ['/bin'],
                               user = 'hcat',
                               )
-    self.assertResourceCalled('CopyFromLocal', '/usr/lib/hadoop-mapreduce/hadoop-streaming-*.jar',
-                              owner='hcat',
-                              mode=0755,
-                              dest_dir='/apps/webhcat',
-                              kinnit_if_needed='/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs;',
-                              hadoop_conf_dir='/etc/hadoop/conf',
-                              hadoop_bin_dir='/usr/bin',
-                              hdfs_user='hdfs'
-    )
-    self.assertResourceCalled('CopyFromLocal', '/usr/share/HDP-webhcat/pig.tar.gz',
-                              owner='hcat',
-                              mode=0755,
-                              dest_dir='/apps/webhcat',
-                              kinnit_if_needed='/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs;',
-                              hadoop_conf_dir='/etc/hadoop/conf',
-                              hadoop_bin_dir='/usr/bin',
-                              hdfs_user='hdfs'
-    )
-    self.assertResourceCalled('CopyFromLocal', '/usr/share/HDP-webhcat/hive.tar.gz',
-                              owner='hcat',
-                              mode=0755,
-                              dest_dir='/apps/webhcat',
-                              kinnit_if_needed='/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs;',
-                              hadoop_conf_dir='/etc/hadoop/conf',
-                              hadoop_bin_dir='/usr/bin',
-                              hdfs_user='hdfs'
-    )
-    self.assertResourceCalled('CopyFromLocal', '/usr/share/HDP-webhcat/sqoop*.tar.gz',
-                              owner='hcat',
-                              mode=0755,
-                              dest_dir='/apps/webhcat',
-                              kinnit_if_needed='/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs;',
-                              hadoop_conf_dir='/etc/hadoop/conf',
-                              hadoop_bin_dir='/usr/bin',
-                              hdfs_user='hdfs'
-    )
     self.assertResourceCalled('XmlConfig', 'webhcat-site.xml',
                               owner = 'hcat',
                               group = 'hadoop',

+ 137 - 40
ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py

@@ -50,17 +50,27 @@ class TestOozieServer(RMFTestCase):
                        hdp_stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
-    self.assertResourceCalled('HdfsDirectory', '/user/oozie',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0775,
-                              owner = 'oozie',
-                              bin_dir = '/usr/bin',
-                              action = ['create'],
-                              )
+    self.assertResourceCalled('HdfsResource', '/user/oozie',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'oozie',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+        mode = 0775,
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
+    )
     self.assertResourceCalled('Directory', '/etc/oozie/conf',
                               owner = 'oozie',
                               group = 'hadoop',
@@ -325,16 +335,26 @@ class TestOozieServer(RMFTestCase):


   def assert_configure_default(self):
-    self.assertResourceCalled('HdfsDirectory', '/user/oozie',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0775,
-                              owner = 'oozie',
-                              bin_dir = '/usr/bin',
-                              action = ['create'],
+    self.assertResourceCalled('HdfsResource', '/user/oozie',
+        security_enabled = False,
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'oozie',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'directory',
+        action = ['create_on_execute'],
+        mode = 0775,
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertResourceCalled('Directory', '/etc/oozie/conf',
                               owner = 'oozie',
@@ -481,17 +501,29 @@ class TestOozieServer(RMFTestCase):
 
 
   def assert_configure_secured(self):
-    self.assertResourceCalled('HdfsDirectory', '/user/oozie',
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0775,
-                              owner = 'oozie',
-                              bin_dir = '/usr/bin',
-                              action = ['create'],
-                              )
+    self.assertResourceCalled('HdfsResource', '/user/oozie',
+        security_enabled = True,
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'oozie',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'directory',
+        action = ['create_on_execute'],
+        mode = 0775,
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
+    )
     self.assertResourceCalled('Directory', '/etc/oozie/conf',
                               owner = 'oozie',
                               group = 'hadoop',
@@ -829,9 +861,30 @@ class TestOozieServer(RMFTestCase):
     self.assertEqual(glob_mock.call_count,1)
     glob_mock.assert_called_with('/usr/hdp/2.2.1.0-2135/hadoop/lib/hadoop-lzo*.jar')

-    self.assertResourceCalled('Execute', 'hdp-select set oozie-server 2.2.1.0-2135')
-    self.assertResourceCalled('Execute', 'hdfs dfs -chown oozie:hadoop /user/oozie/share', user='oozie')
-    self.assertResourceCalled('Execute', 'hdfs dfs -chmod -R 755 /user/oozie/share', user='oozie')
+    self.assertResourceCalled('Execute', 'hdp-select set oozie-server 2.2.1.0-2135',)
+    self.assertResourceCalled('HdfsResource', '/user/oozie/share',
+        security_enabled = False,
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        keytab = UnknownConfigurationMock(),
+        user = 'hdfs',
+        kinit_path_local = '/usr/bin/kinit',
+        recursive_chmod = True,
+        owner = 'oozie',
+        group = 'hadoop',
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        type = 'directory',
+        action = ['create_on_execute'],
+        mode = 0755,
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+    )
     self.assertResourceCalled('Execute', '/usr/hdp/current/oozie-server/bin/ooziedb.sh upgrade -run', user='oozie')
     self.assertResourceCalled('Execute', '/usr/hdp/current/oozie-server/bin/oozie-setup.sh sharelib create -fs hdfs://c6401.ambari.apache.org:8020', user='oozie')

@@ -890,9 +943,32 @@ class TestOozieServer(RMFTestCase):
     glob_mock.assert_called_with('/usr/hdp/2.3.0.0-1234/hadoop/lib/hadoop-lzo*.jar')

     self.assertResourceCalled('Execute', 'hdp-select set oozie-server 2.3.0.0-1234')
-    self.assertResourceCalled('Execute', 'hdfs dfs -chown oozie:hadoop /user/oozie/share', user='oozie')
-    self.assertResourceCalled('Execute', 'hdfs dfs -chmod -R 755 /user/oozie/share', user='oozie')
-    self.assertResourceCalled('Execute', '/usr/hdp/current/oozie-server/bin/ooziedb.sh upgrade -run', user='oozie')
+    self.assertResourceCalled('HdfsResource', '/user/oozie/share',
+        security_enabled = False,
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        keytab = UnknownConfigurationMock(),
+        user = 'hdfs',
+        kinit_path_local = '/usr/bin/kinit',
+        recursive_chmod = True,
+        owner = 'oozie',
+        group = 'hadoop',
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        type = 'directory',
+        action = ['create_on_execute'],
+        mode = 0755,
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+    )
+    self.assertResourceCalled('Execute', '/usr/hdp/current/oozie-server/bin/ooziedb.sh upgrade -run',
+        user = 'oozie',
+    )
     self.assertResourceCalled('Execute', '/usr/hdp/current/oozie-server/bin/oozie-setup.sh sharelib create -fs hdfs://c6401.ambari.apache.org:8020', user='oozie')

     self.assertNoMoreResources()
@@ -943,8 +1019,29 @@ class TestOozieServer(RMFTestCase):
     isfile_mock.assert_called_with('/usr/share/HDP-oozie/ext-2.2.zip')

     self.assertResourceCalled('Execute', 'hdp-select set oozie-server 2.2.0.0-0000')
-    self.assertResourceCalled('Execute', 'hdfs dfs -chown oozie:hadoop /user/oozie/share', user='oozie')
-    self.assertResourceCalled('Execute', 'hdfs dfs -chmod -R 755 /user/oozie/share', user='oozie')
+    self.assertResourceCalled('HdfsResource', '/user/oozie/share',
+        security_enabled = False,
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        keytab = UnknownConfigurationMock(),
+        user = 'hdfs',
+        kinit_path_local = '/usr/bin/kinit',
+        recursive_chmod = True,
+        owner = 'oozie',
+        group = 'hadoop',
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        type = 'directory',
+        action = ['create_on_execute'],
+        mode = 0755,
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+    )
     self.assertResourceCalled('Execute', '/usr/hdp/current/oozie-server/bin/ooziedb.sh upgrade -run', user='oozie')
     self.assertResourceCalled('Execute', '/usr/hdp/current/oozie-server/bin/oozie-setup.sh sharelib create -fs hdfs://c6401.ambari.apache.org:8020', user='oozie')


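The converted expectations above all encode the same pattern: each HdfsResource with action = ['create_on_execute'] (or 'delete_on_execute') only queues a request, and the trailing HdfsResource(None, action = ['execute']) settles the whole queue in one pass, presumably through the bundled fast-hdfs-resource.jar this commit adds. A minimal sketch of how a service script would drive it, using the values from the expectations above; keytab = None stands in for the mocked keytab, the function is assumed to run inside the resource_management Script framework, and real scripts usually pre-fill the connection arguments via a functools.partial in params:

# Illustrative sketch only, not code from this commit.
from resource_management.libraries.resources.hdfs_resource import HdfsResource

def create_oozie_hdfs_dirs():
  # 'create_on_execute' only registers the request; HDFS is not touched yet.
  HdfsResource('/user/oozie',
               type = 'directory',
               action = ['create_on_execute'],
               owner = 'oozie',
               mode = 0775,
               user = 'hdfs',                        # user the queued batch runs as
               security_enabled = False,
               keytab = None,                        # placeholder for the mocked keytab
               kinit_path_local = '/usr/bin/kinit',
               hadoop_bin_dir = '/usr/bin',
               hadoop_conf_dir = '/etc/hadoop/conf',
  )
  # The trailing call with a None name flushes every queued request at once.
  HdfsResource(None,
               action = ['execute'],
               user = 'hdfs',
               security_enabled = False,
               keytab = None,
               kinit_path_local = '/usr/bin/kinit',
               hadoop_bin_dir = '/usr/bin',
               hadoop_conf_dir = '/etc/hadoop/conf',
  )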
+ 43 - 2
ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_service_check.py

@@ -21,6 +21,7 @@ from stacks.utils.RMFTestCase import *
 import resource_management.libraries.functions
 from mock.mock import MagicMock, call, patch
 
+@patch("glob.glob", new = MagicMock(return_value="/usr/something/oozie-client/lib"))
 @patch("platform.linux_distribution", new = MagicMock(return_value="Linux"))
 @patch("platform.linux_distribution", new = MagicMock(return_value="Linux"))
 class TestServiceCheck(RMFTestCase):
 class TestServiceCheck(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "OOZIE/4.0.0.2.0/package"
   COMMON_SERVICES_PACKAGE_DIR = "OOZIE/4.0.0.2.0/package"
@@ -55,10 +56,50 @@ class TestServiceCheck(RMFTestCase):
         content = StaticFile('oozieSmoke2.sh'),
         mode = 0755,
     )
-    self.assertResourceCalled('Execute', '/tmp/oozieSmoke2.sh',
+    self.assertResourceCalled('File', '/tmp/prepareOozieHdfsDirectories.sh',
+        content = StaticFile('prepareOozieHdfsDirectories.sh'),
+        mode = 0755,
+    )
+    self.assertResourceCalled('Execute', '/tmp/prepareOozieHdfsDirectories.sh /etc/oozie/conf / /etc/hadoop/conf ',
         logoutput = True,
         tries = 3,
-        command = '/tmp/oozieSmoke2.sh suse /var/lib/oozie /etc/oozie/conf /usr/bin /etc/hadoop/conf /usr/bin ambari-qa False',
+        try_sleep = 5,
+    )
+    self.assertResourceCalled('HdfsResource', '/user/ambari-qa/examples',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        source = '//examples',
+        user = 'hdfs',
+        action = ['create_on_execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+    )
+    self.assertResourceCalled('HdfsResource', '/user/ambari-qa/input-data',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        source = '//examples/input-data',
+        user = 'hdfs',
+        action = ['create_on_execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
+    )
+    self.assertResourceCalled('Execute', '/tmp/oozieSmoke2.sh suse /var/lib/oozie /etc/oozie/conf /usr/bin / /etc/hadoop/conf /usr/bin ambari-qa False',
+        logoutput = True,
         path = ['/usr/bin:/usr/bin'],
+        tries = 3,
         try_sleep = 5,
     )
+

+ 61 - 23
ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_service_check.py

@@ -4,7 +4,7 @@
 Licensed to the Apache Software Foundation (ASF) under one
 or more contributor license agreements.  See the NOTICE file
 distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
+regarding copyright ownership.  The ASF licenses this file`
 to you under the Apache License, Version 2.0 (the
 "License"); you may not use this file except in compliance
 with the License.  You may obtain a copy of the License at
@@ -32,16 +32,35 @@ class TestPigServiceCheck(RMFTestCase):
                        hdp_stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
-    self.assertResourceCalled('ExecuteHadoop', 'dfs -rmr pigsmoke.out passwd; hadoop --config /etc/hadoop/conf dfs -put /etc/passwd passwd ',
-      try_sleep = 5,
-      tries = 3,
-      user = 'ambari-qa',
-      conf_dir = '/etc/hadoop/conf',
-      security_enabled = False,
-      principal = UnknownConfigurationMock(),
-      keytab = UnknownConfigurationMock(),
-      bin_dir = '/usr/bin',
-      kinit_path_local = '/usr/bin/kinit'
+    self.assertResourceCalled('HdfsResource', '/user/ambari-qa/pigsmoke.out',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'ambari-qa',
+        action = ['delete_on_execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+    )
+    self.assertResourceCalled('HdfsResource', '/user/ambari-qa/passwd',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        source = '/etc/passwd',
+        user = 'ambari-qa',
+        action = ['create_on_execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'file',
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
     )

     self.assertResourceCalled('File', '/tmp/pigSmoke.sh',
@@ -56,7 +75,7 @@ class TestPigServiceCheck(RMFTestCase):
       try_sleep = 5,
     )

-    self.assertResourceCalled('ExecuteHadoop', 'fs -test -e pigsmoke.out',
+    self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /user/ambari-qa/pigsmoke.out',
       user = 'ambari-qa',
       bin_dir = '/usr/bin',
       conf_dir = '/etc/hadoop/conf',
@@ -72,16 +91,35 @@ class TestPigServiceCheck(RMFTestCase):
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )

-    self.assertResourceCalled('ExecuteHadoop', 'dfs -rmr pigsmoke.out passwd; hadoop --config /etc/hadoop/conf dfs -put /etc/passwd passwd ',
-      try_sleep = 5,
-      tries = 3,
-      user = 'ambari-qa',
-      principal = 'ambari-qa@EXAMPLE.COM',
-      conf_dir = '/etc/hadoop/conf',
-      security_enabled = True, 
-      keytab = '/etc/security/keytabs/smokeuser.headless.keytab',
-      bin_dir = '/usr/bin',
-      kinit_path_local = '/usr/bin/kinit'
+    self.assertResourceCalled('HdfsResource', '/user/ambari-qa/pigsmoke.out',
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'ambari-qa',
+        action = ['delete_on_execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+    )
+    self.assertResourceCalled('HdfsResource', '/user/ambari-qa/passwd',
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        kinit_path_local = '/usr/bin/kinit',
+        source = '/etc/passwd',
+        user = 'ambari-qa',
+        action = ['create_on_execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'file',
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
     )

     self.assertResourceCalled('File', '/tmp/pigSmoke.sh',
@@ -96,7 +134,7 @@ class TestPigServiceCheck(RMFTestCase):
       try_sleep = 5,
     )

-    self.assertResourceCalled('ExecuteHadoop', 'fs -test -e pigsmoke.out',
+    self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /user/ambari-qa/pigsmoke.out',
       user = 'ambari-qa',
       bin_dir = '/usr/bin',
       conf_dir = '/etc/hadoop/conf',

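The Pig and MapReduce2 service checks in this commit also exercise the file-handling side of the new resource: type = 'file' with a source uploads a local file, 'delete_on_execute' queues a removal, and both are settled by the same final execute call. A minimal sketch of that variant under the same assumptions as above (mocked keytab replaced by None, run inside the resource_management Script framework), using the values from the non-secured expectations:

# Illustrative sketch only, not code from this commit.
from resource_management.libraries.resources.hdfs_resource import HdfsResource

def prepare_pig_smoke_input():
  # Queue removal of any stale smoke-test output directory.
  HdfsResource('/user/ambari-qa/pigsmoke.out',
               type = 'directory',
               action = ['delete_on_execute'],
               user = 'ambari-qa',
               security_enabled = False,
               keytab = None,
               kinit_path_local = '/usr/bin/kinit',
               hadoop_bin_dir = '/usr/bin',
               hadoop_conf_dir = '/etc/hadoop/conf',
  )
  # Queue an upload of a local file into the smoke user's HDFS home.
  HdfsResource('/user/ambari-qa/passwd',
               type = 'file',
               action = ['create_on_execute'],
               source = '/etc/passwd',
               user = 'ambari-qa',
               security_enabled = False,
               keytab = None,
               kinit_path_local = '/usr/bin/kinit',
               hadoop_bin_dir = '/usr/bin',
               hadoop_conf_dir = '/etc/hadoop/conf',
  )
  # Settle both queued requests in one pass, as the hdfs user.
  HdfsResource(None,
               action = ['execute'],
               user = 'hdfs',
               security_enabled = False,
               keytab = None,
               kinit_path_local = '/usr/bin/kinit',
               hadoop_bin_dir = '/usr/bin',
               hadoop_conf_dir = '/etc/hadoop/conf',
  )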
+ 148 - 133
ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py

@@ -19,6 +19,8 @@ limitations under the License.
 '''
 import json
 from mock.mock import MagicMock, call, patch
+from resource_management.libraries.script.script import Script
+from resource_management.libraries.functions import version
 from stacks.utils.RMFTestCase import *
 import os
 
@@ -135,72 +137,66 @@ class TestHistoryServer(RMFTestCase):
     self.assertNoMoreResources()
 
   def assert_configure_default(self):
-    self.assertResourceCalled('HdfsDirectory', '/app-logs',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              recursive_chmod = True,
-                              owner = 'yarn',
-                              group = 'hadoop',
-                              action = ['create_delayed'],
-                              mode = 0777,
-                              bin_dir = '/usr/bin'
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/mapred',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              owner = 'mapred',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/mapred/system',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              owner = 'hdfs',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/mr-history/tmp',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              mode = 0777,
-                              owner = 'mapred',
-                              group = 'hadoop',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/mr-history/done',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              mode = 01777,
-                              owner = 'mapred',
-                              group = 'hadoop',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', None,
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              bin_dir = '/usr/bin',
-                              action = ['create'],
-                              )
+
+    self.assertResourceCalled('HdfsResource', '/app-logs',
+        security_enabled = False,
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        keytab = UnknownConfigurationMock(),
+        user = 'hdfs',
+        kinit_path_local = '/usr/bin/kinit',
+        recursive_chmod = True,
+        owner = 'yarn',
+        group = 'hadoop',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'directory',
+        action = ['create_on_execute'],
+        mode = 0777,
+    )
+    self.assertResourceCalled('HdfsResource', '/mapred',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'mapred',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+    )
+    self.assertResourceCalled('HdfsResource', '/mapred/system',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'hdfs',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+    )
+    self.assertResourceCalled('HdfsResource', '/mr-history/done',
+        security_enabled = False,
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        keytab = UnknownConfigurationMock(),
+        change_permissions_for_parents = True,
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'mapred',
+        group = 'hadoop',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'directory',
+        action = ['create_on_execute'],
+        mode = 0777,
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
+    )
     self.assertResourceCalled('Directory', '/var/run/hadoop-yarn',
       owner = 'yarn',
       group = 'hadoop',
@@ -350,72 +346,66 @@ class TestHistoryServer(RMFTestCase):
                               )
 
   def assert_configure_secured(self):
-    self.assertResourceCalled('HdfsDirectory', '/app-logs',
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              recursive_chmod = True,
-                              owner = 'yarn',
-                              group = 'hadoop',
-                              action = ['create_delayed'],
-                              bin_dir = '/usr/bin',
-                              mode = 0777,
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/mapred',
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              owner = 'mapred',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/mapred/system',
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              owner = 'hdfs',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/mr-history/tmp',
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0777,
-                              owner = 'mapred',
-                              group = 'hadoop',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/mr-history/done',
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 01777,
-                              owner = 'mapred',
-                              group = 'hadoop',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', None,
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              bin_dir = '/usr/bin',
-                              action = ['create'],
-                              )
+
+    self.assertResourceCalled('HdfsResource', '/app-logs',
+        security_enabled = True,
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        user = 'hdfs',
+        kinit_path_local = '/usr/bin/kinit',
+        recursive_chmod = True,
+        owner = 'yarn',
+        group = 'hadoop',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'directory',
+        action = ['create_on_execute'],
+        mode = 0777,
+    )
+    self.assertResourceCalled('HdfsResource', '/mapred',
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'mapred',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+    )
+    self.assertResourceCalled('HdfsResource', '/mapred/system',
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'hdfs',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+    )
+    self.assertResourceCalled('HdfsResource', '/mr-history/done',
+        security_enabled = True,
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        change_permissions_for_parents = True,
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'mapred',
+        group = 'hadoop',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'directory',
+        action = ['create_on_execute'],
+        mode = 0777,
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
+    )
     self.assertResourceCalled('Directory', '/var/run/hadoop-yarn',
       owner = 'yarn',
       group = 'hadoop',
@@ -677,6 +667,8 @@ class TestHistoryServer(RMFTestCase):
     )
     put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})
 
+  @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value="2.3.0"))
+  @patch.object(version, "get_hdp_build_version", new = MagicMock(return_value="2.3.0.0-1234"))
   def test_pre_rolling_restart_23(self):
     config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/default.json"
     with open(config_file, "r") as f:
@@ -695,9 +687,32 @@ class TestHistoryServer(RMFTestCase):
                        mocks_dict = mocks_dict)
 
     self.assertResourceCalled('Execute', 'hdp-select set hadoop-mapreduce-historyserver %s' % version)
+    self.assertResourceCalled('HdfsResource', 'hdfs:///hdp/apps/2.3.0.0-1234/mapreduce//mapreduce.tar.gz',
+        security_enabled = False,
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        keytab = UnknownConfigurationMock(),
+        source = '/usr/hdp/current/hadoop-client/mapreduce.tar.gz',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'hdfs',
+        group = 'hadoop',
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        type = 'file',
+        action = ['create_on_execute'],
+        mode = 0444,
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+    )
     self.assertNoMoreResources()
 
-    self.assertEquals(3, mocks_dict['call'].call_count)
+    self.assertEquals(2, mocks_dict['call'].call_count)
     self.assertEquals(
       "conf-select create-conf-dir --package hadoop --stack-version 2.3.0.0-1234 --conf-version 0",
        mocks_dict['call'].call_args_list[0][0][0])

+ 3 - 0
ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py

@@ -19,6 +19,7 @@ limitations under the License.
 '''
 import json
 from mock.mock import MagicMock, call, patch
+from resource_management.libraries.functions import version
 from stacks.utils.RMFTestCase import *
 import os
 
@@ -27,6 +28,7 @@ origin_exists = os.path.exists
 @patch.object(os.path, "exists", new=MagicMock(
   side_effect=lambda *args: origin_exists(args[0])
   if args[0][-2:] == "j2" else True))
+@patch.object(version, "get_hdp_build_version", new = MagicMock(return_value="2.2.0.0-1234"))
 class TestMapReduce2Client(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "YARN/2.1.0.2.0/package"
   STACK_VERSION = "2.0.6"
@@ -354,6 +356,7 @@ class TestMapReduce2Client(RMFTestCase):
                               )
     self.assertNoMoreResources()
 
+  @patch.object(version, "get_hdp_build_version", new=MagicMock(return_value="2.2.0.0-2041"))
   def test_upgrade(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/mapreduce2_client.py",
                    classname = "MapReduce2Client",

+ 38 - 26
ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_service_check.py

@@ -39,19 +39,25 @@ class TestServiceCheck(RMFTestCase):
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
     )
-    self.assertResourceCalled('ExecuteHadoop', 'fs -rm -r -f /user/ambari-qa/mapredsmokeoutput /user/ambari-qa/mapredsmokeinput',
-                      try_sleep = 5,
-                      tries = 1,
-                      user = 'ambari-qa',
-                      bin_dir = "/bin:/usr/bin:/usr/lib/hadoop-yarn/bin",
-                      conf_dir = '/etc/hadoop/conf',
+    self.assertResourceCalled('HdfsResource', '/user/ambari-qa/mapredsmokeinput',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        source = '/etc/passwd',
+        user = 'hdfs',
+        action = ['create_on_execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'file',
     )
     )
-                      try_sleep = 5,
-                      tries = 1,
-                      bin_dir = "/bin:/usr/bin:/usr/lib/hadoop-yarn/bin",
-                      user = 'ambari-qa',
-                      conf_dir = '/etc/hadoop/conf',
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
     )
     )
     self.assertResourceCalled('ExecuteHadoop', 'jar /usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples-2.*.jar wordcount /user/ambari-qa/mapredsmokeinput /user/ambari-qa/mapredsmokeoutput',
                       logoutput = True,
                       hdp_stack_version = self.STACK_VERSION,
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
     )
-                      user = 'ambari-qa',
+    self.assertResourceCalled('HdfsResource', '/user/ambari-qa/mapredsmokeinput',
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        kinit_path_local = '/usr/bin/kinit',
+        source = '/etc/passwd',
+        user = 'hdfs',
+        action = ['create_on_execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'file',
     )
     )
-                      try_sleep = 5,
-                      tries = 1,
-                      user = 'ambari-qa',
-                      bin_dir = "/bin:/usr/bin:/usr/lib/hadoop-yarn/bin",
-                      conf_dir = '/etc/hadoop/conf',
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
     )
     )
-                      try_sleep = 5,
-                      tries = 1,
-                      bin_dir = "/bin:/usr/bin:/usr/lib/hadoop-yarn/bin",
-                      user = 'ambari-qa',
-                      conf_dir = '/etc/hadoop/conf',
+    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa@EXAMPLE.COM;',
+        user = 'ambari-qa',
     )
     self.assertResourceCalled('ExecuteHadoop', 'jar /usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples-2.*.jar wordcount /user/ambari-qa/mapredsmokeinput /user/ambari-qa/mapredsmokeoutput',
                       logoutput = True,

+ 0 - 132
ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py

@@ -134,72 +134,6 @@ class TestNodeManager(RMFTestCase):
     self.assertNoMoreResources()
 
   def assert_configure_default(self):
-    self.assertResourceCalled('HdfsDirectory', '/app-logs',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              recursive_chmod = True,
-                              owner = 'yarn',
-                              group = 'hadoop',
-                              action = ['create_delayed'],
-                              bin_dir = '/usr/bin',
-                              mode = 0777,
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/mapred',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              owner = 'mapred',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/mapred/system',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              owner = 'hdfs',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/mr-history/tmp',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              mode = 0777,
-                              owner = 'mapred',
-                              group = 'hadoop',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/mr-history/done',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              mode = 01777,
-                              owner = 'mapred',
-                              group = 'hadoop',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', None,
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              bin_dir = '/usr/bin',
-                              action = ['create'],
-                              )
     self.assertResourceCalled('Directory', '/hadoop/yarn/local',
                               owner = 'yarn',
                               group = 'hadoop',
@@ -386,72 +320,6 @@ class TestNodeManager(RMFTestCase):
                               )
 
   def assert_configure_secured(self):
-    self.assertResourceCalled('HdfsDirectory', '/app-logs',
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              recursive_chmod = True,
-                              owner = 'yarn',
-                              group = 'hadoop',
-                              action = ['create_delayed'],
-                              bin_dir = '/usr/bin',
-                              mode = 0777,
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/mapred',
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              owner = 'mapred',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/mapred/system',
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              owner = 'hdfs',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/mr-history/tmp',
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0777,
-                              bin_dir = '/usr/bin',
-                              owner = 'mapred',
-                              group = 'hadoop',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/mr-history/done',
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 01777,
-                              bin_dir = '/usr/bin',
-                              owner = 'mapred',
-                              group = 'hadoop',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', None,
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              bin_dir = '/usr/bin',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              action = ['create'],
-                              )
     self.assertResourceCalled('Directory', '/hadoop/yarn/local',
                               owner = 'yarn',
                               group = 'hadoop',

+ 38 - 1
ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py

@@ -21,12 +21,17 @@ import json
 import os
 from mock.mock import MagicMock, call, patch
 from stacks.utils.RMFTestCase import *
+from resource_management.libraries.functions import version
+from resource_management.libraries.script.script import Script
 
 origin_exists = os.path.exists
 @patch("platform.linux_distribution", new = MagicMock(return_value="Linux"))
 @patch.object(os.path, "exists", new=MagicMock(
   side_effect=lambda *args: origin_exists(args[0])
   if args[0][-2:] == "j2" else True))
+
+@patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
+@patch.object(version, "get_hdp_build_version", new = MagicMock(return_value="2.0.0.0-1234"))
 class TestResourceManager(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "YARN/2.1.0.2.0/package"
   STACK_VERSION = "2.0.6"
@@ -53,6 +58,39 @@ class TestResourceManager(RMFTestCase):
 
     self.assert_configure_default()
 
+    self.assertResourceCalled('HdfsResource', '/apps/tez/',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'tez',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+        mode = 0755,
+    )
+    self.assertResourceCalled('HdfsResource', '/apps/tez/lib/',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'tez',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+        mode = 0755,
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
+    )
     self.assertResourceCalled('File', '/var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid',
                               action = ['delete'],
                               not_if = 'ls /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid` >/dev/null 2>&1',
@@ -106,7 +144,6 @@ class TestResourceManager(RMFTestCase):
     self.assert_configure_secured()
 
     pid_check_cmd = 'ls /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid` >/dev/null 2>&1'
-
     self.assertResourceCalled('File', '/var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid',
                               not_if = pid_check_cmd,
                               action=['delete'])

+ 2 - 0
ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py

@@ -21,6 +21,7 @@ import json
 import os
 from mock.mock import MagicMock, call, patch
 from stacks.utils.RMFTestCase import *
+from resource_management.libraries.functions import version
 from resource_management.libraries.script.script import Script
 
 origin_exists = os.path.exists
@@ -515,6 +516,7 @@ class TestYarnClient(RMFTestCase):
     self.assertNoMoreResources()
 
 
+  @patch.object(version, "get_hdp_build_version", new=MagicMock(return_value="2.2.0.0-2041"))
   def test_upgrade(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/yarn_client.py",
                    classname = "YarnClient",

+ 11 - 2
ambari-server/src/test/python/stacks/2.0.6/configs/default.json

@@ -532,8 +532,17 @@
         "smokeuser": "ambari-qa",
         "smokeuser": "ambari-qa",
         "kerberos_domain": "EXAMPLE.COM",
         "kerberos_domain": "EXAMPLE.COM",
         "user_group": "hadoop",
         "user_group": "hadoop",
-        "mapreduce_tar_destination_folder" : "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/",
-        "mapreduce_tar_source" : "/usr/hdp/current/hadoop-client/mapreduce.tar.gz"
+        "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz", 
+        "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
+        "pig_tar_source": "/usr/hdp/current/pig-client/pig.tar.gz", 
+        "hive_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/", 
+        "tez_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/", 
+        "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
+        "tez_tar_source": "/usr/hdp/current/tez-client/lib/tez.tar.gz", 
+        "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/", 
+        "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz", 
+        "sqoop_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/", 
+        "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz"
       },
 
       "hbase-env": {

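The cluster-env keys added above feed the tarball uploads asserted earlier (for example the mapreduce.tar.gz HdfsResource in test_historyserver.py with mode 0444): the {{ hdp_stack_version }} placeholder in a *_tar_destination_folder is filled in and the file name from the matching *_tar_source is appended. A hypothetical helper, only to illustrate how the expected destination string, doubled slash included, falls out of those two values; the commit itself does this inside the library code, not with this function:

# Hypothetical illustration, not code from this commit.
import os

def tarball_destination(tar_source, tar_destination_folder, hdp_stack_version):
  # 'hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/' with source
  # '/usr/hdp/current/hadoop-client/mapreduce.tar.gz' and version '2.3.0.0-1234'
  # yields 'hdfs:///hdp/apps/2.3.0.0-1234/mapreduce//mapreduce.tar.gz'
  # (the folder already ends with '/', hence the doubled slash in the expectation).
  folder = tar_destination_folder.replace('{{ hdp_stack_version }}', hdp_stack_version)
  return folder + '/' + os.path.basename(tar_source)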
+ 12 - 1
ambari-server/src/test/python/stacks/2.0.6/configs/secured.json

@@ -547,7 +547,18 @@
             "kerberos_domain": "EXAMPLE.COM",
             "kerberos_domain": "EXAMPLE.COM",
             "user_group": "hadoop",
             "user_group": "hadoop",
             "smokeuser_keytab": "/etc/security/keytabs/smokeuser.headless.keytab",
             "smokeuser_keytab": "/etc/security/keytabs/smokeuser.headless.keytab",
-            "kinit_path_local": "/usr/bin"
+            "kinit_path_local": "/usr/bin",
+	        "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz", 
+	        "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
+	        "pig_tar_source": "/usr/hdp/current/pig-client/pig.tar.gz", 
+	        "hive_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/", 
+	        "tez_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/", 
+	        "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
+	        "tez_tar_source": "/usr/hdp/current/tez-client/lib/tez.tar.gz", 
+	        "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/", 
+	        "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz", 
+	        "sqoop_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/", 
+	        "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz"
         },
         "hadoop-env": {
             "namenode_opt_maxnewsize": "200m",

+ 24 - 23
ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py

@@ -122,30 +122,31 @@ class TestFalconServer(RMFTestCase):
                               owner = 'falcon',
                               recursive = True
                               )
-    self.assertResourceCalled('HdfsDirectory', '/apps/falcon',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0777,
-                              owner = 'falcon',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed']
-                              )
+    self.assertResourceCalled('HdfsResource', '/apps/falcon',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'falcon',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+        mode = 0777,
+    )
     self.assertResourceCalled('Directory', '/hadoop/falcon/store',
-                              owner = 'falcon',
-                              recursive = True
-                              )
-    self.assertResourceCalled('HdfsDirectory', None,
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              bin_dir = '/usr/bin',
-                              action = ['create'],
-                              )
+        owner = 'falcon',
+        recursive = True,
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
+    )
     self.assertResourceCalled('Directory', '/hadoop/falcon',
                               owner = 'falcon',
                               recursive = True,

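The rewritten assertions above capture the new HdfsResource contract: a call with action = ['create_on_execute'] only queues a request, and the trailing HdfsResource(None, action = ['execute']) applies every queued request in one batch. In a service script the pattern looks roughly like this (a sketch built from the test expectations, assuming the usual params.HdfsResource partial):

    # Hedged sketch of the queue-and-flush HdfsResource usage
    params.HdfsResource('/apps/falcon',
                        type = 'directory',
                        action = ['create_on_execute'],  # queued, nothing touches HDFS yet
                        owner = 'falcon',
                        mode = 0777)
    params.HdfsResource(None, action = ['execute'])      # flush all queued requests at once
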
+ 1 - 1
ambari-server/src/test/python/stacks/2.1/FALCON/test_service_check.py

@@ -21,7 +21,7 @@ limitations under the License.
 from stacks.utils.RMFTestCase import *
 
 
-class TestFalconServer(RMFTestCase):
+class TestFalconServiceCheck(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "FALCON/0.5.0.2.1/package"
   STACK_VERSION = "2.1"
 

+ 48 - 52
ambari-server/src/test/python/stacks/2.1/TEZ/test_service_check.py

@@ -21,7 +21,7 @@ limitations under the License.
 from stacks.utils.RMFTestCase import *
 
 
-class TestFalconServer(RMFTestCase):
+class TestTezServiceCheck(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "TEZ/0.4.0.2.1/package"
   STACK_VERSION = "2.1"
 
@@ -33,60 +33,56 @@ class TestFalconServer(RMFTestCase):
                        hdp_stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
-    self.assertResourceCalled('ExecuteHadoop', 'fs -rm -r -f /tmp/tezsmokeinput /tmp/tezsmokeoutput',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              try_sleep = 5,
-                              kinit_path_local = '/usr/bin/kinit',
-                              tries = 3,
-                              user = 'ambari-qa',
-                              bin_dir = '/usr/bin',
-                              principal = UnknownConfigurationMock(),
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/tmp',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0777,
-                              owner = 'hdfs',
-                              bin_dir = '/usr/bin',
-                              action = ['create'],
-                              )
-    self.assertResourceCalled('ExecuteHadoop', 'fs -mkdir /tmp/tezsmokeinput',
-                              try_sleep = 5,
-                              tries = 3,
-                              bin_dir = '/usr/bin',
-                              user = 'ambari-qa',
-                              conf_dir = '/etc/hadoop/conf',
-                              )
     self.assertResourceCalled('File', '/tmp/sample-tez-test',
-                              content = 'foo\nbar\nfoo\nbar\nfoo',
-                              mode = 0755,
-                              )
-    self.assertResourceCalled('ExecuteHadoop', 'fs -put /tmp/sample-tez-test /tmp/tezsmokeinput/',
-                              try_sleep = 5,
-                              tries = 3,
-                              bin_dir = '/usr/bin',
-                              user = 'ambari-qa',
-                              conf_dir = '/etc/hadoop/conf',
-                              )
+        content = 'foo\nbar\nfoo\nbar\nfoo',
+        mode = 0755,
+    )
+    self.assertResourceCalled('HdfsResource', '/tmp/tezsmokeinput',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'ambari-qa',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+    )
+    self.assertResourceCalled('HdfsResource', '/tmp/tezsmokeinput/sample-tez-test',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        source = '/tmp/sample-tez-test',
+        user = 'hdfs',
+        owner = 'ambari-qa',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'file',
+        action = ['create_on_execute'],
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
+    )
     self.assertResourceCalled('ExecuteHadoop', 'jar /usr/lib/tez/tez-mapreduce-examples*.jar orderedwordcount /tmp/tezsmokeinput/sample-tez-test /tmp/tezsmokeoutput/',
-                              try_sleep = 5,
-                              tries = 3,
-                              bin_dir = '/usr/bin',
-                              user = 'ambari-qa',
-                              conf_dir = '/etc/hadoop/conf',
-                              )
+        try_sleep = 5,
+        tries = 3,
+        bin_dir = '/usr/bin',
+        user = 'ambari-qa',
+        conf_dir = '/etc/hadoop/conf',
+    )
     self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /tmp/tezsmokeoutput/_SUCCESS',
-                              try_sleep = 6,
-                              tries = 10,
-                              bin_dir = '/usr/bin',
-                              user = 'ambari-qa',
-                              conf_dir = '/etc/hadoop/conf',
-                              )
+        try_sleep = 6,
+        tries = 10,
+        bin_dir = '/usr/bin',
+        user = 'ambari-qa',
+        conf_dir = '/etc/hadoop/conf',
+    )
     self.assertNoMoreResources()
 
 

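For files, the same mechanism replaces `fs -put`: a request with type = 'file' and a source pointing at the local path copies that file into HDFS when the batch is executed. Roughly (a sketch mirroring the Tez smoke-test assertions above, with the same queue-and-flush caveat):

    # Hedged sketch: staging a local file into HDFS via HdfsResource
    params.HdfsResource('/tmp/tezsmokeinput/sample-tez-test',
                        type = 'file',
                        source = '/tmp/sample-tez-test',  # local file uploaded on execute
                        action = ['create_on_execute'],
                        owner = 'ambari-qa')
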
+ 83 - 38
ambari-server/src/test/python/stacks/2.2/PIG/test_pig_service_check.py

@@ -20,17 +20,13 @@ limitations under the License.
 from mock.mock import patch, MagicMock
 
 from stacks.utils.RMFTestCase import *
-from resource_management.libraries.functions import dynamic_variable_interpretation
 
 
 class TestPigServiceCheck(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "PIG/0.12.0.2.0/package"
   STACK_VERSION = "2.2"
 
-  @patch.object(dynamic_variable_interpretation, "copy_tarballs_to_hdfs")
-  def test_service_check_secure(self, copy_tarball_mock):
-    copy_tarball_mock.return_value = 0
-
+  def test_service_check_secure(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/service_check.py",
                        classname="PigServiceCheck",
                        command="service_check",
@@ -38,22 +34,43 @@ class TestPigServiceCheck(RMFTestCase):
                        hdp_stack_version=self.STACK_VERSION,
                        target=RMFTestCase.TARGET_COMMON_SERVICES
     )
-    self.assertResourceCalled("ExecuteHadoop", "dfs -rmr pigsmoke.out passwd; hadoop --config /usr/hdp/current/hadoop-client/conf dfs -put /etc/passwd passwd ",
-      try_sleep=5,
-      tries=3,
-      user="ambari-qa",
-      conf_dir="/usr/hdp/current/hadoop-client/conf",
-      security_enabled=True,
-      principal="ambari-qa@EXAMPLE.COM",
-      keytab="/etc/security/keytabs/smokeuser.headless.keytab",
-      bin_dir="/usr/hdp/current/hadoop-client/bin",
-      kinit_path_local="/usr/bin/kinit"
+    
+    self.assertResourceCalled('HdfsResource', '/user/ambari-qa/pigsmoke.out',
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'ambari-qa',
+        action = ['delete_on_execute'],
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        type = 'directory',
+    )
+    self.assertResourceCalled('HdfsResource', '/user/ambari-qa/passwd',
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        kinit_path_local = '/usr/bin/kinit',
+        source = '/etc/passwd',
+        user = 'ambari-qa',
+        action = ['create_on_execute'],
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        type = 'file',
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs@EXAMPLE.COM',
+        action = ['execute'],
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
     )
     )
     self.assertResourceCalled("File", "/tmp/pigSmoke.sh",
     self.assertResourceCalled("File", "/tmp/pigSmoke.sh",
       content=StaticFile("pigSmoke.sh"),
       mode=0755
     )
+    
 
 
     self.assertResourceCalled("Execute", "pig /tmp/pigSmoke.sh",
       path=["/usr/hdp/current/pig-client/bin:/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin"],
       try_sleep=5
       try_sleep=5
     )
 
-      user="ambari-qa",
-      bin_dir="/usr/hdp/current/hadoop-client/bin",
-      conf_dir="/usr/hdp/current/hadoop-client/conf"
+    self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /user/ambari-qa/pigsmoke.out',
+        bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        user = 'ambari-qa',
+        conf_dir = '/usr/hdp/current/hadoop-client/conf',
     )
     )
-    # Specific to HDP 2.2 and kerberized cluster
-    self.assertResourceCalled("ExecuteHadoop", "dfs -rmr pigsmoke.out passwd; hadoop --config /usr/hdp/current/hadoop-client/conf dfs -put /etc/passwd passwd ",
-      tries=3,
-      try_sleep=5,
-      user="ambari-qa",
-      conf_dir="/usr/hdp/current/hadoop-client/conf",
-      keytab="/etc/security/keytabs/smokeuser.headless.keytab",
-      principal="ambari-qa@EXAMPLE.COM",
-      security_enabled=True,
-      kinit_path_local="/usr/bin/kinit",
-      bin_dir="/usr/hdp/current/hadoop-client/bin"
+    self.assertResourceCalled('HdfsResource', '/user/ambari-qa/pigsmoke.out',
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'ambari-qa',
+        action = ['delete_on_execute'],
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        type = 'directory',
+    )
+    self.assertResourceCalled('HdfsResource', '/user/ambari-qa/passwd',
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        kinit_path_local = '/usr/bin/kinit',
+        source = '/etc/passwd',
+        user = 'ambari-qa',
+        action = ['create_on_execute'],
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        type = 'file',
+    )
+    self.assertResourceCalled('HdfsResource', 'hdfs:///hdp/apps/2.2.0.0/tez//tez.tar.gz',
+        security_enabled = True,
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        source = '/usr/hdp/current/tez-client/lib/tez.tar.gz',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs@EXAMPLE.COM',
+        owner = 'hdfs',
+        group = 'hadoop',
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        type = 'file',
+        action = ['create_on_execute'],
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs@EXAMPLE.COM',
+        action = ['execute'],
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
     )
     )
-    copy_tarball_mock.assert_called_once_with("tez", "hadoop-client", "ambari-qa", "hdfs", "hadoop")
 
     self.assertResourceCalled("Execute", "/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa@EXAMPLE.COM;",
       user="ambari-qa")
@@ -93,10 +138,10 @@ class TestPigServiceCheck(RMFTestCase):
       user="ambari-qa"
     )
 
-    self.assertResourceCalled("ExecuteHadoop", "fs -test -e pigsmoke.out",
-      user="ambari-qa",
-      bin_dir="/usr/hdp/current/hadoop-client/bin",
-      conf_dir="/usr/hdp/current/hadoop-client/conf"
+    self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /user/ambari-qa/pigsmoke.out',
+        bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        user = 'ambari-qa',
+        conf_dir = '/usr/hdp/current/hadoop-client/conf',
     )
     self.assertNoMoreResources()
 

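The Pig check also exercises the third action: delete_on_execute removes stale output before the batch's create requests run, replacing the old `dfs -rmr … ; dfs -put …` shell one-liner. A sketch of the queued delete (again illustrative, following the assertions above):

    # Hedged sketch: queuing a delete before re-creating smoke-test input
    params.HdfsResource('/user/ambari-qa/pigsmoke.out',
                        type = 'directory',
                        action = ['delete_on_execute'])   # removed when the batch executes
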
+ 76 - 12
ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py

@@ -93,6 +93,28 @@ class TestJobHistoryServer(RMFTestCase):
     self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/spark.service.keytab spark/localhost@EXAMPLE.COM; ',
         user = 'spark',
     )
+    self.assertResourceCalled('HdfsResource', 'hdfs:///hdp/apps/2.2.0.0/tez//tez.tar.gz',
+        security_enabled = True,
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        keytab = UnknownConfigurationMock(),
+        source = '/usr/hdp/current/tez-client/lib/tez.tar.gz',
+        kinit_path_local = '/usr/bin/kinit',
+        user = UnknownConfigurationMock(),
+        owner = UnknownConfigurationMock(),
+        group = 'hadoop',
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        type = 'file',
+        action = ['create_on_execute'],
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = UnknownConfigurationMock(),
+        action = ['execute'],
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+    )
     self.assertResourceCalled('Execute', '/usr/hdp/current/spark-client/sbin/start-history-server.sh',
         environment = {'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
         not_if = 'ls /var/run/spark/spark-spark-org.apache.spark.deploy.history.HistoryServer-1.pid >/dev/null 2>&1 && ps -p `cat /var/run/spark/spark-spark-org.apache.spark.deploy.history.HistoryServer-1.pid` >/dev/null 2>&1',
@@ -128,16 +150,26 @@ class TestJobHistoryServer(RMFTestCase):
         group = 'hadoop',
         recursive = True,
     )
-    self.assertResourceCalled('HdfsDirectory', '/user/spark',
+    self.assertResourceCalled('HdfsResource', '/user/spark',
         security_enabled = False,
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
         keytab = UnknownConfigurationMock(),
-        conf_dir = '/usr/hdp/current/hadoop-client/conf',
-        hdfs_user = 'hdfs',
         kinit_path_local = '/usr/bin/kinit',
-        mode = 0775,
+        user = 'hdfs',
         owner = 'spark',
-        bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        action = ['create'],
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+        mode = 0775,
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
     )
     self.assertResourceCalled('PropertiesFile', '/usr/hdp/current/spark-client/conf/spark-defaults.conf',
         key_value_delimiter = ' ',
@@ -175,16 +207,26 @@ class TestJobHistoryServer(RMFTestCase):
         group = 'hadoop',
         recursive = True,
     )
-    self.assertResourceCalled('HdfsDirectory', '/user/spark',
+    self.assertResourceCalled('HdfsResource', '/user/spark',
         security_enabled = True,
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
         keytab = UnknownConfigurationMock(),
-        conf_dir = '/usr/hdp/current/hadoop-client/conf',
-        hdfs_user = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
-        mode = 0775,
+        user = UnknownConfigurationMock(),
         owner = 'spark',
-        bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        action = ['create'],
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+        mode = 0775,
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = UnknownConfigurationMock(),
+        action = ['execute'],
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
     )
     self.assertResourceCalled('PropertiesFile', '/usr/hdp/current/spark-client/conf/spark-defaults.conf',
         key_value_delimiter = ' ',
@@ -229,6 +271,28 @@ class TestJobHistoryServer(RMFTestCase):
                        mocks_dict = mocks_dict)
 
     self.assertResourceCalled('Execute', 'hdp-select set spark-historyserver {0}'.format(version))
+    self.assertResourceCalled('HdfsResource', 'hdfs:///hdp/apps/2.2.0.0/tez//tez.tar.gz',
+        security_enabled = False,
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        keytab = UnknownConfigurationMock(),
+        source = '/usr/hdp/current/tez-client/lib/tez.tar.gz',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'hdfs',
+        group = 'hadoop',
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        type = 'file',
+        action = ['create_on_execute'],
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+    )
     self.assertNoMoreResources()
 
     self.assertEquals(2, mocks_dict['call'].call_count)

+ 12 - 1
ambari-server/src/test/python/stacks/2.2/configs/default.json

 @@ -185,7 +185,18 @@
             "ignore_groupsusers_create": "false",
             "smokeuser": "ambari-qa",
             "kerberos_domain": "EXAMPLE.COM",
-            "user_group": "hadoop"
+            "user_group": "hadoop",
+	        "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz", 
+	        "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
+	        "pig_tar_source": "/usr/hdp/current/pig-client/pig.tar.gz", 
+	        "hive_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/", 
+	        "tez_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/", 
+	        "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
+	        "tez_tar_source": "/usr/hdp/current/tez-client/lib/tez.tar.gz", 
+	        "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/", 
+	        "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz", 
+	        "sqoop_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/", 
+	        "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz"
         },
         "ranger-knox-plugin-properties": {
             "POLICY_MGR_URL": "{{policymgr_mgr_url}}", 

+ 12 - 1
ambari-server/src/test/python/stacks/2.2/configs/secured.json

 @@ -173,7 +173,18 @@
             "user_group": "hadoop",
             "smokeuser_keytab": "/etc/security/keytabs/smokeuser.headless.keytab",
             "smokeuser_principal_name": "ambari-qa@EXAMPLE.COM",
-            "kinit_path_local": "/usr/bin"
+            "kinit_path_local": "/usr/bin",
+	        "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz", 
+	        "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
+	        "pig_tar_source": "/usr/hdp/current/pig-client/pig.tar.gz", 
+	        "hive_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/", 
+	        "tez_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/", 
+	        "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
+	        "tez_tar_source": "/usr/hdp/current/tez-client/lib/tez.tar.gz", 
+	        "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/", 
+	        "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz", 
+	        "sqoop_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/", 
+	        "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz"
         },
         "webhcat-site": {
             "templeton.jar": "/usr/hdp/current/hive-webhcat/share/webhcat/svr/lib/hive-webhcat-*.jar",

+ 35 - 28
ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py

@@ -33,35 +33,42 @@ class TestMahoutClient(RMFTestCase):
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
 
-    self.assertResourceCalled('ExecuteHadoop', 'fs -rm -r -f /user/ambari-qa/mahoutsmokeoutput /user/ambari-qa/mahoutsmokeinput',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/usr/hdp/current/hadoop-client/conf',
-                              try_sleep = 5,
-                              kinit_path_local = '/usr/bin/kinit',
-                              tries = 3,
-                              user = 'ambari-qa',
-                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
-                              principal = UnknownConfigurationMock(),
-                              )
-    self.assertResourceCalled('ExecuteHadoop', 'fs -mkdir /user/ambari-qa/mahoutsmokeinput',
-                              try_sleep = 5,
-                              tries = 3,
-                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
-                              user = 'ambari-qa',
-                              conf_dir = '/usr/hdp/current/hadoop-client/conf',
-                              )
     self.assertResourceCalled('File', '/tmp/sample-mahout-test.txt',
-                              content = 'Test text which will be converted to sequence file.',
-                              mode = 0755,
-                              )
-    self.assertResourceCalled('ExecuteHadoop', 'fs -put /tmp/sample-mahout-test.txt /user/ambari-qa/mahoutsmokeinput/',
-                              try_sleep = 5,
-                              tries = 3,
-                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
-                              user = 'ambari-qa',
-                              conf_dir = '/usr/hdp/current/hadoop-client/conf',
-                              )
+        content = 'Test text which will be converted to sequence file.',
+        mode = 0755,
+    )
+    self.assertResourceCalled('HdfsResource', '/user/ambari-qa/mahoutsmokeinput',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'ambari-qa',
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+    )
+    self.assertResourceCalled('HdfsResource', '/user/ambari-qa/mahoutsmokeinput/sample-mahout-test.txt',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        source = '/tmp/sample-mahout-test.txt',
+        user = 'hdfs',
+        owner = 'ambari-qa',
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        type = 'file',
+        action = ['create_on_execute'],
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+    )
     self.assertResourceCalled('Execute', 'mahout seqdirectory --input /user/ambari-qa/mahoutsmokeinput/'
                                          'sample-mahout-test.txt --output /user/ambari-qa/mahoutsmokeoutput/ '
                                          '--charset utf-8',

+ 42 - 0
contrib/fast-hdfs-resource/dependency-reduced-pom.xml

@@ -0,0 +1,42 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>org.apache.ambari</groupId>
+  <artifactId>fast-hdfs-resource</artifactId>
+  <name>fast-hdfs-resource</name>
+  <version>0.0.1-SNAPSHOT</version>
+  <url>http://maven.apache.org</url>
+  <build>
+    <plugins>
+      <plugin>
+        <artifactId>maven-shade-plugin</artifactId>
+        <version>2.3</version>
+        <executions>
+          <execution>
+            <phase>package</phase>
+            <goals>
+              <goal>shade</goal>
+            </goals>
+            <configuration>
+              <transformers>
+                <transformer>
+                  <mainClass>org.apache.ambari.fast_hdfs_resource.Runner</mainClass>
+                </transformer>
+              </transformers>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+  <repositories>
+    <repository>
+      <id>hdp.internal</id>
+      <url>http://repo1.maven.org/maven2</url>
+    </repository>
+  </repositories>
+  <properties>
+    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+  </properties>
+</project>
+

+ 86 - 0
contrib/fast-hdfs-resource/pom.xml

@@ -0,0 +1,86 @@
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+
+  <groupId>org.apache.ambari</groupId>
+  <artifactId>fast-hdfs-resource</artifactId>
+  <version>0.0.1-SNAPSHOT</version>
+  <packaging>jar</packaging>
+
+  <name>fast-hdfs-resource</name>
+  <url>http://maven.apache.org</url>
+  <repositories>
+    <repository>
+      <id>hdp.internal</id>
+      <url>http://repo1.maven.org/maven2</url>
+    </repository>
+  </repositories>
+  <properties>
+    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+  </properties>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-tools</artifactId>
+      <version>1.2.1</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-core</artifactId>
+      <version>1.2.1</version>
+    </dependency>
+    <dependency>
+      <groupId>com.google.code.gson</groupId>
+      <artifactId>gson</artifactId>
+      <version>2.2.2</version>
+    </dependency>
+  </dependencies>
+
+
+  <!-- Create executable jar with the application entry point -->
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-shade-plugin</artifactId>
+        <version>2.3</version>
+        <executions>
+          <execution>
+            <phase>package</phase>
+            <goals>
+              <goal>shade</goal>
+            </goals>
+            <configuration>
+              <transformers>
+                <transformer
+                  implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
+                  <mainClass>org.apache.ambari.fast_hdfs_resource.Runner
+                  </mainClass>
+                </transformer>
+              </transformers>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+
+</project>

+ 57 - 0
contrib/fast-hdfs-resource/resources/example.json

@@ -0,0 +1,57 @@
+[
+{
+	"target":"/tmp/some999",
+	"type":"directory",
+	"action":"delete"
+},
+{
+	"target":"/tmp/some999/more/dirs/for/recursive/tests",
+	"type":"directory",
+	"action":"create"
+},
+{
+	"target":"/tmp/some999/more/dirs/for/recursive/tests/file_empty.txt",
+	"type":"file",
+	"action":"create"
+},
+{
+	"target":"/tmp/some999",
+	"type":"directory",
+	"action":"create",
+	"owner":"oozie"
+},
+{
+	"target":"/tmp/some999",
+	"type":"directory",
+	"action":"create",
+	"group":"hive"
+},
+{
+	"target":"/tmp/some999",
+	"type":"directory",
+	"action":"create",
+	"mode":"777"
+},
+{
+	"target":"/tmp/some999/more/dirs",
+	"type":"directory",
+	"action":"create",
+	"owner":"yarn",
+	"group":"mapred",
+	"recursiveChown":true,
+	"mode":"757",
+	"recursiveChmod":true
+},
+{
+	"source":"/tmp/my.txt",
+	"target":"/tmp/some999/my_file.txt",
+	"type":"file",
+	"action":"create"
+},
+{
+	"source":"/tmp/a",
+	"target":"/tmp/some999/a_dir",
+	"type":"directory",
+	"action":"create"
+}
+]

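example.json above documents the batch format the shaded fast-hdfs-resource.jar consumes: a JSON array of requests that org.apache.ambari.fast_hdfs_resource.Runner applies in order. On the agent side, a provider can dump its queued requests to such a file and invoke the jar once per batch, roughly as follows (a sketch only; the file and jar paths are illustrative, not the provider's exact ones):

    # Hedged sketch: handing a request batch to the fast-hdfs-resource Runner
    import json
    from resource_management.core.resources.system import Execute

    pending_requests = [
        {"target": "/tmp/some999", "type": "directory", "action": "create", "owner": "oozie"},
    ]
    json_path = '/var/lib/ambari-agent/tmp/hdfs_resources.json'    # illustrative path
    jar_path = '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar'  # illustrative path

    with open(json_path, 'w') as f:
        json.dump(pending_requests, f)  # serialize the queued requests

    Execute('hadoop --config /etc/hadoop/conf jar %s %s' % (jar_path, json_path),
            user = 'hdfs')
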
Some files were not shown because too many files changed in this diff