
Revert "AMBARI-8932. Creating hdfs directories on deploy takes too long, Part 2, reduces deploy time by ~6min (aonishuk)"

Andrew Onishuk 10 years ago
parent
commit
8cf0e91550
67 changed files with 1611 additions and 1710 deletions
  1. 0 7
      ambari-agent/pom.xml
  2. 3 3
      ambari-agent/src/test/python/resource_management/TestContentSources.py
  3. 68 0
      ambari-agent/src/test/python/resource_management/TestCopyFromLocal.py
  4. 5 5
      ambari-agent/src/test/python/resource_management/TestPropertiesFileResource.py
  5. 2 2
      ambari-agent/src/test/python/resource_management/TestRepositoryResource.py
  6. 5 5
      ambari-agent/src/test/python/resource_management/TestXmlConfigResource.py
  7. 1 1
      ambari-common/src/main/python/resource_management/core/source.py
  8. 0 1
      ambari-common/src/main/python/resource_management/libraries/functions/__init__.py
  9. 0 72
      ambari-common/src/main/python/resource_management/libraries/functions/get_namenode_states.py
  10. 1 28
      ambari-common/src/main/python/resource_management/libraries/functions/version.py
  11. 2 1
      ambari-common/src/main/python/resource_management/libraries/providers/__init__.py
  12. 89 0
      ambari-common/src/main/python/resource_management/libraries/providers/copy_from_local.py
  13. 112 0
      ambari-common/src/main/python/resource_management/libraries/providers/hdfs_directory.py
  14. 0 109
      ambari-common/src/main/python/resource_management/libraries/providers/hdfs_resource.py
  15. 2 1
      ambari-common/src/main/python/resource_management/libraries/resources/__init__.py
  16. 40 0
      ambari-common/src/main/python/resource_management/libraries/resources/copy_from_local.py
  17. 45 0
      ambari-common/src/main/python/resource_management/libraries/resources/hdfs_directory.py
  18. 0 77
      ambari-common/src/main/python/resource_management/libraries/resources/hdfs_resource.py
  19. 3 5
      ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py
  20. 7 9
      ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params.py
  21. 4 6
      ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase.py
  22. 7 9
      ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params.py
  23. 2 1
      ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/service_check.py
  24. BIN
      ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/files/fast-hdfs-resource.jar
  25. 0 5
      ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py
  26. 4 7
      ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py
  27. 2 0
      ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_snamenode.py
  28. 8 13
      ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params.py
  29. 39 15
      ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/service_check.py
  30. 3 27
      ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/files/templetonSmoke.sh
  31. 3 83
      ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
  32. 6 3
      ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
  33. 36 19
      ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/install_jars.py
  34. 15 51
      ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py
  35. 72 1
      ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
  36. 1 1
      ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
  37. 4 29
      ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/oozieSmoke2.sh
  38. 2 4
      ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
  39. 7 8
      ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params.py
  40. 7 13
      ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params.py
  41. 31 44
      ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py
  42. 7 9
      ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params.py
  43. 3 21
      ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/historyserver.py
  44. 9 19
      ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py
  45. 10 17
      ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/yarn.py
  46. 89 106
      ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
  47. 90 0
      ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py
  48. 0 6
      ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
  49. 0 6
      ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
  50. 189 225
      ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
  51. 29 41
      ambari-server/src/test/python/stacks/2.0.6/HDFS/test_service_check.py
  52. 0 6
      ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py
  53. 0 12
      ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py
  54. 80 192
      ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
  55. 134 0
      ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
  56. 21 45
      ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
  57. 22 90
      ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_service_check.py
  58. 132 156
      ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
  59. 0 2
      ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
  60. 132 0
      ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
  61. 0 2
      ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py
  62. 1 3
      ambari-server/src/test/python/stacks/2.0.6/configs/default.json
  63. 20 23
      ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
  64. 0 1
      ambari-server/src/test/python/unitTests.py
  65. 1 1
      ambari-web/app/config.js
  66. 0 58
      contrib/fast-hdfs-resource/dependency-reduced-pom.xml
  67. 4 4
      contrib/fast-hdfs-resource/src/main/java/org/apache/ambari/fast_hdfs_resource/Resource.java

+ 0 - 7
ambari-agent/pom.xml

@@ -402,12 +402,6 @@
                 </source>
               </sources>
             </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-agent/lib</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-            </mapping>
           </mappings>
         </configuration>
       </plugin>
@@ -522,7 +516,6 @@
                 <path>/var/lib/${project.artifactId}/data/tmp</path>
                 <path>/var/lib/${project.artifactId}/keys</path>
                 <path>${package.log.dir}</path>
-                <path>/var/lib/${project.artifactId}/lib</path>
               </paths>
               <mapper>
                 <type>perm</type>

+ 3 - 3
ambari-agent/src/test/python/resource_management/TestContentSources.py

@@ -310,7 +310,7 @@ class TestContentSources(TestCase):
      content = template.get_content()
    self.assertEqual(open_mock.call_count, 1)

-    self.assertEqual(u'test template content', content)
+    self.assertEqual(u'test template content\n', content)
    open_mock.assert_called_with('/absolute/path/test.j2', 'rb')
    self.assertEqual(getmtime_mock.call_count, 1)
    getmtime_mock.assert_called_with('/absolute/path/test.j2')
@@ -323,7 +323,7 @@ class TestContentSources(TestCase):
      template = InlineTemplate("{{test_arg1}} template content", [], test_arg1 = "test")
      content = template.get_content()

-    self.assertEqual(u'test template content', content)
+    self.assertEqual(u'test template content\n', content)

  def test_template_imports(self):
    """
@@ -339,4 +339,4 @@ class TestContentSources(TestCase):
    with Environment("/base") as env:
      template = InlineTemplate("{{test_arg1}} template content {{os.path.join(path[0],path[1])}}", [os], test_arg1 = "test", path = ["/one","two"])
      content = template.get_content()
-    self.assertEqual(u'test template content /one/two', content)
+    self.assertEqual(u'test template content /one/two\n', content)

+ 68 - 0
ambari-agent/src/test/python/resource_management/TestCopyFromLocal.py

@@ -0,0 +1,68 @@
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+   http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+from unittest import TestCase
+from mock.mock import patch, MagicMock
+from resource_management import *
+from resource_management.core import shell
+
+@patch.object(shell, "call", new = MagicMock(return_value=(1, "")))
+@patch.object(System, "os_family", new = 'redhat')
+class TestCopyFromLocal(TestCase):
+
+  @patch("resource_management.libraries.providers.execute_hadoop.ExecuteHadoopProvider")
+  def test_run_default_args(self, execute_hadoop_mock):
+    with Environment() as env:
+      CopyFromLocal('/user/testdir/*.files',
+        owner='user1',
+        dest_dir='/apps/test/',
+        kinnit_if_needed='',
+        hdfs_user='hdfs'
+      )
+      self.assertEqual(execute_hadoop_mock.call_count, 2)
+      call_arg_list = execute_hadoop_mock.call_args_list
+      self.assertEqual('fs -copyFromLocal /user/testdir/*.files /apps/test/',
+                       call_arg_list[0][0][0].command)
+      print call_arg_list[0][0][0].arguments
+      self.assertEquals({'not_if': "/usr/bin/sudo su user1 -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]PATH=$PATH:/usr/bin hadoop fs -ls /apps/test//*.files'", 'bin_dir': '/usr/bin', 'user': 'user1', 'conf_dir': '/etc/hadoop/conf'},
+                        call_arg_list[0][0][0].arguments)
+      self.assertEquals('fs -chown user1 /apps/test//*.files', call_arg_list[1][0][0].command)
+      self.assertEquals({'user': 'hdfs', 'bin_dir': '/usr/bin', 'conf_dir': '/etc/hadoop/conf'}, call_arg_list[1][0][0].arguments)
+
+
+  @patch("resource_management.libraries.providers.execute_hadoop.ExecuteHadoopProvider")
+  def test_run_with_chmod(self, execute_hadoop_mock):
+    with Environment() as env:
+      CopyFromLocal('/user/testdir/*.files',
+        mode=0655,
+        owner='user1',
+        group='hdfs',
+        dest_dir='/apps/test/',
+        kinnit_if_needed='',
+        hdfs_user='hdfs'
+      )
+      self.assertEqual(execute_hadoop_mock.call_count, 3)
+      call_arg_list = execute_hadoop_mock.call_args_list
+      self.assertEqual('fs -copyFromLocal /user/testdir/*.files /apps/test/',
+                       call_arg_list[0][0][0].command)
+      self.assertEquals({'not_if': "/usr/bin/sudo su user1 -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]PATH=$PATH:/usr/bin hadoop fs -ls /apps/test//*.files'", 'bin_dir': '/usr/bin', 'user': 'user1', 'conf_dir': '/etc/hadoop/conf'},
+                        call_arg_list[0][0][0].arguments)
+      self.assertEquals('fs -chown user1:hdfs /apps/test//*.files', call_arg_list[1][0][0].command)
+      self.assertEquals({'user': 'hdfs', 'bin_dir': '/usr/bin', 'conf_dir': '/etc/hadoop/conf'}, call_arg_list[1][0][0].arguments)
+
+

+ 5 - 5
ambari-agent/src/test/python/resource_management/TestPropertiesFileResource.py

@@ -65,7 +65,7 @@ class TestPropertiesFIleResource(TestCase):
                     properties={}
      )

-    create_file_mock.assert_called_with('/somewhere_in_system/one_file.properties', u'# Generated by Apache Ambari. Today is Wednesday\n    \n    ')
+    create_file_mock.assert_called_with('/somewhere_in_system/one_file.properties', u'# Generated by Apache Ambari. Today is Wednesday\n    \n    \n')
    ensure_mock.assert_called()


@@ -98,7 +98,7 @@ class TestPropertiesFIleResource(TestCase):
                     properties={},
      )

-    create_file_mock.assert_called_with('/dir/and/dir/file.txt', u'# Generated by Apache Ambari. Some other day\n    \n    ')
+    create_file_mock.assert_called_with('/dir/and/dir/file.txt', u'# Generated by Apache Ambari. Some other day\n    \n    \n')
    ensure_mock.assert_called()


@@ -131,7 +131,7 @@ class TestPropertiesFIleResource(TestCase):
                     properties={'property1': 'value1'},
      )

-    create_file_mock.assert_called_with('/dir/new_file', u'# Generated by Apache Ambari. 777\n    \nproperty1=value1\n    ')
+    create_file_mock.assert_called_with('/dir/new_file', u'# Generated by Apache Ambari. 777\n    \nproperty1=value1\n    \n')
    ensure_mock.assert_called()


@@ -169,7 +169,7 @@ class TestPropertiesFIleResource(TestCase):
                     },
      )

-    create_file_mock.assert_called_with('/dir/new_file', u"# Generated by Apache Ambari. 777\n    \n=\nprop.1='.'yyyy-MM-dd-HH\nprop.2=INFO, openjpa\nprop.3=%d{ISO8601} %5p %c{1}:%L - %m%n\nprop.4=${oozie.log.dir}/oozie.log\nprop.empty=\n    ")
+    create_file_mock.assert_called_with('/dir/new_file', u"# Generated by Apache Ambari. 777\n    \n=\nprop.1='.'yyyy-MM-dd-HH\nprop.2=INFO, openjpa\nprop.3=%d{ISO8601} %5p %c{1}:%L - %m%n\nprop.4=${oozie.log.dir}/oozie.log\nprop.empty=\n    \n")
    ensure_mock.assert_called()


@@ -206,5 +206,5 @@ class TestPropertiesFIleResource(TestCase):
      )

    read_file_mock.assert_called()
-    create_file_mock.assert_called_with('/dir1/new_file', u'# Generated by Apache Ambari. 777\n    \nproperty_1=value1\n    ')
+    create_file_mock.assert_called_with('/dir1/new_file', u'# Generated by Apache Ambari. 777\n    \nproperty_1=value1\n    \n')
    ensure_mock.assert_called()

+ 2 - 2
ambari-agent/src/test/python/resource_management/TestRepositoryResource.py

@@ -144,7 +144,7 @@ class TestRepositoryResource(TestCase):
      template_content = template_item[1]['content'].get_content()
      
      self.assertEquals(template_name, '/tmp/1.txt')
-      self.assertEquals(template_content, 'deb http://download.base_url.org/rpm/ a b c')
+      self.assertEquals(template_content, 'deb http://download.base_url.org/rpm/ a b c\n')
      
      copy_item = str(file_mock.call_args_list[1])
      self.assertEqual(copy_item, "call('/etc/apt/sources.list.d/HDP.list', content=StaticFile('/tmp/1.txt'))")
@@ -179,7 +179,7 @@ class TestRepositoryResource(TestCase):
      template_content = template_item[1]['content'].get_content()
      
      self.assertEquals(template_name, '/tmp/1.txt')
-      self.assertEquals(template_content, 'deb http://download.base_url.org/rpm/ a b c')
+      self.assertEquals(template_content, 'deb http://download.base_url.org/rpm/ a b c\n')
      
      self.assertEqual(file_mock.call_count, 1)
      self.assertEqual(execute_mock.call_count, 0)

+ 5 - 5
ambari-agent/src/test/python/resource_management/TestXmlConfigResource.py

@@ -62,7 +62,7 @@ class TestXmlConfigResource(TestCase):
                configuration_attributes={}
                )

-    create_file_mock.assert_called_with('/dir/conf/file.xml', '<!--Wed 2014-02-->\n    <configuration>\n    \n  </configuration>')
+    create_file_mock.assert_called_with('/dir/conf/file.xml', u'<!--Wed 2014-02-->\n    <configuration>\n    \n  </configuration>\n')


  @patch("resource_management.core.providers.system._ensure_metadata")
@@ -91,7 +91,7 @@ class TestXmlConfigResource(TestCase):
                configuration_attributes={'attr': {'property1': 'attr_value'}}
                )

-    create_file_mock.assert_called_with('/dir/conf/file.xml', '<!--Wed 2014-02-->\n    <configuration>\n    \n    <property>\n      <name>property1</name>\n      <value>value1</value>\n      <attr>attr_value</attr>\n    </property>\n    \n  </configuration>')
+    create_file_mock.assert_called_with('/dir/conf/file.xml', u'<!--Wed 2014-02-->\n    <configuration>\n    \n    <property>\n      <name>property1</name>\n      <value>value1</value>\n      <attr>attr_value</attr>\n    </property>\n    \n  </configuration>\n')


  @patch("resource_management.core.providers.system._ensure_metadata")
@@ -144,8 +144,8 @@ class TestXmlConfigResource(TestCase):
                    }
                })

-    create_file_mock.assert_called_with('/dir/conf/file.xml', '<!--Wed 2014-02-->\n    <configuration>\n    \n    <property>\n      <name></name>\n      <value></value>\n    </property>\n    \n    <property>\n      <name>prop.1</name>\n      <value>&#39;.&#39;yyyy-MM-dd-HH</value>\n      <attr1>x</attr1>\n    </property>\n    \n    <property>\n      <name>prop.2</name>\n      <value>INFO, openjpa</value>\n    </property>\n    \n    <property>\n      <name>prop.3</name>\n      <value>%d{ISO8601} %5p %c{1}:%L - %m%n</value>\n      <attr2>value3</attr2>\n    </property>\n    \n    <property>\n      <name>prop.4</name>\n      <value>${oozie.log.dir}/oozie.log</value>\n      <attr_value_empty></attr_value_empty>\n      <attr2>value4</attr2>\n    </property>\n    \n    <property>\n      <name>prop.empty</name>\n      <value></value>\n      <attr_value_empty></attr_value_empty>\n    </property>\n    \n  </configuration>')
-  
+    create_file_mock.assert_called_with('/dir/conf/file.xml', u'<!--Wed 2014-02-->\n    <configuration>\n    \n    <property>\n      <name></name>\n      <value></value>\n    </property>\n    \n    <property>\n      <name>prop.1</name>\n      <value>&#39;.&#39;yyyy-MM-dd-HH</value>\n      <attr1>x</attr1>\n    </property>\n    \n    <property>\n      <name>prop.2</name>\n      <value>INFO, openjpa</value>\n    </property>\n    \n    <property>\n      <name>prop.3</name>\n      <value>%d{ISO8601} %5p %c{1}:%L - %m%n</value>\n      <attr2>value3</attr2>\n    </property>\n    \n    <property>\n      <name>prop.4</name>\n      <value>${oozie.log.dir}/oozie.log</value>\n      <attr_value_empty></attr_value_empty>\n      <attr2>value4</attr2>\n    </property>\n    \n    <property>\n      <name>prop.empty</name>\n      <value></value>\n      <attr_value_empty></attr_value_empty>\n    </property>\n    \n  </configuration>\n')
+
  @patch("resource_management.core.providers.system._ensure_metadata")
  @patch.object(sudo, "create_file")
  @patch.object(os.path, "exists")
@@ -177,7 +177,7 @@ class TestXmlConfigResource(TestCase):
                configuration_attributes={}
                )

-    create_file_mock.assert_called_with('/dir/conf/file.xml', '<!--Wed 2014-02-->\n    <configuration>\n    \n    <property>\n      <name></name>\n      <value></value>\n    </property>\n    \n    <property>\n      <name>first</name>\n      <value>should be first</value>\n    </property>\n    \n    <property>\n      <name>second</name>\n      <value>should be second</value>\n    </property>\n    \n    <property>\n      <name>third</name>\n      <value>should be third</value>\n    </property>\n    \n    <property>\n      <name>z_last</name>\n      <value>should be last</value>\n    </property>\n    \n  </configuration>')
+    create_file_mock.assert_called_with('/dir/conf/file.xml', u'<!--Wed 2014-02-->\n    <configuration>\n    \n    <property>\n      <name></name>\n      <value></value>\n    </property>\n    \n    <property>\n      <name>first</name>\n      <value>should be first</value>\n    </property>\n    \n    <property>\n      <name>second</name>\n      <value>should be second</value>\n    </property>\n    \n    <property>\n      <name>third</name>\n      <value>should be third</value>\n    </property>\n    \n    <property>\n      <name>z_last</name>\n      <value>should be last</value>\n    </property>\n    \n  </configuration>\n')

  @patch("resource_management.libraries.providers.xml_config.File")
  @patch.object(os.path, "exists")

+ 1 - 1
ambari-common/src/main/python/resource_management/core/source.py

@@ -124,7 +124,7 @@ else:
      self.context.update(variables)
      
      rendered = self.template.render(self.context)
-      return rendered
+      return rendered + "\n" if not rendered.endswith('\n') else rendered
    
  class InlineTemplate(Template):
    def __init__(self, name, extra_imports=[], **kwargs):
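For reference, a minimal sketch of the restored behavior, mirroring the updated assertions in TestContentSources (assumes the usual resource_management star-import works in your environment):

  from resource_management import *

  with Environment("/base") as env:
    # get_content() now guarantees exactly one trailing newline on rendered output
    content = InlineTemplate("{{test_arg1}} template content", [], test_arg1 = "test").get_content()
    assert content == u'test template content\n'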

+ 0 - 1
ambari-common/src/main/python/resource_management/libraries/functions/__init__.py

@@ -33,7 +33,6 @@ from resource_management.libraries.functions.get_port_from_url import *
 from resource_management.libraries.functions.hive_check import *
 from resource_management.libraries.functions.version import *
 from resource_management.libraries.functions.format_jvm_option import *
-from resource_management.libraries.functions.get_namenode_states import *

 IS_WINDOWS = platform.system() == "Windows"


+ 0 - 72
ambari-common/src/main/python/resource_management/libraries/functions/get_namenode_states.py

@@ -1,72 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-
-from resource_management.libraries.script import UnknownConfiguration
-
-__all__ = ["get_namenode_states", "get_active_namenode"]
-
-HDFS_NN_STATE_ACTIVE = 'active'
-HDFS_NN_STATE_STANDBY = 'standby'
-
-NAMENODE_HTTP_FRAGMENT = 'dfs.namenode.http-address.{0}.{1}'
-JMX_URI_FRAGMENT = "http://{0}/jmx?qry=Hadoop:service=NameNode,name=NameNodeStatus"
-  
-def get_namenode_states(hdfs_site):
-  active_namenodes = []
-  standby_namenodes = []
-  unknown_namenodes = []
-  
-  name_service = hdfs_site['dfs.nameservices']
-  nn_unique_ids_key = 'dfs.ha.namenodes.' + name_service
-
-  # now we have something like 'nn1,nn2,nn3,nn4'
-  # turn it into dfs.namenode.[property].[dfs.nameservices].[nn_unique_id]
-  # ie dfs.namenode.http-address.hacluster.nn1
-  nn_unique_ids = hdfs_site[nn_unique_ids_key].split(',')
-  for nn_unique_id in nn_unique_ids:
-    key = NAMENODE_HTTP_FRAGMENT.format(name_service,nn_unique_id)
-
-    if key in hdfs_site:
-      # use str() to ensure that unicode strings do not have the u' in them
-      value = str(hdfs_site[key])
-
-      try:
-        jmx_uri = JMX_URI_FRAGMENT.format(value)
-        state = get_value_from_jmx(jmx_uri,'State')
-
-        if state == HDFS_NN_STATE_ACTIVE:
-          active_namenodes.append(value)
-        elif state == HDFS_NN_STATE_STANDBY:
-          standby_namenodes.append(value)
-        else:
-          unknown_namenodes.append(value)
-      except:
-        unknown_namenodes.append(value)
-        
-  return active_namenodes, active_namenodes, unknown_namenodes
-
-def get_active_namenode(hdfs_site):
-  active_namenodes = get_namenode_states(hdfs_site)[0]
-  if active_namenodes:
-    return active_namenodes[0]
-  else:
-    return UnknownConfiguration('fs_root')

+ 1 - 28
ambari-common/src/main/python/resource_management/libraries/functions/version.py

@@ -19,13 +19,7 @@ limitations under the License.
 Ambari Agent

 """
-import os
 import re
-from resource_management.core import shell
-from resource_management.core.exceptions import Fail
-from resource_management.libraries.script.config_dictionary import UnknownConfiguration
-
-__all__ = ["format_hdp_stack_version", "compare_versions", "get_hdp_build_version"]

 def _normalize(v, desired_segments=0):
   """
@@ -76,25 +70,4 @@ def compare_versions(version1, version2):
   :return: Returns -1 if version1 is before version2, 0 if they are equal, and 1 if version1 is after version2
   """
   max_segments = max(len(version1.split(".")), len(version2.split(".")))
-  return cmp(_normalize(version1, desired_segments=max_segments), _normalize(version2, desired_segments=max_segments))
-
-
-def get_hdp_build_version(hdp_stack_version):
-  """
-  Used to check hdp_stack_version for stacks >= 2.2
-  :param hdp_stack_version: version for stacks >= 2.2
-  :return: checked hdp_version (or UnknownConfiguration for stacks < 2.2)
-  """
-  HDP_SELECT = "/usr/bin/hdp-select"
-  if hdp_stack_version != "" and compare_versions(hdp_stack_version, "2.2.0.0") >= 0 and os.path.exists(HDP_SELECT):
-    code, out = shell.call('{0} status'.format(HDP_SELECT))
-
-    matches = re.findall(r"([\d\.]+\-\d+)", out)
-    hdp_version = matches[0] if matches and len(matches) > 0 else None
-
-    if not hdp_version:
-      raise Fail("Could not parse HDP version from output of hdp-select: %s" % str(out))
-
-    return hdp_version
-  else:
-    return UnknownConfiguration('hdp_version')
+  return cmp(_normalize(version1, desired_segments=max_segments), _normalize(version2, desired_segments=max_segments))
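After this revert, version.py is back to pure version-string comparison (the hdp-select lookup is gone). A small sketch of the documented contract, assuming segment counts are normalized before comparing as the code above suggests:

  from resource_management.libraries.functions.version import compare_versions

  assert compare_versions("2.1.0",   "2.2.0.0") == -1  # version1 is earlier
  assert compare_versions("2.2.0.0", "2.2.0.0") == 0   # equal
  assert compare_versions("2.3.1",   "2.2.0.0") == 1   # version1 is later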

+ 2 - 1
ambari-common/src/main/python/resource_management/libraries/providers/__init__.py

@@ -39,6 +39,7 @@ PROVIDERS = dict(
    XmlConfig="resource_management.libraries.providers.xml_config.XmlConfigProvider",
    PropertiesFile="resource_management.libraries.providers.properties_file.PropertiesFileProvider",
    MonitorWebserver="resource_management.libraries.providers.monitor_webserver.MonitorWebserverProvider",
-    HdfsResource="resource_management.libraries.providers.hdfs_resource.HdfsResourceProvider",
+    HdfsDirectory="resource_management.libraries.providers.hdfs_directory.HdfsDirectoryProvider",
+    CopyFromLocal="resource_management.libraries.providers.copy_from_local.CopyFromLocalProvider"
  ),
)

+ 89 - 0
ambari-common/src/main/python/resource_management/libraries/providers/copy_from_local.py

@@ -0,0 +1,89 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+
+import os
+from resource_management import *
+
+class CopyFromLocalProvider(Provider):
+  def action_run(self):
+
+    path = self.resource.path
+    dest_dir = self.resource.dest_dir
+    dest_file = self.resource.dest_file
+    kinnit_if_needed = self.resource.kinnit_if_needed
+    owner = self.resource.owner
+    group = self.resource.group
+    mode = self.resource.mode
+    hdfs_usr=self.resource.hdfs_user
+    hadoop_conf_path = self.resource.hadoop_conf_dir
+    bin_dir = self.resource.hadoop_bin_dir
+
+
+    if dest_file:
+      copy_cmd = format("fs -copyFromLocal {path} {dest_dir}/{dest_file}")
+      dest_path = dest_dir + dest_file if dest_dir.endswith(os.sep) else dest_dir + os.sep + dest_file
+    else:
+      dest_file_name = os.path.split(path)[1]
+      copy_cmd = format("fs -copyFromLocal {path} {dest_dir}")
+      dest_path = dest_dir + os.sep + dest_file_name
+    # Need to run unless as resource user
+    
+    if kinnit_if_needed:
+      Execute(kinnit_if_needed, 
+              user=owner,
+      )
+    
+    unless_cmd = as_user(format("PATH=$PATH:{bin_dir} hadoop fs -ls {dest_path}"), owner)
+
+    ExecuteHadoop(copy_cmd,
+                  not_if=unless_cmd,
+                  user=owner,
+                  bin_dir=bin_dir,
+                  conf_dir=hadoop_conf_path
+                  )
+
+    if not owner:
+      chown = None
+    else:
+      if not group:
+        chown = owner
+      else:
+        chown = format('{owner}:{group}')
+
+    if chown:
+      chown_cmd = format("fs -chown {chown} {dest_path}")
+
+      ExecuteHadoop(chown_cmd,
+                    user=hdfs_usr,
+                    bin_dir=bin_dir,
+                    conf_dir=hadoop_conf_path)
+    pass
+
+    if mode:
+      dir_mode = oct(mode)[1:]
+      chmod_cmd = format('fs -chmod {dir_mode} {dest_path}')
+
+      ExecuteHadoop(chmod_cmd,
+                    user=hdfs_usr,
+                    bin_dir=bin_dir,
+                    conf_dir=hadoop_conf_path)
+    pass

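A hypothetical call sketch for the restored resource; the argument names match the CopyFromLocal resource definition restored later in this commit, but the file path and users here are illustrative only:

  from resource_management import *

  with Environment() as env:
    # copies the local archive into HDFS unless `hadoop fs -ls` already finds it,
    # then chowns it, and (because mode is set) chmods it as the hdfs user
    CopyFromLocal('/usr/share/HDP-webhcat/pig.tar.gz',
                  dest_dir='/apps/webhcat',
                  owner='hcat',
                  group='hadoop',
                  mode=0755,
                  kinnit_if_needed='',
                  hdfs_user='hdfs',
                  hadoop_bin_dir='/usr/bin',
                  hadoop_conf_dir='/etc/hadoop/conf')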
+ 112 - 0
ambari-common/src/main/python/resource_management/libraries/providers/hdfs_directory.py

@@ -0,0 +1,112 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+import os
+
+from resource_management import *
+directories_list = [] #direcotries list for mkdir
+chmod_map = {} #(mode,recursive):dir_list map
+chown_map = {} #(owner,group,recursive):dir_list map
+class HdfsDirectoryProvider(Provider):
+  def action_create_delayed(self):
+    global delayed_directories
+    global chmod_map
+    global chown_map
+
+    if not self.resource.dir_name:
+      return
+
+    dir_name = self.resource.dir_name
+    dir_owner = self.resource.owner
+    dir_group = self.resource.group
+    dir_mode = oct(self.resource.mode)[1:] if self.resource.mode else None
+    directories_list.append(self.resource.dir_name)
+
+    recursive_chown_str = "-R" if self.resource.recursive_chown else ""
+    recursive_chmod_str = "-R" if self.resource.recursive_chmod else ""
+    # grouping directories by mode/owner/group to modify them in one 'chXXX' call
+    if dir_mode:
+      chmod_key = (dir_mode,recursive_chmod_str)
+      if chmod_map.has_key(chmod_key):
+        chmod_map[chmod_key].append(dir_name)
+      else:
+        chmod_map[chmod_key] = [dir_name]
+
+    if dir_owner:
+      owner_key = (dir_owner,dir_group,recursive_chown_str)
+      if chown_map.has_key(owner_key):
+        chown_map[owner_key].append(dir_name)
+      else:
+        chown_map[owner_key] = [dir_name]
+
+  def action_create(self):
+    global delayed_directories
+    global chmod_map
+    global chown_map
+
+    self.action_create_delayed()
+
+    hdp_conf_dir = self.resource.conf_dir
+    hdp_hdfs_user = self.resource.hdfs_user
+    secured = self.resource.security_enabled
+    keytab_file = self.resource.keytab
+    kinit_path = self.resource.kinit_path_local
+    bin_dir = self.resource.bin_dir
+
+    chmod_commands = []
+    chown_commands = []
+
+    for chmod_key, chmod_dirs in chmod_map.items():
+      mode = chmod_key[0]
+      recursive = chmod_key[1]
+      chmod_dirs_str = ' '.join(chmod_dirs)
+      chmod_commands.append(format("hadoop --config {hdp_conf_dir} fs -chmod {recursive} {mode} {chmod_dirs_str}"))
+
+    for chown_key, chown_dirs in chown_map.items():
+      owner = chown_key[0]
+      group = chown_key[1]
+      recursive = chown_key[2]
+      chown_dirs_str = ' '.join(chown_dirs)
+      if owner:
+        chown = owner
+        if group:
+          chown = format("{owner}:{group}")
+        chown_commands.append(format("hadoop --config {hdp_conf_dir} fs -chown {recursive} {chown} {chown_dirs_str}"))
+
+    if secured:
+        Execute(format("{kinit_path} -kt {keytab_file} {hdfs_principal_name}"),
+                user=hdp_hdfs_user)
+    #create all directories in one 'mkdir' call
+    dir_list_str = ' '.join(directories_list)
+    #for hadoop 2 we need to specify -p to create directories recursively
+    parent_flag = '`rpm -q hadoop | grep -q "hadoop-1" || echo "-p"`'
+
+    Execute(format('hadoop --config {hdp_conf_dir} fs -mkdir {parent_flag} {dir_list_str} && {chmod_cmd} && {chown_cmd}',
+                   chmod_cmd=' && '.join(chmod_commands),
+                   chown_cmd=' && '.join(chown_commands)),
+            user=hdp_hdfs_user,
+            path=bin_dir,
+            not_if=as_user(format("hadoop --config {hdp_conf_dir} fs -ls {dir_list_str}"), hdp_hdfs_user)
+    )
+
+    directories_list[:] = []
+    chmod_map.clear()
+    chown_map.clear()

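A minimal usage sketch of the delayed-batching pattern this provider restores; directory names and users are illustrative, and the real call sites appear in the service scripts further down:

  from resource_management import *

  with Environment() as env:
    HdfsDirectory('/tmp',
                  action='create_delayed',   # only queued, nothing runs yet
                  owner='hdfs', mode=0777,
                  conf_dir='/etc/hadoop/conf', hdfs_user='hdfs')
    HdfsDirectory('/user/ambari-qa',
                  action='create_delayed',
                  owner='ambari-qa', mode=0770,
                  conf_dir='/etc/hadoop/conf', hdfs_user='hdfs')
    # flushes the queue: one combined `hadoop fs -mkdir ... && fs -chmod ... && fs -chown ...`
    HdfsDirectory(None, action='create',
                  conf_dir='/etc/hadoop/conf', hdfs_user='hdfs')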
+ 0 - 109
ambari-common/src/main/python/resource_management/libraries/providers/hdfs_resource.py

@@ -1,109 +0,0 @@
-# !/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-import json
-from resource_management import *
-
-JSON_PATH = '/var/lib/ambari-agent/data/hdfs_resources.json'
-JAR_PATH = '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar'
-
-RESOURCE_TO_JSON_FIELDS = {
-  'target': 'target',
-  'type': 'type',
-  'action': 'action',
-  'source': 'source',
-  'owner': 'owner',
-  'group': 'group',
-  'mode': 'mode',
-  'recursive_chown': 'recursiveChown',
-  'recursive_chmod': 'recursiveChmod'
-}
-
-
-class HdfsResourceProvider(Provider):
-  def action_delayed(self, action_name):
-    resource = {}
-    env = Environment.get_instance()
-    if not 'hdfs_files' in env.config:
-      env.config['hdfs_files'] = []
-
-    # Check required parameters
-    if not self.resource.type or not self.resource.action:
-      raise Fail("Resource parameter type or action is not set.")
-
-    # Put values in dictionary-resource
-    for field_name, json_field_name in RESOURCE_TO_JSON_FIELDS.iteritems():
-      if field_name == 'action':
-        resource[json_field_name] = action_name
-      elif field_name == 'mode' and self.resource.mode:
-        resource[json_field_name] = oct(self.resource.mode)[1:]
-      elif getattr(self.resource, field_name):
-        resource[json_field_name] = getattr(self.resource, field_name)
-
-    # Add resource to create
-    env.config['hdfs_files'].append(resource)
-
-  def action_create_delayed(self):
-    self.action_delayed("create")
-
-  def action_delete_delayed(self):
-    self.action_delayed("delete")
-
-  def action_execute(self):
-    env = Environment.get_instance()
-
-    # Check required parameters
-    if not self.resource.hadoop_fs or not self.resource.user:
-      raise Fail("Resource parameter hadoop_fs or user is not set.")
-
-    if not 'hdfs_files' in env.config or not env.config['hdfs_files']:
-      raise Fail("No resources to create. Please perform create_delayed"
-                 " or delete_delayed before doing execute action.")
-
-    hadoop_fs = self.resource.hadoop_fs
-    hadoop_bin_dir = self.resource.hadoop_bin_dir
-    hadoop_conf_dir = self.resource.hadoop_conf_dir
-    user = self.resource.user
-    security_enabled = self.resource.security_enabled
-    keytab_file = self.resource.keytab
-    kinit_path = self.resource.kinit_path_local
-    logoutput = self.resource.logoutput
-    jar_path=JAR_PATH
-    json_path=JSON_PATH
-
-    if security_enabled:
-      Execute(format("{kinit_path} -kt {keytab_file} {hdfs_principal_name}"),
-              user=user
-      )
-
-    # Write json file to disk
-    with open(JSON_PATH, 'w') as f:
-      f.write(json.dumps(env.config['hdfs_files']))
-
-    # Execute jar to create/delete resources in hadoop
-    Execute(format("hadoop --config {hadoop_conf_dir} jar {jar_path} {json_path} {hadoop_fs}"),
-            user=user,
-            path=[hadoop_bin_dir],
-            logoutput=logoutput,
-    )
-
-    # Clean
-    env.config['hdfs_files'] = []

+ 2 - 1
ambari-common/src/main/python/resource_management/libraries/resources/__init__.py

@@ -26,4 +26,5 @@ from resource_management.libraries.resources.xml_config import *
 from resource_management.libraries.resources.properties_file import *
 from resource_management.libraries.resources.repository import *
 from resource_management.libraries.resources.monitor_webserver import *
-from resource_management.libraries.resources.hdfs_resource import *
+from resource_management.libraries.resources.hdfs_directory import *
+from resource_management.libraries.resources.copy_from_local import *

+ 40 - 0
ambari-common/src/main/python/resource_management/libraries/resources/copy_from_local.py

@@ -0,0 +1,40 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+
+_all__ = ["CopyFromLocal"]
+from resource_management.core.base import Resource, ForcedListArgument, ResourceArgument, BooleanArgument
+
+class CopyFromLocal(Resource):
+  action = ForcedListArgument(default="run")
+
+  path = ResourceArgument(default=lambda obj: obj.name)
+  dest_dir = ResourceArgument(required=True)
+  dest_file = ResourceArgument()
+  owner = ResourceArgument(required=True)
+  group = ResourceArgument()
+  mode = ResourceArgument()
+  kinnit_if_needed = ResourceArgument(default='')
+  hadoop_conf_dir = ResourceArgument(default='/etc/hadoop/conf')
+  hdfs_user = ResourceArgument(default='hdfs')
+  hadoop_bin_dir = ResourceArgument(default='/usr/bin')
+
+  actions = Resource.actions + ["run"]

+ 45 - 0
ambari-common/src/main/python/resource_management/libraries/resources/hdfs_directory.py

@@ -0,0 +1,45 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+
+_all__ = ["HdfsDirectory"]
+from resource_management.core.base import Resource, ForcedListArgument, ResourceArgument, BooleanArgument
+
+class HdfsDirectory(Resource):
+  action = ForcedListArgument()
+
+  dir_name = ResourceArgument(default=lambda obj: obj.name)
+  owner = ResourceArgument()
+  group = ResourceArgument()
+  mode = ResourceArgument()
+  recursive_chown = BooleanArgument(default=False)
+  recursive_chmod = BooleanArgument(default=False)
+
+  conf_dir = ResourceArgument()
+  security_enabled = BooleanArgument(default=False)
+  keytab = ResourceArgument()
+  kinit_path_local = ResourceArgument()
+  hdfs_user = ResourceArgument()
+  bin_dir = ResourceArgument(default="")
+
+  #action 'create' immediately creates all pending directory in efficient manner
+  #action 'create_delayed' add directory to list of pending directories
+  actions = Resource.actions + ["create","create_delayed"]

+ 0 - 77
ambari-common/src/main/python/resource_management/libraries/resources/hdfs_resource.py

@@ -1,77 +0,0 @@
-# !/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-
-_all__ = ["HdfsResource"]
-from resource_management.core.base import Resource, ForcedListArgument, ResourceArgument, BooleanArgument
-
-"""
-Calling a lot of hadoop commands takes too much time.
-The cause is that for every call new connection initialized, with datanodes, namenode.
-
-While this resource can gather the dicteroies/files to create/delete/copyFromLocal.
-And after just with one call create all that.
-
-action = create_delayed / delete_delayed. Are for gathering information  about what you want
-to create.
-
-After everything is gathered you should execute action = execute. To perform delayed actions
-
-The resource is a replacement for the following operations:
-  1) hadoop fs -rmr
-  2) hadoop fs -copyFromLocal
-  3) hadoop fs -put
-  4) hadoop fs -mkdir
-  5) hadoop fs -touchz
-  6) hadoop fs -chmod
-  7) hadoop fs -chown
-"""
-
-
-class HdfsResource(Resource):
-  # Required: {target, type, action}
-  # path to hadoop file/directory
-  target = ResourceArgument(default=lambda obj: obj.name)
-  # "directory" or "file"
-  type = ResourceArgument()
-  # "create_delayed" or "delete_delayed" or "execute"
-  action = ForcedListArgument()
-  # Required for action="execute", path to fs like "hdfs://c6401.ambari.apache.org:8020"
-  hadoop_fs = ResourceArgument()
-  # if present - copies file/directory from local path {source} to hadoop path - {target}
-  source = ResourceArgument()
-  owner = ResourceArgument()
-  group = ResourceArgument()
-  mode = ResourceArgument()
-  logoutput = ResourceArgument()
-  recursive_chown = BooleanArgument(default=False)
-  recursive_chmod = BooleanArgument(default=False)
-
-  security_enabled = BooleanArgument(default=False)
-  keytab = ResourceArgument()
-  kinit_path_local = ResourceArgument()
-  user = ResourceArgument()
-  hadoop_bin_dir = ResourceArgument()
-  hadoop_conf_dir = ResourceArgument()
-
-  #action 'execute' immediately creates all pending files/directories in efficient manner
-  #action 'create_delayed/delete_delayed' adds file/directory to list of pending directories
-  actions = Resource.actions + ["create_delayed", "delete_delayed", "execute"]

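For contrast, the call pattern of the HdfsResource API being removed here, as used by the pre-revert service scripts (paths illustrative):

  params.HdfsResource('/tmp',
                      type='directory',
                      action='create_delayed',   # queued into env.config['hdfs_files']
                      owner='hdfs',
                      mode=0777)
  # one execution: serialize the queued items to hdfs_resources.json,
  # then run fast-hdfs-resource.jar once against the cluster filesystem
  params.HdfsResource(None, action='execute')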
+ 3 - 5
ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py

@@ -84,19 +84,17 @@ def falcon(type, action = None):
  if type == 'server':
    if action == 'config':
      if params.store_uri[0:4] == "hdfs":
-        params.HdfsResource(params.store_uri,
-                             type="file",
+        params.HdfsDirectory(params.store_uri,
                             action="create_delayed",
                             owner=params.falcon_user,
                             mode=0755
        )
-      params.HdfsResource(params.flacon_apps_dir,
-                           type="directory",
+      params.HdfsDirectory(params.flacon_apps_dir,
                           action="create_delayed",
                           owner=params.falcon_user,
                           mode=0777#TODO change to proper mode
      )
-      params.HdfsResource(None, action="execute")
+      params.HdfsDirectory(None, action="create")
      Directory(params.falcon_local_dir,
                owner=params.falcon_user,
                recursive=True,

+ 7 - 9
ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params.py

@@ -90,17 +90,15 @@ hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
-fs_root = config['configurations']['core-site']['fs.defaultFS']
 import functools
-#create partial functions with common arguments for every HdfsResource call
-#to create hdfs directory we need to call params.HdfsResource in code
-HdfsResource = functools.partial(
-  HdfsResource,
-  user=hdfs_user,
+#create partial functions with common arguments for every HdfsDirectory call
+#to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  hadoop_fs=fs_root,
-  hadoop_bin_dir = hadoop_bin_dir,
-  hadoop_conf_dir = hadoop_conf_dir
+  bin_dir = hadoop_bin_dir
 )

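The params modules use functools.partial so that every call site inherits the common keyword arguments. A self-contained sketch of the idiom; the plain function here is a hypothetical stand-in for the real HdfsDirectory resource:

  import functools

  def hdfs_directory(dir_name, action=None, owner=None, mode=None,
                     conf_dir=None, hdfs_user=None):
    print dir_name, action, owner, mode, conf_dir, hdfs_user

  # bind the cluster-wide arguments once...
  HdfsDirectory = functools.partial(hdfs_directory,
                                    conf_dir='/etc/hadoop/conf',
                                    hdfs_user='hdfs')

  # ...so call sites only pass what varies per directory
  HdfsDirectory('/apps/falcon', action='create_delayed', owner='falcon', mode=0755)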
+ 4 - 6
ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase.py

@@ -135,19 +135,17 @@ def hbase(name=None # 'master' or 'regionserver' or 'client'
      group=params.user_group,
      owner=params.hbase_user
    )
-  if name == "master":
-    params.HdfsResource(params.hbase_hdfs_root_dir,
-                         type="directory",
+  if name in ["master","regionserver"]:
+    params.HdfsDirectory(params.hbase_hdfs_root_dir,
                         action="create_delayed",
                         owner=params.hbase_user
    )
-    params.HdfsResource(params.hbase_staging_dir,
-                         type="directory",
+    params.HdfsDirectory(params.hbase_staging_dir,
                         action="create_delayed",
                         owner=params.hbase_user,
                         mode=0711
    )
-    params.HdfsResource(None, action="execute")
+    params.HdfsDirectory(None, action="create")

def hbase_TemplateConfig(name, 
                         tag=None

+ 7 - 9
ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params.py

@@ -137,19 +137,17 @@ hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
-fs_root = config['configurations']['core-site']['fs.defaultFS']
 import functools
-#create partial functions with common arguments for every HdfsResource call
-#to create hdfs directory we need to call params.HdfsResource in code
-HdfsResource = functools.partial(
-  HdfsResource,
-  user=hdfs_user,
+#create partial functions with common arguments for every HdfsDirectory call
+#to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  hadoop_fs=fs_root,
-  hadoop_bin_dir = hadoop_bin_dir,
-  hadoop_conf_dir = hadoop_conf_dir
+  bin_dir = hadoop_bin_dir
 )

 if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:

+ 2 - 1
ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/service_check.py

@@ -26,8 +26,9 @@ class HbaseServiceCheck(Script):
  def service_check(self, env):
    import params
    env.set_params(params)
-
+    
    output_file = "/apps/hbase/data/ambarismoketest"
+    test_cmd = format("fs -test -e {output_file}")
    smokeuser_kinit_cmd = format("{kinit_path_local} -kt {smoke_user_keytab} {smoke_test_user};") if params.security_enabled else ""
    hbase_servicecheck_file = format("{exec_tmp_dir}/hbase-smoke.sh")
  

BIN
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/files/fast-hdfs-resource.jar


+ 0 - 5
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py

@@ -78,11 +78,6 @@ def hdfs(name=None):
       owner=tc_owner,
       content=Template("slaves.j2")
  )
-
-  # for source-code of jar goto contrib/fast-hdfs-resource
-  File(format("{ambari_libs_dir}/fast-hdfs-resource.jar"),
-       content=StaticFile("fast-hdfs-resource.jar")
-  )
  
  if params.lzo_enabled:
    Package(params.lzo_packages_for_current_host)

+ 4 - 7
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py

@@ -135,21 +135,18 @@ def create_name_dirs(directories):
 def create_hdfs_directories(check):
   import params
 
-  params.HdfsResource("/tmp",
-                       type="directory",
+  params.HdfsDirectory("/tmp",
                        action="create_delayed",
                        owner=params.hdfs_user,
                        mode=0777
   )
-  params.HdfsResource(params.smoke_hdfs_user_dir,
-                       type="directory",
+  params.HdfsDirectory(params.smoke_hdfs_user_dir,
                        action="create_delayed",
                        owner=params.smoke_user,
                        mode=params.smoke_hdfs_user_mode
   )
-  params.HdfsResource(None, 
-                      action="execute",
-                      only_if=check #skip creation when HA not active
+  params.HdfsDirectory(None, action="create",
+                       only_if=check #skip creation when HA not active
   )
 
 def format_namenode(force=None):
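create_hdfs_directories shows the batching contract this revert restores: each action="create_delayed" call only records its arguments, and the terminating HdfsDirectory(None, action="create") flushes the whole queue (optionally gated by only_if). A rough sketch of that contract, using a module-level list where the real provider keeps its own state:

    _pending = []

    def hdfs_directory(path, action, owner=None, mode=None):
        if action == "create_delayed":
            _pending.append((path, owner, mode))   # queue only; no hadoop call yet
        elif action == "create":
            # Flush: the real provider batches these into as few
            # "hadoop fs -mkdir/-chown/-chmod" invocations as it can.
            for p, o, m in _pending:
                print("hadoop fs -mkdir -p %s  # owner=%s mode=%s" % (p, o, oct(m) if m else "-"))
            del _pending[:]

    hdfs_directory("/tmp", "create_delayed", owner="hdfs", mode=0o777)
    hdfs_directory("/user/ambari-qa", "create_delayed", owner="ambari-qa", mode=0o770)
    hdfs_directory(None, "create")  # one flush for both directories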

+ 2 - 0
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_snamenode.py

@@ -19,6 +19,8 @@ limitations under the License.
 
 
 from resource_management import *
 from utils import service
+from utils import hdfs_directory
+
 
 def snamenode(action=None, format=False):
   import params

+ 8 - 13
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params.py

@@ -81,8 +81,6 @@ else:
 hadoop_conf_dir = "/etc/hadoop/conf"
 hadoop_conf_dir = "/etc/hadoop/conf"
 hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
 hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
 limits_conf_dir = "/etc/security/limits.d"
 limits_conf_dir = "/etc/security/limits.d"
-# Path to which fast-hdfs-resource.jar will be installed
-ambari_libs_dir = "/var/lib/ambari-agent/lib"
 
 
 execute_path = os.environ['PATH'] + os.pathsep + hadoop_bin_dir
 execute_path = os.environ['PATH'] + os.pathsep + hadoop_bin_dir
 ulimit_cmd = "ulimit -c unlimited && "
 ulimit_cmd = "ulimit -c unlimited && "
@@ -226,22 +224,19 @@ if security_enabled:
   nn_kinit_cmd = format("{kinit_path_local} -kt {_nn_keytab} {_nn_principal_name};")  
 else:
   dn_kinit_cmd = ""
-  nn_kinit_cmd = ""
-
-fs_root = config['configurations']['core-site']['fs.defaultFS']
+  nn_kinit_cmd = ""  
 
 import functools
-#create partial functions with common arguments for every HdfsResource call
-#to create hdfs directory we need to call params.HdfsResource in code
-HdfsResource = functools.partial(
-  HdfsResource,
-  user=hdfs_user,
+#create partial functions with common arguments for every HdfsDirectory call
+#to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  hadoop_fs=fs_root,
-  hadoop_bin_dir = hadoop_bin_dir,
-  hadoop_conf_dir = hadoop_conf_dir
+  bin_dir = hadoop_bin_dir
 )
 
 io_compression_codecs = config['configurations']['core-site']['io.compression.codecs']

+ 39 - 15
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/service_check.py

@@ -31,6 +31,15 @@ class HdfsServiceCheck(Script):
 
 
     safemode_command = "dfsadmin -safemode get | grep OFF"
     safemode_command = "dfsadmin -safemode get | grep OFF"
 
 
+    create_dir_cmd = format("fs -mkdir {dir}")
+    chmod_command = format("fs -chmod 777 {dir}")
+    test_dir_exists = as_user(format("{hadoop_bin_dir}/hadoop --config {hadoop_conf_dir} fs -test -e {dir}"), params.smoke_user)
+    cleanup_cmd = format("fs -rm {tmp_file}")
+    #cleanup put below to handle retries; if retrying there wil be a stale file
+    #that needs cleanup; exit code is fn of second command
+    create_file_cmd = format(
+      "{cleanup_cmd}; hadoop --config {hadoop_conf_dir} fs -put /etc/passwd {tmp_file}")
+    test_cmd = format("fs -test -e {tmp_file}")
     if params.security_enabled:
     if params.security_enabled:
       Execute(format("{kinit_path_local} -kt {smoke_user_keytab} {smoke_user}"),
       Execute(format("{kinit_path_local} -kt {smoke_user_keytab} {smoke_user}"),
         user=params.smoke_user
         user=params.smoke_user
@@ -43,24 +52,39 @@ class HdfsServiceCheck(Script):
                   tries=20,
                   tries=20,
                   bin_dir=params.hadoop_bin_dir
                   bin_dir=params.hadoop_bin_dir
     )
     )
-    params.HdfsResource(dir,
-                        type="directory",
-                        action="create_delayed",
-                        mode=0777
+    ExecuteHadoop(create_dir_cmd,
+                  user=params.smoke_user,
+                  logoutput=True,
+                  not_if=test_dir_exists,
+                  conf_dir=params.hadoop_conf_dir,
+                  try_sleep=3,
+                  tries=5,
+                  bin_dir=params.hadoop_bin_dir
     )
     )
-    #cleanup put below to handle retries; if retrying there wil be a stale file
-    #that needs cleanup; exit code is fn of second command
-    params.HdfsResource(tmp_file,
-                        type="directory",
-                        action="delete_delayed",
+    ExecuteHadoop(chmod_command,
+                  user=params.smoke_user,
+                  logoutput=True,
+                  conf_dir=params.hadoop_conf_dir,
+                  try_sleep=3,
+                  tries=5,
+                  bin_dir=params.hadoop_bin_dir
     )
     )
-    params.HdfsResource(tmp_file,
-                        type="directory",
-                        source="/etc/passwd",
-                        action="create_delayed"
+    ExecuteHadoop(create_file_cmd,
+                  user=params.smoke_user,
+                  logoutput=True,
+                  conf_dir=params.hadoop_conf_dir,
+                  try_sleep=3,
+                  tries=5,
+                  bin_dir=params.hadoop_bin_dir
+    )
+    ExecuteHadoop(test_cmd,
+                  user=params.smoke_user,
+                  logoutput=True,
+                  conf_dir=params.hadoop_conf_dir,
+                  try_sleep=3,
+                  tries=5,
+                  bin_dir=params.hadoop_bin_dir
     )
     )
-    params.HdfsResource(None, action="execute")
-
     if params.has_journalnode_hosts:
     if params.has_journalnode_hosts:
       journalnode_port = params.journalnode_port
       journalnode_port = params.journalnode_port
       checkWebUIFileName = "checkWebUI.py"
       checkWebUIFileName = "checkWebUI.py"

+ 3 - 27
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/files/templetonSmoke.sh

@@ -20,23 +20,13 @@
 #
 #
 
-function getValueFromField {
-  xmllint $1 | grep "<name>$2</name>" -C 2 | grep '<value>' | cut -d ">" -f2 | cut -d "<" -f1
-  return $?
-}
-
 export ttonhost=$1
 export smoke_test_user=$2
 export smoke_user_keytab=$3
 export security_enabled=$4
 export kinit_path_local=$5
-export hadoop_conf_dir=$6
 export ttonurl="http://${ttonhost}:50111/templeton/v1"
 
-export NAMENODE=`getValueFromField ${hadoop_conf_dir}/core-site.xml fs.defaultFS`
-export JSON_PATH='/var/lib/ambari-agent/data/hdfs_resources.json'
-export JAR_PATH='/var/lib/ambari-agent/lib/fast-hdfs-resource.jar'
-
 if [[ $security_enabled == "true" ]]; then
   kinitcmd="${kinit_path_local}  -kt ${smoke_user_keytab} ${smoke_test_user}; "
 else
@@ -84,25 +74,11 @@ echo "A = load '$ttonTestInput' using PigStorage(':');"  > /tmp/$ttonTestScript
 echo "B = foreach A generate \$0 as id; " >> /tmp/$ttonTestScript
 echo "store B into '$ttonTestOutput';" >> /tmp/$ttonTestScript
 
-cat >$JSON_PATH<<EOF
-[{
-	"target":"/tmp/${ttonTestScript}",
-	"type":"directory",
-	"action":"create",
-	"source":"/tmp/${ttonTestScript}"
-},
-{
-	"target":"${ttonTestInput}",
-	"type":"directory",
-	"action":"create",
-	"source":"/etc/passwd"
-}]
-EOF
-
 #copy pig script to hdfs
+sudo su ${smoke_test_user} -s /bin/bash - -c "hadoop dfs -copyFromLocal /tmp/$ttonTestScript /tmp/$ttonTestScript"
+
 #copy input file to hdfs
-echo "About to run: hadoop --config ${hadoop_conf_dir} jar ${JAR_PATH} ${JSON_PATH} ${NAMENODE}"
-sudo su ${smoke_test_user} -s /bin/bash - -c "hadoop --config ${hadoop_conf_dir} jar ${JAR_PATH} ${JSON_PATH} ${NAMENODE}"
+sudo su ${smoke_test_user} -s /bin/bash - -c "hadoop dfs -copyFromLocal /etc/passwd $ttonTestInput"
 
 #create, copy post args file
 echo -n "user.name=${smoke_test_user}&file=/tmp/$ttonTestScript" > /tmp/pig_post.txt

+ 3 - 83
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py

@@ -19,8 +19,6 @@ limitations under the License.
 """
 """
 
 
 from resource_management import *
 from resource_management import *
-from install_jars import install_tez_jars
-import glob
 import sys
 import sys
 import os
 import os
 
 
@@ -29,96 +27,18 @@ def hive(name=None):
   import params
   import params
 
 
   if name == 'hiveserver2':
   if name == 'hiveserver2':
-    if params.hdp_stack_version_major != "" and compare_versions(params.hdp_stack_version_major, '2.2') >=0:
-      params.HdfsResource(InlineTemplate(params.mapreduce_tar_destination).get_content(),
-                          type="file",
-                          action="create_delayed",
-                          source=params.mapreduce_tar_source,
-                          group=params.user_group,
-                          mode=params.tarballs_mode
-      )
-      params.HdfsResource(InlineTemplate(params.tez_tar_destination).get_content(),
-                          type="file",
-                          action="create_delayed",
-                          source=params.tez_tar_source,
-                          group=params.user_group,
-                          mode=params.tarballs_mode
-      )
-    else:
-      install_tez_jars()
-        
-    if params.hdp_stack_version_major != "" and compare_versions(params.hdp_stack_version_major, "2.2.0.0") < 0:
-      params.HdfsResource(params.webhcat_apps_dir,
-                           type="directory",
-                           action="create_delayed",
-                           owner=params.webhcat_user,
-                           mode=0755
-      )
-  
-    if params.hcat_hdfs_user_dir != params.webhcat_hdfs_user_dir:
-      params.HdfsResource(params.hcat_hdfs_user_dir,
-                           type="directory",
-                           action="create_delayed",
-                           owner=params.hcat_user,
-                           mode=params.hcat_hdfs_user_mode
-      )
-    params.HdfsResource(params.webhcat_hdfs_user_dir,
-                         type="directory",
-                         action="create_delayed",
-                         owner=params.webhcat_user,
-                         mode=params.webhcat_hdfs_user_mode
-    )
-  
-    for src_filepath in glob.glob(params.hadoop_streaming_tar_source):
-      src_filename = os.path.basename(src_filepath)
-      params.HdfsResource(InlineTemplate(params.hadoop_streaming_tar_destination_dir).get_content() + '/' + src_filename,
-                          type="file",
-                          action="create_delayed",
-                          source=src_filepath,
-                          group=params.user_group,
-                          mode=params.tarballs_mode
-      )
-  
-    if (os.path.isfile(params.pig_tar_source)):
-      params.HdfsResource(InlineTemplate(params.pig_tar_destination).get_content(),
-                          type="file",
-                          action="create_delayed",
-                          source=params.pig_tar_source,
-                          group=params.user_group,
-                          mode=params.tarballs_mode
-      )
-  
-    params.HdfsResource(InlineTemplate(params.hive_tar_destination).get_content(),
-                        type="file",
-                        action="create_delayed",
-                        source=params.hive_tar_source,
-                        group=params.user_group,
-                        mode=params.tarballs_mode
-    )
 
 
-    for src_filepath in glob.glob(params.sqoop_tar_source):
-      src_filename = os.path.basename(src_filepath)
-      params.HdfsResource(InlineTemplate(params.sqoop_tar_destination_dir).get_content() + '/' + src_filename,
-                          type="file",
-                          action="create_delayed",
-                          source=src_filepath,
-                          group=params.user_group,
-                          mode=params.tarballs_mode
-      )
-      
-    params.HdfsResource(params.hive_apps_whs_dir,
-                         type="directory",
+    params.HdfsDirectory(params.hive_apps_whs_dir,
                          action="create_delayed",
                          action="create_delayed",
                          owner=params.hive_user,
                          owner=params.hive_user,
                          mode=0777
                          mode=0777
     )
     )
-    params.HdfsResource(params.hive_hdfs_user_dir,
-                         type="directory",
+    params.HdfsDirectory(params.hive_hdfs_user_dir,
                          action="create_delayed",
                          action="create_delayed",
                          owner=params.hive_user,
                          owner=params.hive_user,
                          mode=params.hive_hdfs_user_mode
                          mode=params.hive_hdfs_user_mode
     )
     )
-    params.HdfsResource(None, action="execute")
+    params.HdfsDirectory(None, action="create")
 
 
   Directory(params.hive_conf_dir_prefix,
   Directory(params.hive_conf_dir_prefix,
             mode=0755
             mode=0755

+ 6 - 3
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py

@@ -22,8 +22,8 @@ import hive_server_upgrade
 from resource_management import *
 from hive import hive
 from hive_service import hive_service
+from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
 from install_jars import install_tez_jars
-from resource_management.libraries.functions.version import compare_versions
 
 class HiveServer(Script):
 
@@ -35,8 +35,7 @@ class HiveServer(Script):
   def configure(self, env):
     import params
     env.set_params(params)
-    
-    if not (params.hdp_stack_version_major != "" and compare_versions(params.hdp_stack_version_major, '2.2') >=0):
+    if not (params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.2') >=0):
       install_tez_jars()
 
     hive(name='hiveserver2')
@@ -47,6 +46,10 @@ class HiveServer(Script):
     env.set_params(params)
     self.configure(env) # FOR SECURITY
 
+    # This function is needed in HDP 2.2, but it is safe to call in earlier versions.
+    copy_tarballs_to_hdfs('mapreduce', params.tez_user, params.hdfs_user, params.user_group)
+    copy_tarballs_to_hdfs('tez', params.tez_user, params.hdfs_user, params.user_group)
+
    hive_service( 'hiveserver2', action = 'start',
      rolling_restart=rolling_restart )
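copy_tarballs_to_hdfs(tarball_prefix, user, file_owner, group) takes over the per-tarball uploads that the HdfsResource calls performed before this revert. Its call signature is what the diff shows; the behavior sketched below (resolve source and destination for the named tarball, upload only when missing) is an assumption for illustration, not the function's actual code:

    import subprocess

    def copy_tarball_if_missing(src, dest, run_as):
        # Skip the upload when the destination already exists in HDFS.
        test = "hadoop fs -test -e %s" % dest
        if subprocess.call(["su", "-s", "/bin/bash", run_as, "-c", test]) != 0:
            put = "hadoop fs -put %s %s" % (src, dest)
            subprocess.call(["su", "-s", "/bin/bash", run_as, "-c", put])

    # Paths are illustrative only.
    copy_tarball_if_missing("/usr/hdp/current/hadoop-client/mapreduce.tar.gz",
                            "/hdp/apps/mapreduce/mapreduce.tar.gz", "hdfs")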
 
 

+ 36 - 19
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/install_jars.py

@@ -20,7 +20,6 @@ limitations under the License.
 
 
 from resource_management import *
 import os
-import glob
 
 def install_tez_jars():
   import params
 
@@ -30,12 +29,25 @@ def install_tez_jars():
   # If tez libraries are to be stored in hdfs
   if destination_hdfs_dirs:
     for hdfs_dir in destination_hdfs_dirs:
-      params.HdfsResource(hdfs_dir,
-                           type="directory",
+      params.HdfsDirectory(hdfs_dir,
                            action="create_delayed",
                            owner=params.tez_user,
                            mode=0755
       )
+    pass
+    params.HdfsDirectory(None, action="create")
+
+    if params.security_enabled:
+      kinit_if_needed = format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_principal_name};")
+    else:
+      kinit_if_needed = ""
+
+    if kinit_if_needed:
+      Execute(kinit_if_needed,
+              user=params.tez_user,
+              path='/bin'
+      )
+    pass
 
     app_dir_path = None
     lib_dir_path = None
@@ -49,25 +61,30 @@ def install_tez_jars():
         pass
       pass
     pass
-  
-    tez_jars = {}
+
     if app_dir_path:
-      tez_jars[params.tez_local_api_jars] = app_dir_path
-    if lib_dir_path:
-      tez_jars[params.tez_local_lib_jars] = lib_dir_path
-
-    for src_file_regex, dest_dir in tez_jars.iteritems():
-      for src_filepath in glob.glob(src_file_regex):
-        src_filename = os.path.basename(src_filepath)
-        params.HdfsResource(format("{dest_dir}/{src_filename}"),
-                            type="file",
-                            action="create_delayed",
-                            source=src_filepath,
-                            mode=0755,
-                            owner=params.tez_user
+      for scr_file, dest_file in params.app_dir_files.iteritems():
+        CopyFromLocal(scr_file,
+                      mode=0755,
+                      owner=params.tez_user,
+                      dest_dir=app_dir_path,
+                      dest_file=dest_file,
+                      kinnit_if_needed=kinit_if_needed,
+                      hdfs_user=params.hdfs_user,
+                      hadoop_bin_dir=params.hadoop_bin_dir,
+                      hadoop_conf_dir=params.hadoop_conf_dir
        )
 
-    params.HdfsResource(None, action="execute")
+    if lib_dir_path:
+      CopyFromLocal(params.tez_local_lib_jars,
+                    mode=0755,
+                    owner=params.tez_user,
+                    dest_dir=lib_dir_path,
+                    kinnit_if_needed=kinit_if_needed,
+                    hdfs_user=params.hdfs_user,
+                    hadoop_bin_dir=params.hadoop_bin_dir,
+                    hadoop_conf_dir=params.hadoop_conf_dir
+      )
    pass
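CopyFromLocal (restored by this commit, with its historical kinnit_if_needed spelling) bundles the optional kinit, the copy, and the ownership fix into one resource. As a hedged sketch only, one call amounts to something like the following, with the copy run as the destination owner and the chown/chmod as the HDFS superuser; the real provider also guards against re-copying files that already exist:

    def copy_from_local(src, dest_dir, owner, mode, hdfs_user, kinit_cmd=""):
        # Approximate shell expansion of a single CopyFromLocal call.
        dest = "%s/%s" % (dest_dir, src.split("/")[-1])
        print("%shadoop fs -copyFromLocal %s %s  # as %s" % (kinit_cmd, src, dest, owner))
        print("hadoop fs -chown %s %s            # as %s" % (owner, dest, hdfs_user))
        print("hadoop fs -chmod %o %s" % (mode, dest))

    copy_from_local("/usr/lib/tez/tez-api.jar", "/apps/tez", "tez", 0o755, "hdfs")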
 
 
 
 

+ 15 - 51
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py

@@ -29,19 +29,15 @@ tmp_dir = Script.get_tmp_dir()
 
 
 # This is expected to be of the form #.#.#.#
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version_major = format_hdp_stack_version(stack_version_unformatted)
-stack_is_hdp21 = hdp_stack_version_major != "" and compare_versions(hdp_stack_version_major, '2.1') >= 0 and compare_versions(hdp_stack_version_major, '2.2') < 0
+hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
+stack_is_hdp21 = hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.1') >= 0 and compare_versions(hdp_stack_version, '2.2') < 0
 
-# this is not avaliable on INSTALL action because hdp-select is not available
-hdp_stack_version = version.get_hdp_build_version(hdp_stack_version_major)
 # New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
 version = default("/commandParams/version", None)
 
-webhcat_apps_dir = "/apps/webhcat"
-
 # Hadoop params
 # TODO, this logic should initialize these parameters in a file inside the HDP 2.2 stack.
-if hdp_stack_version_major != "" and compare_versions(hdp_stack_version_major, '2.2') >=0:
+if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >=0:
   # start out with client libraries
   hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
   hadoop_home = '/usr/hdp/current/hadoop-client'
@@ -64,25 +60,6 @@ if hdp_stack_version_major != "" and compare_versions(hdp_stack_version_major, '
   webhcat_bin_dir = '/usr/hdp/current/hive-webhcat/sbin'
 
   hive_specific_configs_supported = True
-
-  # --- Tarballs ---
-
-  hive_tar_source = config['configurations']['cluster-env']['hive_tar_source']
-  pig_tar_source = config['configurations']['cluster-env']['pig_tar_source']
-  hadoop_streaming_tar_source = config['configurations']['cluster-env']['hadoop-streaming_tar_source']
-  sqoop_tar_source = config['configurations']['cluster-env']['sqoop_tar_source']
-  mapreduce_tar_source = config['configurations']['cluster-env']['mapreduce_tar_source']
-  tez_tar_source = config['configurations']['cluster-env']['tez_tar_source']
-
-  hive_tar_destination = config['configurations']['cluster-env']['hive_tar_destination_folder']  + "/" + os.path.basename(hive_tar_source)
-  pig_tar_destination = config['configurations']['cluster-env']['pig_tar_destination_folder'] + "/" + os.path.basename(pig_tar_source)
-  hadoop_streaming_tar_destination_dir = config['configurations']['cluster-env']['hadoop-streaming_tar_destination_folder']
-  sqoop_tar_destination_dir = config['configurations']['cluster-env']['sqoop_tar_destination_folder'] + "/" + os.path.basename(sqoop_tar_source)
-  mapreduce_tar_destination = config['configurations']['cluster-env']['mapreduce_tar_destination_folder'] + "/" + os.path.basename(mapreduce_tar_source)
-  tez_tar_destination = config['configurations']['cluster-env']['tez_tar_destination_folder'] + "/" + os.path.basename(tez_tar_source)
-
-  tarballs_mode = 0444
-
 else:
   hadoop_bin_dir = "/usr/bin"
   hadoop_home = '/usr'
@@ -93,7 +70,7 @@ else:
   hive_tar_file = '/usr/share/HDP-webhcat/hive.tar.gz'
   sqoop_tar_file = '/usr/share/HDP-webhcat/sqoop*.tar.gz'
 
-  if hdp_stack_version_major != "" and compare_versions(hdp_stack_version_major, "2.1.0.0") < 0:
+  if hdp_stack_version != "" and compare_versions(hdp_stack_version, "2.1.0.0") < 0:
     hcat_lib = '/usr/lib/hcatalog/share/hcatalog'
     webhcat_bin_dir = '/usr/lib/hcatalog/sbin'
   # for newer versions
@@ -103,27 +80,13 @@ else:
     
     
   hive_specific_configs_supported = False
 
-  # --- Tarballs ---
-  hive_tar_source = hive_tar_file
-  pig_tar_source = pig_tar_file
-  hadoop_streaming_tar_source = hadoop_streeming_jars
-  sqoop_tar_source = sqoop_tar_file
-
-  hive_tar_destination = webhcat_apps_dir + "/" + os.path.basename(hive_tar_source)
-  pig_tar_destination = webhcat_apps_dir + "/" + os.path.basename(pig_tar_source)
-  hadoop_streaming_tar_destination_dir = webhcat_apps_dir
-  sqoop_tar_destination_dir = webhcat_apps_dir
-
-  tarballs_mode = 0755
-
-
 hadoop_conf_dir = "/etc/hadoop/conf"
 hive_conf_dir_prefix = "/etc/hive"
 hive_conf_dir = format("{hive_conf_dir_prefix}/conf")
 hive_client_conf_dir = format("{hive_conf_dir_prefix}/conf")
 hive_server_conf_dir = format("{hive_conf_dir_prefix}/conf.server")
 
-if hdp_stack_version_major != "" and compare_versions(hdp_stack_version_major, "2.1.0.0") < 0:
+if hdp_stack_version != "" and compare_versions(hdp_stack_version, "2.1.0.0") < 0:
   hcat_conf_dir = '/etc/hcatalog/conf'
   config_dir = '/etc/hcatalog/conf'
 # for newer versions
@@ -233,7 +196,7 @@ mysql_adduser_path = format("{tmp_dir}/addMysqlUser.sh")
 mysql_deluser_path = format("{tmp_dir}/removeMysqlUser.sh")
 
 ######## Metastore Schema
-if hdp_stack_version_major != "" and compare_versions(hdp_stack_version_major, "2.1.0.0") < 0:
+if hdp_stack_version != "" and compare_versions(hdp_stack_version, "2.1.0.0") < 0:
   init_metastore_schema = False
 else:
   init_metastore_schema = True
@@ -278,6 +241,7 @@ tez_user = config['configurations']['tez-env']['tez_user']
 # Tez jars
 tez_local_api_jars = '/usr/lib/tez/tez*.jar'
 tez_local_lib_jars = '/usr/lib/tez/lib/*.jar'
+app_dir_files = {tez_local_api_jars:None}
 
 # Tez libraries
 tez_lib_uris = default("/configurations/tez-site/tez.lib.uris", None)
@@ -321,6 +285,7 @@ templeton_jar = config['configurations']['webhcat-site']['templeton.jar']
 
 
 webhcat_server_host = config['clusterHostInfo']['webhcat_server_host']
 
+webhcat_apps_dir = "/apps/webhcat"
 
 hcat_hdfs_user_dir = format("/user/{hcat_user}")
 hcat_hdfs_user_mode = 0755
@@ -330,15 +295,14 @@ webhcat_hdfs_user_mode = 0755
 security_param = "true" if security_enabled else "false"
 security_param = "true" if security_enabled else "false"
 
 
 import functools
 import functools
-#create partial functions with common arguments for every HdfsResource call
-#to create hdfs directory we need to call params.HdfsResource in code
-HdfsResource = functools.partial(
-  HdfsResource,
-  user = hdfs_principal_name if security_enabled else hdfs_user,
+#create partial functions with common arguments for every HdfsDirectory call
+#to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir = hadoop_conf_dir,
+  hdfs_user = hdfs_principal_name if security_enabled else hdfs_user,
   security_enabled = security_enabled,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
   kinit_path_local = kinit_path_local,
-  hadoop_fs=fs_root,
-  hadoop_bin_dir = hadoop_bin_dir,
-  hadoop_conf_dir = hadoop_conf_dir
+  bin_dir = hadoop_bin_dir
 )
 )

+ 72 - 1
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py

@@ -20,15 +20,37 @@ Ambari Agent
 """
 """
 import sys
 import sys
 import os.path
 import os.path
+import glob
 
 
 from resource_management import *
 from resource_management import *
 from resource_management.core.resources.system import Execute
 from resource_management.core.resources.system import Execute
 from resource_management.libraries.functions.version import compare_versions
 from resource_management.libraries.functions.version import compare_versions
+from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
 
 
 
 
-def webhcat(env):
+def webhcat():
   import params
   import params
 
 
+  if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, "2.2.0.0") < 0:
+    params.HdfsDirectory(params.webhcat_apps_dir,
+                         action="create_delayed",
+                         owner=params.webhcat_user,
+                         mode=0755
+    )
+  
+  if params.hcat_hdfs_user_dir != params.webhcat_hdfs_user_dir:
+    params.HdfsDirectory(params.hcat_hdfs_user_dir,
+                         action="create_delayed",
+                         owner=params.hcat_user,
+                         mode=params.hcat_hdfs_user_mode
+    )
+  params.HdfsDirectory(params.webhcat_hdfs_user_dir,
+                       action="create_delayed",
+                       owner=params.webhcat_user,
+                       mode=params.webhcat_hdfs_user_mode
+  )
+  params.HdfsDirectory(None, action="create")
+
   Directory(params.templeton_pid_dir,
             owner=params.webhcat_user,
             mode=0755,
@@ -57,6 +79,55 @@ def webhcat(env):
             path='/bin'
     )
 
+  # TODO, these checks that are specific to HDP 2.2 and greater should really be in a script specific to that stack.
+  if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, "2.2.0.0") >= 0:
+    copy_tarballs_to_hdfs('hive', params.webhcat_user, params.hdfs_user, params.user_group)
+    copy_tarballs_to_hdfs('pig', params.webhcat_user, params.hdfs_user, params.user_group)
+    copy_tarballs_to_hdfs('hadoop-streaming', params.webhcat_user, params.hdfs_user, params.user_group)
+    copy_tarballs_to_hdfs('sqoop', params.webhcat_user, params.hdfs_user, params.user_group)
+  else:
+    CopyFromLocal(params.hadoop_streeming_jars,
+                  owner=params.webhcat_user,
+                  mode=0755,
+                  dest_dir=params.webhcat_apps_dir,
+                  kinnit_if_needed=kinit_if_needed,
+                  hdfs_user=params.hdfs_user,
+                  hadoop_bin_dir=params.hadoop_bin_dir,
+                  hadoop_conf_dir=params.hadoop_conf_dir
+    )
+
+    if (os.path.isfile(params.pig_tar_file)):
+      CopyFromLocal(params.pig_tar_file,
+                    owner=params.webhcat_user,
+                    mode=0755,
+                    dest_dir=params.webhcat_apps_dir,
+                    kinnit_if_needed=kinit_if_needed,
+                    hdfs_user=params.hdfs_user,
+                    hadoop_bin_dir=params.hadoop_bin_dir,
+                    hadoop_conf_dir=params.hadoop_conf_dir
+      )
+
+    CopyFromLocal(params.hive_tar_file,
+                  owner=params.webhcat_user,
+                  mode=0755,
+                  dest_dir=params.webhcat_apps_dir,
+                  kinnit_if_needed=kinit_if_needed,
+                  hdfs_user=params.hdfs_user,
+                  hadoop_bin_dir=params.hadoop_bin_dir,
+                  hadoop_conf_dir=params.hadoop_conf_dir
+    )
+
+    if (len(glob.glob(params.sqoop_tar_file)) > 0):
+      CopyFromLocal(params.sqoop_tar_file,
+                    owner=params.webhcat_user,
+                    mode=0755,
+                    dest_dir=params.webhcat_apps_dir,
+                    kinnit_if_needed=kinit_if_needed,
+                    hdfs_user=params.hdfs_user,
+                    hadoop_bin_dir=params.hadoop_bin_dir,
+                    hadoop_conf_dir=params.hadoop_conf_dir
+      )
+
   XmlConfig("webhcat-site.xml",
   XmlConfig("webhcat-site.xml",
             conf_dir=params.config_dir,
             conf_dir=params.config_dir,
             configurations=params.config['configurations']['webhcat-site'],
             configurations=params.config['configurations']['webhcat-site'],
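webhcat() branches on the formatted stack version: HDP 2.2+ uses the versioned copy_tarballs_to_hdfs path, while older stacks fall back to CopyFromLocal of the /usr/share/HDP-webhcat tarballs. compare_versions does a numeric, segment-wise comparison; the helper below is a local approximation for illustration (the real function lives in resource_management.libraries.functions.version and may treat versions of unequal length differently):

    def compare_versions_local(v1, v2):
        # Returns -1, 0 or 1, comparing dotted version strings numerically.
        a = [int(x) for x in v1.split(".")]
        b = [int(x) for x in v2.split(".")]
        return (a > b) - (a < b)

    assert compare_versions_local("2.2.0.0", "2.1.7.0") == 1
    assert compare_versions_local("2.0.6.0", "2.2.0.0") == -1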

+ 1 - 1
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py

@@ -31,7 +31,7 @@ class WebHCatServer(Script):
   def configure(self, env):
     import params
     env.set_params(params)
-    webhcat(env)
+    webhcat()
 
 
   def start(self, env, rolling_restart=False):

+ 4 - 29
ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/oozieSmoke2.sh

@@ -70,8 +70,6 @@ export OOZIE_EXIT_CODE=0
 export JOBTRACKER=`getValueFromField ${hadoop_conf_dir}/yarn-site.xml yarn.resourcemanager.address`
 export NAMENODE=`getValueFromField ${hadoop_conf_dir}/core-site.xml fs.defaultFS`
 export OOZIE_SERVER=`getValueFromField ${oozie_conf_dir}/oozie-site.xml oozie.base.url | tr '[:upper:]' '[:lower:]'`
-export JSON_PATH='/var/lib/ambari-agent/data/hdfs_resources.json'
-export JAR_PATH='/var/lib/ambari-agent/lib/fast-hdfs-resource.jar'
 
 if [ "$os_family" == "ubuntu" ] ; then
   LIST_PACKAGE_FILES_CMD='dpkg-query -L'
@@ -102,33 +100,10 @@ else
   kinitcmd=""
   kinitcmd=""
 fi
 fi
 
 
-cat >$JSON_PATH<<EOF
-[{
-	"target":"examples",
-	"type":"directory",
-	"action":"delete"
-},
-{
-	"target":"input-data",
-	"type":"directory",
-	"action":"delete"
-},
-{
-	"target":"examples",
-	"type":"directory",
-	"action":"create",
-	"source":"$OOZIE_EXAMPLES_DIR/examples"
-},
-{
-	"target":"input-data",
-	"type":"directory",
-	"action":"create",
-	"source":"$OOZIE_EXAMPLES_DIR/examples/input-data"
-}]
-EOF
-
-echo "About to run: hadoop --config ${hadoop_conf_dir} jar ${JAR_PATH} ${JSON_PATH} ${NAMENODE}"
-sudo su ${smoke_test_user} -s /bin/bash - -c "hadoop --config ${hadoop_conf_dir} jar ${JAR_PATH} ${JSON_PATH} ${NAMENODE}"
+sudo su ${smoke_test_user} -s /bin/bash - -c "${hadoop_bin_dir}/hdfs --config ${hadoop_conf_dir} dfs -rm -r examples"
+sudo su ${smoke_test_user} -s /bin/bash - -c "${hadoop_bin_dir}/hdfs --config ${hadoop_conf_dir} dfs -rm -r input-data"
+sudo su ${smoke_test_user} -s /bin/bash - -c "${hadoop_bin_dir}/hdfs --config ${hadoop_conf_dir} dfs -copyFromLocal $OOZIE_EXAMPLES_DIR/examples examples"
+sudo su ${smoke_test_user} -s /bin/bash - -c "${hadoop_bin_dir}/hdfs --config ${hadoop_conf_dir} dfs -copyFromLocal $OOZIE_EXAMPLES_DIR/examples/input-data input-data"
 
 
 cmd="${kinitcmd}source ${oozie_conf_dir}/oozie-env.sh ; ${oozie_bin_dir}/oozie -Doozie.auth.token.cache=false job -oozie $OOZIE_SERVER -config $OOZIE_EXAMPLES_DIR/examples/apps/map-reduce/job.properties  -run"
 cmd="${kinitcmd}source ${oozie_conf_dir}/oozie-env.sh ; ${oozie_bin_dir}/oozie -Doozie.auth.token.cache=false job -oozie $OOZIE_SERVER -config $OOZIE_EXAMPLES_DIR/examples/apps/map-reduce/job.properties  -run"
 echo $cmd
 echo $cmd

+ 2 - 4
ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py

@@ -26,13 +26,11 @@ def oozie(is_server=False # TODO: see if see can remove this
   import params
 
   if is_server:
-    params.HdfsResource(params.oozie_hdfs_user_dir,
-                         type="directory",
-                         action="create_delayed",
+    params.HdfsDirectory(params.oozie_hdfs_user_dir,
+                         action="create",
                          owner=params.oozie_user,
                          mode=params.oozie_hdfs_user_mode
     )
-    params.HdfsResource(None, action="execute")
   Directory(params.conf_dir,
              recursive = True,
              owner = params.oozie_user,

+ 7 - 8
ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params.py

@@ -147,17 +147,16 @@ hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 import functools
-#create partial functions with common arguments for every HdfsResource call
-#to create hdfs directory we need to call params.HdfsResource in code
-HdfsResource = functools.partial(
-  HdfsResource,
-  user=hdfs_user,
+#create partial functions with common arguments for every HdfsDirectory call
+#to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  hadoop_fs=fs_root,
-  hadoop_bin_dir = hadoop_bin_dir,
-  hadoop_conf_dir = hadoop_conf_dir
+  bin_dir = hadoop_bin_dir
 )
 
 #LZO support

+ 7 - 13
ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params.py

@@ -21,7 +21,6 @@ Ambari Agent
 
 
 from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
 from resource_management import *
-import os
 
 # server configurations
 config = Script.get_config()
@@ -38,9 +37,6 @@ if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
   hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
   hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
   hadoop_home = '/usr/hdp/current/hadoop-client'
   hadoop_home = '/usr/hdp/current/hadoop-client'
   pig_bin_dir = '/usr/hdp/current/pig-client/bin'
   pig_bin_dir = '/usr/hdp/current/pig-client/bin'
-
-  tez_tar_source = config['configurations']['cluster-env']['tez_tar_source']
-  tez_tar_destination = config['configurations']['cluster-env']['tez_tar_destination_folder'] + "/" + os.path.basename(tez_tar_source)
 else:
 else:
   hadoop_bin_dir = "/usr/bin"
   hadoop_bin_dir = "/usr/bin"
   hadoop_home = '/usr'
   hadoop_home = '/usr'
@@ -57,7 +53,6 @@ security_enabled = config['configurations']['cluster-env']['security_enabled']
 smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
 kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 pig_env_sh_template = config['configurations']['pig-env']['content']
-fs_root = config['configurations']['core-site']['fs.defaultFS']
 
 # not supporting 32 bit jdk.
 java64_home = config['hostLevelParams']['java_home']
@@ -67,15 +62,14 @@ pig_properties = config['configurations']['pig-properties']['content']
 log4j_props = config['configurations']['pig-log4j']['content']
 
 import functools
-#create partial functions with common arguments for every HdfsResource call
-#to create hdfs directory we need to call params.HdfsResource in code
-HdfsResource = functools.partial(
-  HdfsResource,
-  user=hdfs_principal_name if security_enabled else hdfs_user,
+#create partial functions with common arguments for every HdfsDirectory call
+#to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_principal_name if security_enabled else hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  hadoop_fs=fs_root,
-  hadoop_bin_dir = hadoop_bin_dir,
-  hadoop_conf_dir = hadoop_conf_dir
+  bin_dir = hadoop_bin_dir
 )

+ 31 - 44
ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py

@@ -20,34 +20,32 @@ Ambari Agent
 """
 """
 
 
 from resource_management import *
 from resource_management import *
-from resource_management.libraries.functions.version import compare_versions
+from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
 
 
 class PigServiceCheck(Script):
 class PigServiceCheck(Script):
   def service_check(self, env):
   def service_check(self, env):
     import params
     import params
     env.set_params(params)
     env.set_params(params)
 
 
-    input_file = format('/user/{smokeuser}/passwd')
-    output_dir = format('/user/{smokeuser}/pigsmoke.out')
+    input_file = 'passwd'
+    output_file = "pigsmoke.out"
 
 
+    cleanup_cmd = format("dfs -rmr {output_file} {input_file}")
     #cleanup put below to handle retries; if retrying there will be a stale file that needs cleanup; exit code is fn of second command
-    params.HdfsResource(output_dir,
-                        type="directory",
-                        action="delete_delayed",
-                        user=params.smokeuser,
-                        )
-    params.HdfsResource(input_file,
-                        type="file",
-                        action="delete_delayed",
-                        user=params.smokeuser,
-                        )
-    params.HdfsResource(input_file,
-                        type="file",
-                        source="/etc/passwd",
-                        action="create_delayed",
-                        user=params.smokeuser,
+    create_file_cmd = format("{cleanup_cmd}; hadoop --config {hadoop_conf_dir} dfs -put /etc/passwd {input_file} ") #TODO: inconsistent that second command needs hadoop
+    test_cmd = format("fs -test -e {output_file}")
+
+    ExecuteHadoop( create_file_cmd,
+      tries     = 3,
+      try_sleep = 5,
+      user      = params.smokeuser,
+      conf_dir = params.hadoop_conf_dir,
+      # for kinit run
+      keytab = params.smoke_user_keytab,
+      security_enabled = params.security_enabled,
+      kinit_path_local = params.kinit_path_local,
+      bin_dir = params.hadoop_bin_dir
     )
-    params.HdfsResource(None, action="execute")
 
     File( format("{tmp_dir}/pigSmoke.sh"),
       content = StaticFile("pigSmoke.sh"),
@@ -62,39 +60,28 @@ class PigServiceCheck(Script):
       user      = params.smokeuser
     )
 
-    test_cmd = format("fs -test -e {output_dir}")
     ExecuteHadoop( test_cmd,
-      user = params.smokeuser,
+      user      = params.smokeuser,
       conf_dir = params.hadoop_conf_dir,
       bin_dir = params.hadoop_bin_dir
     )
 
     if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.2') >= 0:
       # cleanup results from previous test
-      params.HdfsResource(output_dir,
-                          type="directory",
-                          action="delete_delayed",
-                          user=params.smokeuser,
-                          )
-      params.HdfsResource(input_file,
-                          type="file",
-                          action="delete_delayed",
-                          user=params.smokeuser,
-                          )
-      params.HdfsResource(input_file,
-                          type="file",
-                          source="/etc/passwd",
-                          action="create_delayed",
-                          user=params.smokeuser,
-      )
-      params.HdfsResource(InlineTemplate(params.tez_tar_destination).get_content(),
-                          type="file",
-                          action="create_delayed",
-                          source=params.tez_tar_source,
-                          group=params.user_group,
-                          owner=params.hdfs_user
+      ExecuteHadoop( create_file_cmd,
+        tries     = 3,
+        try_sleep = 5,
+        user      = params.smokeuser,
+        conf_dir = params.hadoop_conf_dir,
+        # for kinit run
+        keytab = params.smoke_user_keytab,
+        security_enabled = params.security_enabled,
+        kinit_path_local = params.kinit_path_local,
+        bin_dir = params.hadoop_bin_dir
       )
-      params.HdfsResource(None, action="execute")
+
+      # Check for Pig-on-Tez
+      copy_tarballs_to_hdfs('tez', params.smokeuser, params.hdfs_user, params.user_group)
 
       if params.security_enabled:
         kinit_cmd = format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser};")
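The Pig smoke test keeps the retry-safe idiom used throughout these checks: fuse the cleanup of stale files with the fresh put (so the exit code reflects the put), run the job, then fs -test -e the expected output. In plain commands, and assuming su-based impersonation as a stand-in for ExecuteHadoop, the sequence is roughly:

    import subprocess

    def run_as(user, cmd):
        return subprocess.call(["su", "-s", "/bin/bash", user, "-c", cmd])

    smokeuser, conf = "ambari-qa", "/etc/hadoop/conf"
    # Stale-file cleanup is fused with the put so the exit code reflects the put.
    run_as(smokeuser, "hadoop --config %s dfs -rmr pigsmoke.out passwd; "
                      "hadoop --config %s dfs -put /etc/passwd passwd" % (conf, conf))
    run_as(smokeuser, "pig /tmp/pigSmoke.sh")            # job writes pigsmoke.out
    assert run_as(smokeuser, "hadoop --config %s fs -test -e pigsmoke.out" % conf) == 0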

+ 7 - 9
ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params.py

@@ -48,22 +48,20 @@ config_dir = format("{config_dir_prefix}/conf")
 
 
 hadoop_home = '/usr'
 hadoop_home = '/usr'
 java64_home = config['hostLevelParams']['java_home']
 java64_home = config['hostLevelParams']['java_home']
-fs_root = config['configurations']['core-site']['fs.defaultFS']
 
 
 tez_user = config['configurations']['tez-env']['tez_user']
 tez_user = config['configurations']['tez-env']['tez_user']
 user_group = config['configurations']['cluster-env']['user_group']
 user_group = config['configurations']['cluster-env']['user_group']
 tez_env_sh_template = config['configurations']['tez-env']['content']
 tez_env_sh_template = config['configurations']['tez-env']['content']
 
 
 import functools
 import functools
-# Create partial functions with common arguments for every HdfsResource call
-# to create hdfs directory we need to call params.HdfsResource in code
-HdfsResource = functools.partial(
-  HdfsResource,
-  user=hdfs_principal_name if security_enabled else hdfs_user,
+# Create partial functions with common arguments for every HdfsDirectory call
+# to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_principal_name if security_enabled else hdfs_user,
   security_enabled=security_enabled,
   security_enabled=security_enabled,
   keytab=hdfs_user_keytab,
   keytab=hdfs_user_keytab,
   kinit_path_local=kinit_path_local,
   kinit_path_local=kinit_path_local,
-  hadoop_fs=fs_root,
-  hadoop_bin_dir = hadoop_bin_dir,
-  hadoop_conf_dir = hadoop_conf_dir
+  bin_dir=hadoop_bin_dir
 )
 )

+ 3 - 21
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/historyserver.py

@@ -20,6 +20,7 @@ Ambari Agent
 """
 """
 
 
 from resource_management import *
 from resource_management import *
+from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
 from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
 from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.format import format
 
 
@@ -42,32 +43,13 @@ class HistoryServer(Script):
 
 
     if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
       Execute(format("hdp-select set hadoop-mapreduce-historyserver {version}"))
-
-      params.HdfsResource(InlineTemplate(params.mapreduce_tar_destination).get_content(),
-                          type="file",
-                          action="create_delayed",
-                          source=params.mapreduce_tar_source,
-                          owner=params.hdfs_user,
-                          group=params.user_group,
-                          mode=0444,
-      )
-      params.HdfsResource(None, action="execute")
+      copy_tarballs_to_hdfs('mapreduce', params.mapred_user, params.hdfs_user, params.user_group)
 
   def start(self, env, rolling_restart=False):
     import params
     env.set_params(params)
     self.configure(env) # FOR SECURITY
-    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
-
-      params.HdfsResource(InlineTemplate(params.mapreduce_tar_destination).get_content(),
-                          type="file",
-                          action="create_delayed",
-                          source=params.mapreduce_tar_source,
-                          owner=params.hdfs_user,
-                          group=params.user_group,
-                          mode=0444,
-      )
-      params.HdfsResource(None, action="execute")
+    copy_tarballs_to_hdfs('mapreduce', params.mapred_user, params.hdfs_user, params.user_group)
     service('historyserver', action='start', serviceName='mapreduce')
 
   def stop(self, env, rolling_restart=False):

+ 9 - 19
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py

@@ -30,10 +30,7 @@ tmp_dir = Script.get_tmp_dir()
 
 
 # This is expected to be of the form #.#.#.#
 # This is expected to be of the form #.#.#.#
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version_major = format_hdp_stack_version(stack_version_unformatted)
-
-# this is not avaliable on INSTALL action because hdp-select is not available
-hdp_stack_version = version.get_hdp_build_version(hdp_stack_version_major)
+hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
 
 # New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
 version = default("/commandParams/version", None)
 hostname = config['hostname']
 hostname = config['hostname']
 
 #hadoop params
-if hdp_stack_version_major != "" and compare_versions(hdp_stack_version_major, '2.2') >= 0:
+if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
   yarn_role_root = "hadoop-yarn-client"
   mapred_role_root = "hadoop-mapreduce-client"
 
 
@@ -69,9 +66,6 @@ if hdp_stack_version_major != "" and compare_versions(hdp_stack_version_major, '
   hadoop_yarn_home            = format("/usr/hdp/current/{yarn_role_root}")
   hadoop_yarn_home            = format("/usr/hdp/current/{yarn_role_root}")
   yarn_bin                    = format("/usr/hdp/current/{yarn_role_root}/sbin")
   yarn_container_bin          = format("/usr/hdp/current/{yarn_role_root}/bin")
-
-  mapreduce_tar_source = config['configurations']['cluster-env']['mapreduce_tar_source']
-  mapreduce_tar_destination = config['configurations']['cluster-env']['mapreduce_tar_destination_folder'] + "/" + os.path.basename(mapreduce_tar_source)
 else:
   hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
   hadoop_bin = "/usr/lib/hadoop/sbin"
 #for create_hdfs_directory
 #for create_hdfs_directory
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
-
-fs_root = config['configurations']['core-site']['fs.defaultFS']
-
 import functools
-#create partial functions with common arguments for every HdfsResource call
-#to create hdfs directory we need to call params.HdfsResource in code
-HdfsResource = functools.partial(
-  HdfsResource,
-  user=hdfs_user,
+#create partial functions with common arguments for every HdfsDirectory call
+#to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  hadoop_fs=fs_root,
-  hadoop_bin_dir = hadoop_bin_dir,
-  hadoop_conf_dir = hadoop_conf_dir
+  bin_dir = hadoop_bin_dir
 )
 update_exclude_file_only = default("/commandParams/update_exclude_file_only",False)
 

+ 10 - 17
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/yarn.py

@@ -28,42 +28,37 @@ def yarn(name = None):
   import params
 
 
 
 
-  if name == "historyserver":
+  if name in ["nodemanager","historyserver"]:
     if params.yarn_log_aggregation_enabled:
-      params.HdfsResource(params.yarn_nm_app_log_dir,
-                           type="directory",
+      params.HdfsDirectory(params.yarn_nm_app_log_dir,
                            action="create_delayed",
                            action="create_delayed",
                            owner=params.yarn_user,
                            owner=params.yarn_user,
                            group=params.user_group,
                            group=params.user_group,
                            mode=0777,
                            mode=0777,
                            recursive_chmod=True
                            recursive_chmod=True
       )
       )
-    params.HdfsResource("/mapred",
-                         type="directory",
+    params.HdfsDirectory("/mapred",
                          action="create_delayed",
                          action="create_delayed",
                          owner=params.mapred_user
                          owner=params.mapred_user
     )
     )
-    params.HdfsResource("/mapred/system",
-                         type="directory",
+    params.HdfsDirectory("/mapred/system",
                          action="create_delayed",
                          action="create_delayed",
                          owner=params.hdfs_user
                          owner=params.hdfs_user
     )
     )
-    params.HdfsResource(params.mapreduce_jobhistory_intermediate_done_dir,
-                         type="directory",
+    params.HdfsDirectory(params.mapreduce_jobhistory_intermediate_done_dir,
                          action="create_delayed",
                          action="create_delayed",
                          owner=params.mapred_user,
                          owner=params.mapred_user,
                          group=params.user_group,
                          group=params.user_group,
                          mode=0777
                          mode=0777
     )
     )
 
 
-    params.HdfsResource(params.mapreduce_jobhistory_done_dir,
-                         type="directory",
+    params.HdfsDirectory(params.mapreduce_jobhistory_done_dir,
                          action="create_delayed",
                          action="create_delayed",
                          owner=params.mapred_user,
                          owner=params.mapred_user,
                          group=params.user_group,
                          group=params.user_group,
-                         mode=0777
+                         mode=01777
     )
-    params.HdfsResource(None, action="execute")
+    params.HdfsDirectory(None, action="create")
 
 
   if name == "nodemanager":
   if name == "nodemanager":
     Directory(params.nm_local_dirs.split(',') + params.nm_log_dirs.split(','),
     Directory(params.nm_local_dirs.split(',') + params.nm_log_dirs.split(','),
@@ -134,14 +129,12 @@ def yarn(name = None):
        group=params.user_group
     )
     if params.node_labels_dir:
-      params.HdfsResource(params.node_labels_dir,
-                           type="directory",
-                           action="create_delayed",
+      params.HdfsDirectory(params.node_labels_dir,
+                           action="create",
                            owner=params.yarn_user,
                            group=params.user_group,
                            mode=0700
       )
-      params.HdfsResource(None, action="execute")
   elif name == 'apptimelineserver':
     Directory(params.ats_leveldb_dir,
        owner=params.yarn_user,
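
The `create_delayed` / `create` split above is the batching contract this revert restores: delayed calls only queue directories, and the final `HdfsDirectory(None, action="create")` flushes the queue in a single hadoop invocation instead of one process per directory. Note also the mode change to `01777` on the jobhistory done directory, which adds the sticky bit so users cannot delete each other's files. A simplified sketch of the assumed queueing semantics (not the provider's actual code):

    _pending = []

    def hdfs_directory(path, action, **attrs):
        if action == "create_delayed":
            _pending.append((path, attrs))      # queue only, no hadoop call yet
        elif action == "create":
            if path is not None:
                _pending.append((path, attrs))
            # the real provider issues one batched 'hadoop fs -mkdir -p ...'
            # followed by the needed chown/chmod calls
            print("creating %d directories in one batch" % len(_pending))
            del _pending[:]

    hdfs_directory("/mapred", "create_delayed", owner="mapred")
    hdfs_directory("/mapred/system", "create_delayed", owner="hdfs")
    hdfs_directory(None, "create")  # flush the queue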

+ 89 - 106
ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py

@@ -271,42 +271,36 @@ class TestHBaseMaster(RMFTestCase):
                              owner='hbase',
                              content='log4jproperties\nline2'
     )
-
-    self.assertResourceCalled('HdfsResource', 'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hbase',
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-        action = ['create_delayed'],
-    )
-    self.assertResourceCalled('HdfsResource', '/apps/hbase/staging',
-        security_enabled = False,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hbase',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_delayed'],
-        mode = 0711,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-    )
+    self.assertResourceCalled('HdfsDirectory', 'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              owner = 'hbase',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              mode = 0711,
+                              owner = 'hbase',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              bin_dir = '/usr/bin',
+                              action = ['create'],
+                              )
 
 
   def assert_configure_secured(self):
     self.assertResourceCalled('Directory', '/etc/hbase',
@@ -391,41 +385,36 @@ class TestHBaseMaster(RMFTestCase):
                              owner='hbase',
                              content='log4jproperties\nline2'
     )
-    self.assertResourceCalled('HdfsResource', 'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
-        security_enabled = True,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hbase',
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-        action = ['create_delayed'],
-    )
-    self.assertResourceCalled('HdfsResource', '/apps/hbase/staging',
-        security_enabled = True,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hbase',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_delayed'],
-        mode = 0711,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = True,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-    )
+    self.assertResourceCalled('HdfsDirectory', 'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              owner = 'hbase',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0711,
+                              owner = 'hbase',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              bin_dir = '/usr/bin',
+                              action = ['create'],
+                              )
 
 
   def test_start_default_22(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_master.py",
@@ -516,41 +505,35 @@ class TestHBaseMaster(RMFTestCase):
                              owner='hbase',
                              content='log4jproperties\nline2')
 
 
-    self.assertResourceCalled('HdfsResource', 'hdfs://nn1/apps/hbase/data',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://nn1',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hbase',
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-        action = ['create_delayed'],
-    )
-    self.assertResourceCalled('HdfsResource', '/apps/hbase/staging',
-        security_enabled = False,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://nn1',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hbase',
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        type = 'directory',
-        action = ['create_delayed'],
-        mode = 0711,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://nn1',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-    )
+    self.assertResourceCalled('HdfsDirectory', 'hdfs://nn1/apps/hbase/data',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              owner = 'hbase',
+                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              action = ['create_delayed'])
+
+    self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              mode = 0711,
+                              owner = 'hbase',
+                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              action = ['create_delayed'])
+
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              action = ['create'])
 
 
     self.assertResourceCalled('Execute', '/usr/hdp/current/hbase-master/bin/hbase-daemon.sh --config /etc/hbase/conf start master',
       not_if = 'ls /var/run/hbase/hbase-hbase-master.pid >/dev/null 2>&1 && ps -p `cat /var/run/hbase/hbase-hbase-master.pid` >/dev/null 2>&1',
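
The assertions in these tests consume a recorded queue: `executeScript` captures every resource the script creates, and each `assertResourceCalled` pops the next one and compares its type, name, and keyword arguments, so order matters. Roughly, as a simplified model (not RMFTestCase's exact code):

    class ResourceAssertions(object):
        def __init__(self, recorded):
            # (type, name, kwargs) tuples in the order the script created them
            self.recorded = list(recorded)

        def assertResourceCalled(self, rtype, name, **kwargs):
            assert self.recorded, "expected %s %r but no resources left" % (rtype, name)
            actual = self.recorded.pop(0)
            assert actual == (rtype, name, kwargs), \
                "resource mismatch: %r != %r" % (actual, (rtype, name, kwargs))

        def assertNoMoreResources(self):
            assert not self.recorded, "unexpected resources: %r" % self.recorded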

+ 90 - 0
ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py

@@ -198,6 +198,36 @@ class TestHbaseRegionServer(RMFTestCase):
                              owner='hbase',
                              content='log4jproperties\nline2'
     )
+    self.assertResourceCalled('HdfsDirectory', 'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              owner = 'hbase',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0711,
+                              owner = 'hbase',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              bin_dir = '/usr/bin',
+                              action = ['create'],
+                              )
 
 
   def assert_configure_secured(self):
     self.assertResourceCalled('Directory', '/etc/hbase',
@@ -282,6 +312,36 @@ class TestHbaseRegionServer(RMFTestCase):
                              owner='hbase',
                              content='log4jproperties\nline2'
     )
+    self.assertResourceCalled('HdfsDirectory', 'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              owner = 'hbase',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0711,
+                              owner = 'hbase',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              bin_dir = '/usr/bin',
+                              action = ['create'],
+                              )
 
 
   def test_start_default_22(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_regionserver.py",
@@ -372,6 +432,36 @@ class TestHbaseRegionServer(RMFTestCase):
                              owner='hbase',
                              content='log4jproperties\nline2')
 
 
+    self.assertResourceCalled('HdfsDirectory', 'hdfs://nn1/apps/hbase/data',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              owner = 'hbase',
+                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              action = ['create_delayed'])
+
+    self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              mode = 0711,
+                              owner = 'hbase',
+                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              action = ['create_delayed'])
+
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              action = ['create'])
+
     self.assertResourceCalled('Execute', '/usr/hdp/current/hbase-regionserver/bin/hbase-daemon.sh --config /etc/hbase/conf start regionserver',
       not_if = 'ls /var/run/hbase/hbase-hbase-regionserver.pid >/dev/null 2>&1 && ps -p `cat /var/run/hbase/hbase-hbase-regionserver.pid` >/dev/null 2>&1',
       user = 'hbase')
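
The `not_if` guard above is the usual idempotent-start check: skip the daemon start when the pid file exists and the process it names is still alive. The same condition written out as a helper (illustrative; the test asserts the literal shell string):

    import subprocess

    def daemon_running(pid_file):
        # Mirrors: ls <pid> >/dev/null 2>&1 && ps -p `cat <pid>` >/dev/null 2>&1
        check = ("ls %(f)s >/dev/null 2>&1 && "
                 "ps -p `cat %(f)s` >/dev/null 2>&1") % {"f": pid_file}
        return subprocess.call(["bash", "-c", check]) == 0

    # Execute(start_cmd, not_if=...) runs start_cmd only when this is False.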

+ 0 - 6
ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py

@@ -379,9 +379,6 @@ class TestDatanode(RMFTestCase):
                               content = Template('slaves.j2'),
                               owner = 'hdfs',
                               )
-    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
-        content = StaticFile('fast-hdfs-resource.jar'),
-    )
     self.assertResourceCalled('Directory', '/var/lib/hadoop-hdfs',
                               owner = 'hdfs',
                               group = 'hadoop',
@@ -428,9 +425,6 @@ class TestDatanode(RMFTestCase):
                               content = Template('slaves.j2'),
                               owner = 'root',
                               )
-    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
-        content = StaticFile('fast-hdfs-resource.jar'),
-    )
     self.assertResourceCalled('Directory', '/var/lib/hadoop-hdfs',
                               owner = 'hdfs',
                               group = 'hadoop',

+ 0 - 6
ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py

@@ -206,9 +206,6 @@ class TestJournalnode(RMFTestCase):
                               content = Template('slaves.j2'),
                               owner = 'hdfs',
                               )
-    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
-        content = StaticFile('fast-hdfs-resource.jar'),
-    )
 
 
   def assert_configure_secured(self):
     self.assertResourceCalled('Directory', '/grid/0/hdfs/journal',
@@ -247,6 +244,3 @@ class TestJournalnode(RMFTestCase):
                               content = Template('slaves.j2'),
                               owner = 'root',
                               )
-    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
-        content = StaticFile('fast-hdfs-resource.jar'),
-    )

+ 189 - 225
ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py

@@ -88,6 +88,7 @@ class TestNamenode(RMFTestCase):
         environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
         not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
     )
+    self.printResources()
     self.assertResourceCalled('Execute', 'hdfs --config /etc/hadoop/conf dfsadmin -safemode leave',
         path = ['/usr/bin'],
         user = 'hdfs',
@@ -99,43 +100,38 @@ class TestNamenode(RMFTestCase):
         user = 'hdfs',
         try_sleep = 10,
     )
-    self.assertResourceCalled('HdfsResource', '/tmp',
-        security_enabled = False,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hdfs',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_delayed'],
-        mode = 0777,
-    )
-    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
-        security_enabled = False,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'ambari-qa',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_delayed'],
-        mode = 0770,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        only_if= None,
-    )
+    self.assertResourceCalled('HdfsDirectory', '/tmp',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0777,
+                              owner = 'hdfs',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0770,
+                              owner = 'ambari-qa',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              action = ['create'],
+                              bin_dir = '/usr/bin',
+                              only_if = None,
+                              )
     self.assertNoMoreResources()
 
   def test_stop_default(self):
@@ -230,43 +226,38 @@ class TestNamenode(RMFTestCase):
         user = 'hdfs',
         try_sleep = 10,
     )
-    self.assertResourceCalled('HdfsResource', '/tmp',
-        security_enabled = True,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hdfs',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_delayed'],
-        mode = 0777,
-    )
-    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
-        security_enabled = True,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'ambari-qa',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_delayed'],
-        mode = 0770,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = True,
-        only_if = None,
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        hadoop_bin_dir = '/usr/bin',
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-    )
+    self.assertResourceCalled('HdfsDirectory', '/tmp',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0777,
+                              owner = 'hdfs',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0770,
+                              owner = 'ambari-qa',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              action = ['create'],
+                              bin_dir = '/usr/bin',
+                              only_if = None,
+                              )
     self.assertNoMoreResources()
 
   def test_stop_secured(self):
@@ -332,42 +323,37 @@ class TestNamenode(RMFTestCase):
         user = 'hdfs',
         try_sleep = 10,
     )
-    self.assertResourceCalled('HdfsResource', '/tmp',
-        security_enabled = False,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://ns1',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hdfs',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_delayed'],
-        mode = 0777,
-    )
-    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
+    self.assertResourceCalled('HdfsDirectory', '/tmp',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0777,
+                              owner = 'hdfs',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0770,
+                              owner = 'ambari-qa',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
         security_enabled = False,
-        hadoop_conf_dir = '/etc/hadoop/conf',
         keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://ns1',
+        conf_dir = '/etc/hadoop/conf',
+        hdfs_user = 'hdfs',
         kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'ambari-qa',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_delayed'],
-        mode = 0770,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = False,
+        action = ['create'],
+        bin_dir = '/usr/bin',
         only_if = "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
         only_if = "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
-        keytab = UnknownConfigurationMock(),
-        hadoop_bin_dir = '/usr/bin',
-        hadoop_fs = 'hdfs://ns1',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertNoMoreResources()
 
 
@@ -416,43 +402,37 @@ class TestNamenode(RMFTestCase):
         user = 'hdfs',
         try_sleep = 10,
     )
-
-    self.assertResourceCalled('HdfsResource', '/tmp',
-        security_enabled = True,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        hadoop_fs = 'hdfs://ns1',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hdfs',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_delayed'],
-        mode = 0777,
-    )
-    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
+    self.assertResourceCalled('HdfsDirectory', '/tmp',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0777,
+                              owner = 'hdfs',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0770,
+                              owner = 'ambari-qa',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
         security_enabled = True,
-        hadoop_conf_dir = '/etc/hadoop/conf',
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        hadoop_fs = 'hdfs://ns1',
+        conf_dir = '/etc/hadoop/conf',
+        hdfs_user = 'hdfs',
         kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'ambari-qa',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_delayed'],
-        mode = 0770,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = True,
+        action = ['create'],
+        bin_dir = '/usr/bin',
         only_if = "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
         only_if = "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        hadoop_bin_dir = '/usr/bin',
-        hadoop_fs = 'hdfs://ns1',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertNoMoreResources()
 
 
@@ -514,43 +494,38 @@ class TestNamenode(RMFTestCase):
                               user = 'hdfs',
                               try_sleep = 10,
                               )
-    self.assertResourceCalled('HdfsResource', '/tmp',
-        security_enabled = False,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://ns1',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hdfs',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_delayed'],
-        mode = 0777,
-    )
-    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
-        security_enabled = False,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://ns1',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'ambari-qa',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_delayed'],
-        mode = 0770,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://ns1',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        only_if= "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
-    )
+    self.assertResourceCalled('HdfsDirectory', '/tmp',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0777,
+                              owner = 'hdfs',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0770,
+                              owner = 'ambari-qa',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              action = ['create'],
+                              bin_dir = '/usr/bin',
+                              only_if = "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
+                              )
     self.assertNoMoreResources()
 
   # tests namenode start command when NameNode HA is enabled, and
@@ -599,49 +574,44 @@ class TestNamenode(RMFTestCase):
                               not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
                               )
     self.assertResourceCalled('Execute', "hadoop dfsadmin -safemode get | grep 'Safe mode is OFF'",
-        path = ['/usr/bin'],
-        tries = 40,
-        only_if = "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'",
-        user = 'hdfs',
-        try_sleep = 10,
-    )
-    self.assertResourceCalled('HdfsResource', '/tmp',
-        security_enabled = False,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://ns1',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hdfs',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_delayed'],
-        mode = 0777,
-    )
-    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
-        security_enabled = False,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://ns1',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'ambari-qa',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_delayed'],
-        mode = 0770,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = False,
-        only_if = "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'",
-        keytab = UnknownConfigurationMock(),
-        hadoop_bin_dir = '/usr/bin',
-        hadoop_fs = 'hdfs://ns1',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-    )
+                              path = ['/usr/bin'],
+                              tries = 40,
+                              only_if = "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'",
+                              user = 'hdfs',
+                              try_sleep = 10,
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/tmp',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0777,
+                              owner = 'hdfs',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0770,
+                              owner = 'ambari-qa',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              action = ['create'],
+                              bin_dir = '/usr/bin',
+                              only_if = "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'",
+                              )
     self.assertNoMoreResources()
 
   def test_decommission_default(self):
@@ -758,9 +728,6 @@ class TestNamenode(RMFTestCase):
                               content = Template('slaves.j2'),
                               owner = 'hdfs',
                               )
-    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
-        content = StaticFile('fast-hdfs-resource.jar'),
-    )
     self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode',
                               owner = 'hdfs',
                               group = 'hadoop',
@@ -800,9 +767,6 @@ class TestNamenode(RMFTestCase):
                               content = Template('slaves.j2'),
                               owner = 'root',
                               )
-    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
-        content = StaticFile('fast-hdfs-resource.jar'),
-    )
     self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode',
                               owner = 'hdfs',
                               group = 'hadoop',
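
In the HA variants above, the final `HdfsDirectory(None, action='create')` carries an `only_if` that greps `hdfs haadmin -getServiceState <nn_id>` for `active`, so the batched directory creation runs only on the active NameNode and is a no-op on the standby. A sketch of how such a guard string can be built (illustrative helper, not the exact one the params scripts use):

    def ha_active_guard(hdfs_user, conf_dir, namenode_id):
        # Shell condition that succeeds (exit 0) only when this host's
        # NameNode reports the 'active' HA state.
        return (
            "/usr/bin/sudo su %s -l -s /bin/bash -c "
            "'export  PATH=/bin:/usr/bin ; "
            "hdfs --config %s haadmin -getServiceState %s | grep active'"
        ) % (hdfs_user, conf_dir, namenode_id)

    print(ha_active_guard("hdfs", "/etc/hadoop/conf", "nn1"))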

+ 29 - 41
ambari-server/src/test/python/stacks/2.0.6/HDFS/test_service_check.py

@@ -59,48 +59,36 @@ class TestServiceCheck(RMFTestCase):
         bin_dir = '/usr/bin',
         user = 'ambari-qa',
     )
-    self.assertResourceCalled('HdfsResource', '/tmp',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['create_delayed'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-        mode = 0777,
+    self.assertResourceCalled('ExecuteHadoop', 'fs -mkdir /tmp',
+        conf_dir = '/etc/hadoop/conf',
+        logoutput = True,
+        not_if = "/usr/bin/sudo su ambari-qa -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]/usr/bin/hadoop --config /etc/hadoop/conf fs -test -e /tmp'",
+        try_sleep = 3,
+        tries = 5,
+        bin_dir = '/usr/bin',
+        user = 'ambari-qa',
     )
-    self.assertResourceCalled('HdfsResource', '/tmp/',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['delete_delayed'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
+    self.assertResourceCalled('ExecuteHadoop', 'fs -chmod 777 /tmp',
+        conf_dir = '/etc/hadoop/conf',
+        logoutput = True,
+        try_sleep = 3,
+        tries = 5,
+        bin_dir = '/usr/bin',
+        user = 'ambari-qa',
     )
-    self.assertResourceCalled('HdfsResource', '/tmp/',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        source = '/etc/passwd',
-        user = 'hdfs',
-        action = ['create_delayed'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
+    self.assertResourceCalled('ExecuteHadoop', 'fs -rm /tmp/; hadoop --config /etc/hadoop/conf fs -put /etc/passwd /tmp/',
+        logoutput = True,
+        tries = 5,
+        conf_dir = '/etc/hadoop/conf',
+        bin_dir = '/usr/bin',
+        try_sleep = 3,
+        user = 'ambari-qa',
     )
     )
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
+    self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /tmp/',
+        logoutput = True,
+        tries = 5,
+        conf_dir = '/etc/hadoop/conf',
+        bin_dir = '/usr/bin',
+        try_sleep = 3,
+        user = 'ambari-qa',
     )
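
The reverted service check drives `hadoop fs` directly through `ExecuteHadoop`, which in essence shells out to `<bin_dir>/hadoop --config <conf_dir> <command>` as the given user and retries on failure. A simplified sketch of the assumed behavior (not the provider's exact implementation):

    import subprocess
    import time

    def execute_hadoop(command, conf_dir, bin_dir, user, tries=1, try_sleep=0):
        # Run 'hadoop --config <conf_dir> <command>' as <user>, retrying on failure.
        shell = "su %s -c '%s/hadoop --config %s %s'" % (user, bin_dir, conf_dir, command)
        for attempt in range(tries):
            if subprocess.call(["bash", "-c", shell]) == 0:
                return
            if attempt < tries - 1:
                time.sleep(try_sleep)
        raise RuntimeError("'%s' failed after %d tries" % (command, tries))

    # e.g.: execute_hadoop("fs -mkdir /tmp", "/etc/hadoop/conf", "/usr/bin",
    #                      "ambari-qa", tries=5, try_sleep=3)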

+ 0 - 6
ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py

@@ -230,9 +230,6 @@ class TestSNamenode(RMFTestCase):
                               content = Template('slaves.j2'),
                               owner = 'hdfs',
                               )
-    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
-        content = StaticFile('fast-hdfs-resource.jar'),
-    )
     self.assertResourceCalled('Directory', '/hadoop/hdfs/namesecondary',
                               owner = 'hdfs',
                               group = 'hadoop',
@@ -272,9 +269,6 @@ class TestSNamenode(RMFTestCase):
                               content = Template('slaves.j2'),
                               owner = 'root',
                               )
-    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
-        content = StaticFile('fast-hdfs-resource.jar'),
-    )
     self.assertResourceCalled('Directory', '/hadoop/hdfs/namesecondary',
                               owner = 'hdfs',
                               group = 'hadoop',

+ 0 - 12
ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py

@@ -63,9 +63,6 @@ class TestZkfc(RMFTestCase):
                               content = Template('slaves.j2'),
                               owner = 'hdfs',
                               )
-    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
-        content = StaticFile('fast-hdfs-resource.jar'),
-    )
     self.assertResourceCalled('Directory', '/var/run/hadoop',
                               owner = 'hdfs',
                               group = 'hadoop',
@@ -157,9 +154,6 @@ class TestZkfc(RMFTestCase):
                               content = Template('slaves.j2'),
                               owner = 'root',
                               )
-    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
-        content = StaticFile('fast-hdfs-resource.jar'),
-    )
    self.assertResourceCalled('Directory', '/var/run/hadoop',
                              owner = 'hdfs',
                              group = 'hadoop',
@@ -251,9 +245,6 @@ class TestZkfc(RMFTestCase):
                              content = Template('slaves.j2'),
                              owner = 'hdfs',
                              )
-    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
-        content = StaticFile('fast-hdfs-resource.jar'),
-    )
    self.assertResourceCalled('Directory', '/var/run/hadoop',
                              owner = 'hdfs',
                              group = 'hadoop',
@@ -320,9 +311,6 @@ class TestZkfc(RMFTestCase):
                              content = Template('slaves.j2'),
                              owner = 'hdfs',
                              )
-    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
-        content = StaticFile('fast-hdfs-resource.jar'),
-    )
    self.assertResourceCalled('Directory', '/var/run/hadoop',
                              owner = 'hdfs',
                              group = 'hadoop',

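The assertions deleted in test_snamenode.py and test_zkfc.py above all target the same step: the reverted patch no longer ships the fast-hdfs-resource.jar helper to agents, so its File resource disappears from every component's setup. For reference, the removed step in DSL form (a sketch of the assertion's subject, not new code):

    from resource_management.core.resources.system import File
    from resource_management.core.source import StaticFile

    # Materialize the helper jar on the host from the agent's static-file cache.
    File('/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
         content = StaticFile('fast-hdfs-resource.jar'))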
+ 80 - 192
ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py

@@ -21,7 +21,6 @@ import socket
 import subprocess
 
 from mock.mock import MagicMock, patch
-from resource_management.libraries.functions import version
 from resource_management.core import shell
 from stacks.utils.RMFTestCase import *
 
@@ -171,154 +170,89 @@ class TestHiveServer(RMFTestCase):
    self.assertFalse(socket_mock.called)

  def assert_configure_default(self):
-
-    self.assertResourceCalled('HdfsResource', '/apps/tez/',
-        security_enabled = False,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'tez',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_delayed'],
-        mode = 0755,
-    )
-    self.assertResourceCalled('HdfsResource', '/apps/tez/lib/',
-        security_enabled = False,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'tez',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_delayed'],
-        mode = 0755,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-    )
-    self.assertResourceCalled('HdfsResource', '/apps/tez/',
-        security_enabled = False,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'tez',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_delayed'],
-        mode = 0755,
-    )
-    self.assertResourceCalled('HdfsResource', '/apps/tez/lib/',
-        security_enabled = False,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'tez',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_delayed'],
-        mode = 0755,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-    )
-    self.assertResourceCalled('HdfsResource', '/apps/webhcat',
-        security_enabled = False,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hcat',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_delayed'],
-        mode = 0755,
+    self.assertResourceCalled('HdfsDirectory', '/apps/tez/',
+                              action = ['create_delayed'],
+                              mode = 0755,
+                              owner = 'tez',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              bin_dir = '/usr/bin',
+                              kinit_path_local = "/usr/bin/kinit"
    )
-    self.assertResourceCalled('HdfsResource', '/user/hcat',
-        security_enabled = False,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hcat',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_delayed'],
-        mode = 0755,
+
+    self.assertResourceCalled('HdfsDirectory', '/apps/tez/lib/',
+                              action = ['create_delayed'],
+                              mode = 0755,
+                              owner = 'tez',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              bin_dir = '/usr/bin',
+                              kinit_path_local = "/usr/bin/kinit"
+    )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              bin_dir = '/usr/bin',
+                              action = ['create']
    )
-    self.assertResourceCalled('HdfsResource', '/apps/webhcat/hive.tar.gz',
-        security_enabled = False,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = UnknownConfigurationMock(),
-        source = '/usr/share/HDP-webhcat/hive.tar.gz',
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['create_delayed'],
-        group = 'hadoop',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'file',
-        mode = 0755,
+
+    self.assertResourceCalled('CopyFromLocal', '/usr/lib/tez/tez*.jar',
+                              mode=0755,
+                              owner='tez',
+                              dest_dir='/apps/tez/',
+                              kinnit_if_needed='',
+                              hadoop_conf_dir='/etc/hadoop/conf',
+                              hadoop_bin_dir='/usr/bin',
+                              hdfs_user='hdfs',
+                              dest_file=None
    )
-    self.assertResourceCalled('HdfsResource', '/apps/hive/warehouse',
+
+    self.assertResourceCalled('CopyFromLocal', '/usr/lib/tez/lib/*.jar',
+                              mode=0755,
+                              owner='tez',
+                              dest_dir='/apps/tez/lib/',
+                              kinnit_if_needed='',
+                              hadoop_bin_dir='/usr/bin',
+                              hadoop_conf_dir='/etc/hadoop/conf',
+                              hdfs_user='hdfs'
+    )
+    self.assertResourceCalled('HdfsDirectory', '/apps/hive/warehouse',
        security_enabled = False,
-        hadoop_conf_dir = '/etc/hadoop/conf',
        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        conf_dir = '/etc/hadoop/conf',
+        hdfs_user = 'hdfs',
        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
+        mode = 0777,
        owner = 'hive',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
+        bin_dir = '/usr/bin',
        action = ['create_delayed'],
-        mode = 0777,
    )
-    self.assertResourceCalled('HdfsResource', '/user/hive',
+    self.assertResourceCalled('HdfsDirectory', '/user/hive',
        security_enabled = False,
-        hadoop_conf_dir = '/etc/hadoop/conf',
        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        conf_dir = '/etc/hadoop/conf',
+        hdfs_user = 'hdfs',
        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
+        mode = 0700,
        owner = 'hive',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
+        bin_dir = '/usr/bin',
        action = ['create_delayed'],
-        mode = 0700,
    )
-    self.assertResourceCalled('HdfsResource', None,
+    self.assertResourceCalled('HdfsDirectory', None,
        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        conf_dir = '/etc/hadoop/conf',
+        hdfs_user = 'hdfs',
        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
+        bin_dir = '/usr/bin',
+        action = ['create'],
    )
    self.assertResourceCalled('Directory', '/etc/hive',
        mode = 0755
@@ -434,81 +368,36 @@ class TestHiveServer(RMFTestCase):
    )

  def assert_configure_secured(self):
-    self.assertResourceCalled('HdfsResource', '/apps/webhcat',
+    self.assertResourceCalled('HdfsDirectory', '/apps/hive/warehouse',
        security_enabled = True,
-        hadoop_conf_dir = '/etc/hadoop/conf',
        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        conf_dir = '/etc/hadoop/conf',
+        hdfs_user = 'hdfs',
        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hcat',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_delayed'],
-        mode = 0755,
-    )
-    self.assertResourceCalled('HdfsResource', '/user/hcat',
-        security_enabled = True,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hcat',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_delayed'],
-        mode = 0755,
-    )
-    self.assertResourceCalled('HdfsResource', '/apps/webhcat/hive.tar.gz',
-        security_enabled = True,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        source = '/usr/share/HDP-webhcat/hive.tar.gz',
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['create_delayed'],
-        group = 'hadoop',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'file',
-        mode = 0755,
-    )
-    self.assertResourceCalled('HdfsResource', '/apps/hive/warehouse',
-        security_enabled = True,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
+        bin_dir = '/usr/bin',
+        mode = 0777,
        owner = 'hive',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
        action = ['create_delayed'],
-        mode = 0777,
    )
-    self.assertResourceCalled('HdfsResource', '/user/hive',
+    self.assertResourceCalled('HdfsDirectory', '/user/hive',
        security_enabled = True,
-        hadoop_conf_dir = '/etc/hadoop/conf',
        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        conf_dir = '/etc/hadoop/conf',
+        hdfs_user = 'hdfs',
        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
+        mode = 0700,
+        bin_dir = '/usr/bin',
        owner = 'hive',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
        action = ['create_delayed'],
-        mode = 0700,
    )
-    self.assertResourceCalled('HdfsResource', None,
+    self.assertResourceCalled('HdfsDirectory', None,
        security_enabled = True,
-        hadoop_bin_dir = '/usr/bin',
        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        conf_dir = '/etc/hadoop/conf',
+        hdfs_user = 'hdfs',
+        bin_dir = '/usr/bin',
        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
+        action = ['create'],
    )
    self.assertResourceCalled('Directory', '/etc/hive',
        mode = 0755
@@ -652,7 +541,6 @@ class TestHiveServer(RMFTestCase):
   @patch("hive_server.HiveServer.pre_rolling_restart")
   @patch("hive_server.HiveServer.pre_rolling_restart")
   @patch("hive_server.HiveServer.start")
   @patch("hive_server.HiveServer.start")
   @patch.object(shell, "call", new=MagicMock(return_value=(0,"hive-server2 - 2.2.0.0-2041")))
   @patch.object(shell, "call", new=MagicMock(return_value=(0,"hive-server2 - 2.2.0.0-2041")))
-  @patch.object(version, "get_hdp_build_version", new=MagicMock(return_value="2.2.0.0-2041"))
   def test_stop_during_upgrade(self, hive_server_start_mock,
   def test_stop_during_upgrade(self, hive_server_start_mock,
     hive_server_pre_rolling_mock):
     hive_server_pre_rolling_mock):
     
     

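The shape restored throughout assert_configure_default/assert_configure_secured is the two-phase HdfsDirectory batch: each directory is queued with action = ['create_delayed'], and a trailing name-less resource with action = ['create'] flushes the queue in one pass. A condensed sketch, assuming the HdfsDirectory resource this revert reinstates (keytab/conf kwargs omitted for brevity):

    from resource_management.libraries.resources.hdfs_directory import HdfsDirectory

    # Queue directories without touching HDFS yet...
    HdfsDirectory('/apps/hive/warehouse',
                  owner = 'hive',
                  mode = 0777,
                  action = ['create_delayed'])
    HdfsDirectory('/user/hive',
                  owner = 'hive',
                  mode = 0700,
                  action = ['create_delayed'])
    # ...then flush the whole queue with a single mkdir/chown/chmod pass.
    HdfsDirectory(None, action = ['create'])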
+ 134 - 0
ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py

@@ -116,6 +116,37 @@ class TestWebHCatServer(RMFTestCase):
    self.assertNoMoreResources()

  def assert_configure_default(self):
+    self.assertResourceCalled('HdfsDirectory', '/apps/webhcat',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              mode = 0755,
+                              owner = 'hcat',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/user/hcat',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              mode = 0755,
+                              owner = 'hcat',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              bin_dir = '/usr/bin',
+                              action = ['create'],
+                              )
    self.assertResourceCalled('Directory', '/var/run/webhcat',
                              owner = 'hcat',
                              group = 'hadoop',
@@ -133,6 +164,42 @@ class TestWebHCatServer(RMFTestCase):
                              group = 'hadoop',
                              recursive = True,
                              )
+    self.assertResourceCalled('CopyFromLocal', '/usr/lib/hadoop-mapreduce/hadoop-streaming-*.jar',
+                              owner='hcat',
+                              mode=0755,
+                              dest_dir='/apps/webhcat',
+                              kinnit_if_needed='',
+                              hadoop_conf_dir='/etc/hadoop/conf',
+                              hadoop_bin_dir='/usr/bin',
+                              hdfs_user='hdfs'
+    )
+    self.assertResourceCalled('CopyFromLocal', '/usr/share/HDP-webhcat/pig.tar.gz',
+                              owner='hcat',
+                              mode=0755,
+                              dest_dir='/apps/webhcat',
+                              kinnit_if_needed='',
+                              hadoop_conf_dir='/etc/hadoop/conf',
+                              hadoop_bin_dir='/usr/bin',
+                              hdfs_user='hdfs'
+    )
+    self.assertResourceCalled('CopyFromLocal', '/usr/share/HDP-webhcat/hive.tar.gz',
+                              owner='hcat',
+                              mode=0755,
+                              dest_dir='/apps/webhcat',
+                              kinnit_if_needed='',
+                              hadoop_bin_dir='/usr/bin',
+                              hadoop_conf_dir='/etc/hadoop/conf',
+                              hdfs_user='hdfs'
+    )
+    self.assertResourceCalled('CopyFromLocal', '/usr/share/HDP-webhcat/sqoop*.tar.gz',
+                              owner='hcat',
+                              mode=0755,
+                              dest_dir='/apps/webhcat',
+                              kinnit_if_needed='',
+                              hadoop_bin_dir='/usr/bin',
+                              hadoop_conf_dir='/etc/hadoop/conf',
+                              hdfs_user='hdfs'
+    )
    self.assertResourceCalled('XmlConfig', 'webhcat-site.xml',
                              owner = 'hcat',
                              group = 'hadoop',
@@ -147,6 +214,37 @@ class TestWebHCatServer(RMFTestCase):
                              )

  def assert_configure_secured(self):
+    self.assertResourceCalled('HdfsDirectory', '/apps/webhcat',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0755,
+                              owner = 'hcat',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/user/hcat',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0755,
+                              owner = 'hcat',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              bin_dir = '/usr/bin',
+                              action = ['create'],
+                              )
    self.assertResourceCalled('Directory', '/var/run/webhcat',
                              owner = 'hcat',
                              group = 'hadoop',
@@ -168,6 +266,42 @@ class TestWebHCatServer(RMFTestCase):
                              path = ['/bin'],
                              user = 'hcat',
                              )
+    self.assertResourceCalled('CopyFromLocal', '/usr/lib/hadoop-mapreduce/hadoop-streaming-*.jar',
+                              owner='hcat',
+                              mode=0755,
+                              dest_dir='/apps/webhcat',
+                              kinnit_if_needed='/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs;',
+                              hadoop_conf_dir='/etc/hadoop/conf',
+                              hadoop_bin_dir='/usr/bin',
+                              hdfs_user='hdfs'
+    )
+    self.assertResourceCalled('CopyFromLocal', '/usr/share/HDP-webhcat/pig.tar.gz',
+                              owner='hcat',
+                              mode=0755,
+                              dest_dir='/apps/webhcat',
+                              kinnit_if_needed='/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs;',
+                              hadoop_conf_dir='/etc/hadoop/conf',
+                              hadoop_bin_dir='/usr/bin',
+                              hdfs_user='hdfs'
+    )
+    self.assertResourceCalled('CopyFromLocal', '/usr/share/HDP-webhcat/hive.tar.gz',
+                              owner='hcat',
+                              mode=0755,
+                              dest_dir='/apps/webhcat',
+                              kinnit_if_needed='/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs;',
+                              hadoop_conf_dir='/etc/hadoop/conf',
+                              hadoop_bin_dir='/usr/bin',
+                              hdfs_user='hdfs'
+    )
+    self.assertResourceCalled('CopyFromLocal', '/usr/share/HDP-webhcat/sqoop*.tar.gz',
+                              owner='hcat',
+                              mode=0755,
+                              dest_dir='/apps/webhcat',
+                              kinnit_if_needed='/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs;',
+                              hadoop_conf_dir='/etc/hadoop/conf',
+                              hadoop_bin_dir='/usr/bin',
+                              hdfs_user='hdfs'
+    )
    self.assertResourceCalled('XmlConfig', 'webhcat-site.xml',
                              owner = 'hcat',
                              group = 'hadoop',

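Each CopyFromLocal assertion above corresponds to one upload of a local artifact into HDFS; on secured clusters the kinnit_if_needed string (the misspelling is the parameter's actual name in these fixtures) is the kinit prefix run before the copy, and it is empty under simple auth. A sketch mirroring the secured kwargs asserted above:

    from resource_management.libraries.resources.copy_from_local import CopyFromLocal

    CopyFromLocal('/usr/share/HDP-webhcat/pig.tar.gz',
                  owner = 'hcat',
                  mode = 0755,
                  dest_dir = '/apps/webhcat',
                  # empty string on non-secured clusters
                  kinnit_if_needed = '/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs;',
                  hadoop_conf_dir = '/etc/hadoop/conf',
                  hadoop_bin_dir = '/usr/bin',
                  hdfs_user = 'hdfs')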
+ 21 - 45
ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py

@@ -139,28 +139,16 @@ class TestOozieServer(RMFTestCase):


  def assert_configure_default(self):
-    self.assertResourceCalled('HdfsResource', '/user/oozie',
-        security_enabled = False,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'oozie',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_delayed'],
-        mode = 0775,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
+    self.assertResourceCalled('HdfsDirectory', '/user/oozie',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0775,
+                              owner = 'oozie',
+                              bin_dir = '/usr/bin',
+                              action = ['create'],
    )
    self.assertResourceCalled('Directory', '/etc/oozie/conf',
                              owner = 'oozie',
@@ -304,29 +292,17 @@ class TestOozieServer(RMFTestCase):


  def assert_configure_secured(self):
-    self.assertResourceCalled('HdfsResource', '/user/oozie',
-        security_enabled = True,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'oozie',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_delayed'],
-        mode = 0775,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = True,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-    )
+    self.assertResourceCalled('HdfsDirectory', '/user/oozie',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0775,
+                              owner = 'oozie',
+                              bin_dir = '/usr/bin',
+                              action = ['create'],
+                              )
    self.assertResourceCalled('Directory', '/etc/oozie/conf',
                              owner = 'oozie',
                              group = 'hadoop',

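Oozie shows the degenerate case of the batch pattern: with a single directory to manage there is nothing to delay, so the fixture expects action = ['create'] directly on the named resource and no trailing flush. A sketch under the same assumptions as above:

    from resource_management.libraries.resources.hdfs_directory import HdfsDirectory

    # One directory, one pass: no create_delayed/flush pair needed.
    HdfsDirectory('/user/oozie',
                  owner = 'oozie',
                  mode = 0775,
                  action = ['create'])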
+ 22 - 90
ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_service_check.py

@@ -4,7 +4,7 @@
 Licensed to the Apache Software Foundation (ASF) under one
 or more contributor license agreements.  See the NOTICE file
 distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file`
+regarding copyright ownership.  The ASF licenses this file
 to you under the Apache License, Version 2.0 (the
 "License"); you may not use this file except in compliance
 with the License.  You may obtain a copy of the License at
@@ -32,50 +32,15 @@ class TestPigServiceCheck(RMFTestCase):
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
-
-    self.assertResourceCalled('HdfsResource', '/user/ambari-qa/pigsmoke.out',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'ambari-qa',
-        action = ['delete_delayed'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-    )
-    self.assertResourceCalled('HdfsResource', '/user/ambari-qa/passwd',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'ambari-qa',
-        action = ['delete_delayed'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'file',
-    )
-    self.assertResourceCalled('HdfsResource', '/user/ambari-qa/passwd',
-        security_enabled = False,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = UnknownConfigurationMock(),
-        source = '/etc/passwd',
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'ambari-qa',
-        action = ['create_delayed'],
-        hadoop_bin_dir = '/usr/bin',
-        type = 'file',
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
+    self.assertResourceCalled('ExecuteHadoop', 'dfs -rmr pigsmoke.out passwd; hadoop --config /etc/hadoop/conf dfs -put /etc/passwd passwd ',
+      try_sleep = 5,
+      tries = 3,
+      user = 'ambari-qa',
+      conf_dir = '/etc/hadoop/conf',
+      security_enabled = False,
+      keytab = UnknownConfigurationMock(),
+      bin_dir = '/usr/bin',
+      kinit_path_local = '/usr/bin/kinit'
    )

    self.assertResourceCalled('File', '/tmp/pigSmoke.sh',
@@ -90,7 +55,7 @@ class TestPigServiceCheck(RMFTestCase):
      try_sleep = 5,
    )

-    self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /user/ambari-qa/pigsmoke.out',
+    self.assertResourceCalled('ExecuteHadoop', 'fs -test -e pigsmoke.out',
      user = 'ambari-qa',
      bin_dir = '/usr/bin',
      conf_dir = '/etc/hadoop/conf',
@@ -105,49 +70,16 @@ class TestPigServiceCheck(RMFTestCase):
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
-    self.assertResourceCalled('HdfsResource', '/user/ambari-qa/pigsmoke.out',
-        security_enabled = True,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'ambari-qa',
-        action = ['delete_delayed'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-    )
-    self.assertResourceCalled('HdfsResource', '/user/ambari-qa/passwd',
-        security_enabled = True,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'ambari-qa',
-        action = ['delete_delayed'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'file',
-    )
-    self.assertResourceCalled('HdfsResource', '/user/ambari-qa/passwd',
-        security_enabled = True,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        source = '/etc/passwd',
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'ambari-qa',
-        action = ['create_delayed'],
-        hadoop_bin_dir = '/usr/bin',
-        type = 'file',
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = True,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
+    
+    self.assertResourceCalled('ExecuteHadoop', 'dfs -rmr pigsmoke.out passwd; hadoop --config /etc/hadoop/conf dfs -put /etc/passwd passwd ',
+      try_sleep = 5,
+      tries = 3,
+      user = 'ambari-qa',
+      conf_dir = '/etc/hadoop/conf',
+      security_enabled = True, 
+      keytab = '/etc/security/keytabs/smokeuser.headless.keytab',
+      bin_dir = '/usr/bin',
+      kinit_path_local = '/usr/bin/kinit'
    )

    self.assertResourceCalled('File', '/tmp/pigSmoke.sh',
@@ -162,7 +94,7 @@ class TestPigServiceCheck(RMFTestCase):
      try_sleep = 5,
    )

-    self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /user/ambari-qa/pigsmoke.out',
+    self.assertResourceCalled('ExecuteHadoop', 'fs -test -e pigsmoke.out',
      user = 'ambari-qa',
      bin_dir = '/usr/bin',
      conf_dir = '/etc/hadoop/conf',

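The reverted Pig check stages its input in one shell round-trip: an ExecuteHadoop command string that removes any previous output and then puts /etc/passwd, with relative paths (pigsmoke.out, passwd) resolving under the smoke user's HDFS home directory. Sketched from the assertion above:

    from resource_management.libraries.resources.execute_hadoop import ExecuteHadoop

    # Clean up previous runs, then stage the input file, in one invocation;
    # tries/try_sleep retry the whole pipeline if HDFS is still coming up.
    ExecuteHadoop('dfs -rmr pigsmoke.out passwd; '
                  'hadoop --config /etc/hadoop/conf dfs -put /etc/passwd passwd ',
                  user = 'ambari-qa',
                  conf_dir = '/etc/hadoop/conf',
                  bin_dir = '/usr/bin',
                  tries = 3,
                  try_sleep = 5)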
+ 132 - 156
ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py

@@ -113,84 +113,72 @@ class TestHistoryServer(RMFTestCase):
    self.assertNoMoreResources()

  def assert_configure_default(self):
-
-    self.assertResourceCalled('HdfsResource', '/app-logs',
-        security_enabled = False,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = UnknownConfigurationMock(),
-        user = 'hdfs',
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        recursive_chmod = True,
-        owner = 'yarn',
-        group = 'hadoop',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_delayed'],
-        mode = 0777,
-    )
-    self.assertResourceCalled('HdfsResource', '/mapred',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'mapred',
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-        action = ['create_delayed'],
-    )
-    self.assertResourceCalled('HdfsResource', '/mapred/system',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hdfs',
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-        action = ['create_delayed'],
-    )
-    self.assertResourceCalled('HdfsResource', '/mr-history/tmp',
-        security_enabled = False,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'mapred',
-        group = 'hadoop',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_delayed'],
-        mode = 0777,
-    )
-    self.assertResourceCalled('HdfsResource', '/mr-history/done',
-        security_enabled = False,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'mapred',
-        group = 'hadoop',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_delayed'],
-        mode = 0777,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-    )
+    self.assertResourceCalled('HdfsDirectory', '/app-logs',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              recursive_chmod = True,
+                              owner = 'yarn',
+                              group = 'hadoop',
+                              action = ['create_delayed'],
+                              mode = 0777,
+                              bin_dir = '/usr/bin'
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mapred',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              owner = 'mapred',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mapred/system',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              owner = 'hdfs',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mr-history/tmp',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              mode = 0777,
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mr-history/done',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              mode = 01777,
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              bin_dir = '/usr/bin',
+                              action = ['create'],
+                              )
    self.assertResourceCalled('Directory', '/var/run/hadoop-yarn',
      owner = 'yarn',
      group = 'hadoop',
@@ -317,84 +305,72 @@ class TestHistoryServer(RMFTestCase):
                              )

  def assert_configure_secured(self):
-
-    self.assertResourceCalled('HdfsResource', '/app-logs',
-        security_enabled = True,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        user = 'hdfs',
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        recursive_chmod = True,
-        owner = 'yarn',
-        group = 'hadoop',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_delayed'],
-        mode = 0777,
-    )
-    self.assertResourceCalled('HdfsResource', '/mapred',
-        security_enabled = True,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'mapred',
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-        action = ['create_delayed'],
-    )
-    self.assertResourceCalled('HdfsResource', '/mapred/system',
-        security_enabled = True,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hdfs',
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-        action = ['create_delayed'],
-    )
-    self.assertResourceCalled('HdfsResource', '/mr-history/tmp',
-        security_enabled = True,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'mapred',
-        group = 'hadoop',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_delayed'],
-        mode = 0777,
-    )
-    self.assertResourceCalled('HdfsResource', '/mr-history/done',
-        security_enabled = True,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'mapred',
-        group = 'hadoop',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_delayed'],
-        mode = 0777,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = True,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-    )
+    self.assertResourceCalled('HdfsDirectory', '/app-logs',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              recursive_chmod = True,
+                              owner = 'yarn',
+                              group = 'hadoop',
+                              action = ['create_delayed'],
+                              bin_dir = '/usr/bin',
+                              mode = 0777,
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mapred',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              owner = 'mapred',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mapred/system',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              owner = 'hdfs',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mr-history/tmp',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0777,
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mr-history/done',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 01777,
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              bin_dir = '/usr/bin',
+                              action = ['create'],
+                              )
    self.assertResourceCalled('Directory', '/var/run/hadoop-yarn',
      owner = 'yarn',
      group = 'hadoop',

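Note the modes the revert restores for /mr-history/tmp versus /mr-history/done: 0777 versus 01777. The extra leading 1 in the Python 2 octal literal is the sticky bit, so the world-writable done directory still only lets owners delete their own entries:

    # Python 2 octal literals, as used in these fixtures:
    #   0777  -> rwxrwxrwx   (anyone can remove others' entries)
    #   01777 -> rwxrwxrwt   (sticky: only owners remove their entries)
    assert 01777 - 0777 == 01000   # the difference is exactly the sticky bit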
+ 0 - 2
ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py

@@ -18,7 +18,6 @@ See the License for the specific language governing permissions and
 limitations under the License.
 '''
 from mock.mock import MagicMock, call, patch
-from resource_management.libraries.functions import version
 from stacks.utils.RMFTestCase import *
 import os
 
@@ -310,7 +309,6 @@ class TestMapReduce2Client(RMFTestCase):
                              )
    self.assertNoMoreResources()

-  @patch.object(version, "get_hdp_build_version", new=MagicMock(return_value="2.2.0.0-2041"))
  def test_upgrade(self):
    self.executeScript("2.0.6/services/YARN/package/scripts/mapreduce2_client.py",
                   classname = "MapReduce2Client",

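The decorator dropped above is the standard mock idiom used throughout these tests: patch.object swaps a module attribute for the duration of one test method. A sketch of the removed decorator as it stood before the revert (get_hdp_build_version names the helper the revert removes, so the stub is no longer needed afterwards):

    from mock.mock import MagicMock, patch
    from resource_management.libraries.functions import version

    @patch.object(version, "get_hdp_build_version",
                  new=MagicMock(return_value="2.2.0.0-2041"))
    def test_upgrade(self):
        pass  # body runs with the stubbed version lookup in place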
+ 132 - 0
ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py

@@ -112,6 +112,72 @@ class TestNodeManager(RMFTestCase):
    self.assertNoMoreResources()

  def assert_configure_default(self):
+    self.assertResourceCalled('HdfsDirectory', '/app-logs',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              recursive_chmod = True,
+                              owner = 'yarn',
+                              group = 'hadoop',
+                              action = ['create_delayed'],
+                              bin_dir = '/usr/bin',
+                              mode = 0777,
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mapred',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              owner = 'mapred',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mapred/system',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              owner = 'hdfs',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mr-history/tmp',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              mode = 0777,
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mr-history/done',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              mode = 01777,
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              bin_dir = '/usr/bin',
+                              action = ['create'],
+                              )
     self.assertResourceCalled('Directory', '/hadoop/yarn/local',
                               owner = 'yarn',
                               group = 'hadoop',
@@ -270,6 +336,72 @@ class TestNodeManager(RMFTestCase):
                               )
 
   def assert_configure_secured(self):
+    self.assertResourceCalled('HdfsDirectory', '/app-logs',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              recursive_chmod = True,
+                              owner = 'yarn',
+                              group = 'hadoop',
+                              action = ['create_delayed'],
+                              bin_dir = '/usr/bin',
+                              mode = 0777,
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mapred',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              owner = 'mapred',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mapred/system',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              owner = 'hdfs',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mr-history/tmp',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0777,
+                              bin_dir = '/usr/bin',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mr-history/done',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 01777,
+                              bin_dir = '/usr/bin',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              bin_dir = '/usr/bin',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              action = ['create'],
+                              )
     self.assertResourceCalled('Directory', '/hadoop/yarn/local',
                               owner = 'yarn',
                               group = 'hadoop',
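
The restored assertions above follow the two-phase HdfsDirectory pattern: each directory is queued with action = ['create_delayed'], and a terminating call with a None path and action = ['create'] creates the whole queue in one pass. A minimal sketch of the same pattern as it would appear in a service script (connection arguments such as keytab, conf_dir, hdfs_user, kinit_path_local and bin_dir come from params in real scripts and are omitted here):

    from resource_management.libraries.resources.hdfs_directory import HdfsDirectory

    # Queue directories; nothing is touched in HDFS yet.
    HdfsDirectory("/app-logs",
                  owner="yarn",
                  group="hadoop",
                  mode=0777,
                  recursive_chmod=True,
                  action=["create_delayed"],
                  )
    HdfsDirectory("/mr-history/done",
                  owner="mapred",
                  group="hadoop",
                  mode=01777,
                  action=["create_delayed"],
                  )
    # The terminating call with no path flushes the queue in one batch.
    HdfsDirectory(None,
                  action=["create"],
                  )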

+ 0 - 2
ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py

@@ -19,7 +19,6 @@ limitations under the License.
 '''
 from mock.mock import MagicMock, call, patch
 from stacks.utils.RMFTestCase import *
-from resource_management.libraries.functions import version
 from resource_management.libraries.script.script import Script
 import os
 
@@ -446,7 +445,6 @@ class TestYarnClient(RMFTestCase):
     self.assertNoMoreResources()
 
 
-  @patch.object(version, "get_hdp_build_version", new=MagicMock(return_value="2.2.0.0-2041"))
   def test_upgrade(self):
     self.executeScript("2.0.6/services/YARN/package/scripts/yarn_client.py",
                    classname = "YarnClient",

+ 1 - 3
ambari-server/src/test/python/stacks/2.0.6/configs/default.json

@@ -465,9 +465,7 @@
         "ignore_groupsusers_create": "false",
         "ignore_groupsusers_create": "false",
         "smokeuser": "ambari-qa",
         "smokeuser": "ambari-qa",
         "kerberos_domain": "EXAMPLE.COM",
         "kerberos_domain": "EXAMPLE.COM",
-        "user_group": "hadoop",
-        "tez_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/",
-        "tez_tar_source" : "/usr/hdp/current/tez-client/lib/tez.tar.gz"
+        "user_group": "hadoop"
       },
 
       "hbase-env": {

+ 20 - 23
ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py

@@ -109,29 +109,26 @@ class TestFalconServer(RMFTestCase):
                               properties = self.getConfig()['configurations']['falcon-startup.properties'],
                               owner = 'falcon'
                               )
-    self.assertResourceCalled('HdfsResource', '/apps/falcon',
-        security_enabled = False,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'falcon',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_delayed'],
-        mode = 0777,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-    )
+    self.assertResourceCalled('HdfsDirectory', '/apps/falcon',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0777,
+                              owner = 'falcon',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              bin_dir = '/usr/bin',
+                              action = ['create'],
+                              )
     self.assertResourceCalled('Directory', '/hadoop/falcon',
                               owner = 'falcon',
                               recursive = True,
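
For comparison, the two call shapes in this hunk: the removed HdfsResource form queues typed entries (type = 'directory') and applies them with a final action = ['execute'] (the batch path backed by fast-hdfs-resource.jar, whose build files are deleted further down), while the restored HdfsDirectory form ends with action = ['create']. A condensed, illustrative sketch of the two, with connection arguments omitted:

    # Rolled back by this revert: batched execution via fast-hdfs-resource.jar.
    HdfsResource("/apps/falcon", type="directory", owner="falcon",
                 mode=0777, action=["create_delayed"])
    HdfsResource(None, action=["execute"])

    # Restored by this revert: directory creation through hadoop fs calls.
    HdfsDirectory("/apps/falcon", owner="falcon",
                  mode=0777, action=["create_delayed"])
    HdfsDirectory(None, action=["create"])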

+ 0 - 1
ambari-server/src/test/python/unitTests.py

@@ -27,7 +27,6 @@ import shutil
 
 #excluded directories with non-test staff from stack and service scanning,
 #also we can add service or stack to skip here
-# FIXME: remove this once 1.3.2 stacks is deleted (not supported)
 STACK_EXCLUDE = ["utils", "1.3.2"]
 STACK_EXCLUDE = ["utils", "1.3.2"]
 SERVICE_EXCLUDE = ["configs"]
 SERVICE_EXCLUDE = ["configs"]
 
 

+ 1 - 1
ambari-web/app/config.js

@@ -19,7 +19,7 @@
 
 var App = require('app');
 
-App.version = ''; // filled out by set-ambari-version.sh script
+App.version = '1.3.0'; // filled out by set-ambari-version.sh script
 App.testMode = (location.port == '3333'); // test mode is automatically enabled if running on brunch server
 App.testModeDelayForActions = 10000;
 App.skipBootstrap = false;

+ 0 - 58
contrib/fast-hdfs-resource/dependency-reduced-pom.xml

@@ -1,58 +0,0 @@
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>org.apache.ambari</groupId>
-  <artifactId>fast-hdfs-resource</artifactId>
-  <name>fast-hdfs-resource</name>
-  <version>0.0.1-SNAPSHOT</version>
-  <url>http://maven.apache.org</url>
-  <build>
-    <plugins>
-      <plugin>
-        <artifactId>maven-shade-plugin</artifactId>
-        <version>2.3</version>
-        <executions>
-          <execution>
-            <phase>package</phase>
-            <goals>
-              <goal>shade</goal>
-            </goals>
-            <configuration>
-              <transformers>
-                <transformer>
-                  <mainClass>org.apache.ambari.fast_hdfs_resource.Runner</mainClass>
-                </transformer>
-              </transformers>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-    </plugins>
-  </build>
-  <repositories>
-    <repository>
-      <id>hdp.internal</id>
-      <url>http://repo1.maven.org/maven2</url>
-    </repository>
-  </repositories>
-  <properties>
-    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-  </properties>
-</project>
-

+ 4 - 4
contrib/fast-hdfs-resource/src/main/java/org/apache/ambari/fast_hdfs_resource/Resource.java

@@ -152,15 +152,15 @@ public class Resource {
     if (dfs.isFile(new Path(resource.getTarget()))
         && !"file".equals(resource.getType()))
       throw new IllegalArgumentException(
-          "Cannot create a directory " + resource.getTarget() +
-              " because file is present on the given path.");
+          "Cannot create a file " + resource.getTarget() +
+              " because directory is present on the given path.");
 
 
     // Check consistency for ("type":"directory" == directory in hadoop)
     // Check consistency for ("type":"directory" == directory in hadoop)
     if (dfs.isDirectory(new Path(resource.getTarget()))
     if (dfs.isDirectory(new Path(resource.getTarget()))
         && !"directory".equals(resource.getType()))
         && !"directory".equals(resource.getType()))
       throw new IllegalArgumentException(
       throw new IllegalArgumentException(
-          "Cannot create a file " + resource.getTarget() +
-              " because directory is present on the given path.");
+          "Cannot create a directory " + resource.getTarget() +
+              " because file is present on the given path.");
 
 
   }
   }
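
The hunk above swaps the two error messages back to their pre-AMBARI-8932 wording; the consistency rule itself is unchanged: a path that already exists may only satisfy a resource of the matching type. A minimal sketch of that rule, written in Python for illustration with simplified messages (dfs.isfile and dfs.isdir stand in for the HDFS client calls):

    def check_type_consistency(dfs, target, resource_type):
      # An existing file can only back a "file" resource ...
      if dfs.isfile(target) and resource_type != "file":
        raise ValueError("type mismatch at %s: a file is present on the given path" % target)
      # ... and an existing directory can only back a "directory" resource.
      if dfs.isdir(target) and resource_type != "directory":
        raise ValueError("type mismatch at %s: a directory is present on the given path" % target)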