AMBARI-4583. Extract common hdfs directories creation code per services
(Eugene Chekanskiy via aonishuk)

Andrew Onischuk 11 years ago
commit fa47490b58
57 changed files with 1833 additions and 249 deletions
  1. + 2 - 1  ambari-agent/src/main/python/resource_management/libraries/providers/__init__.py
  2. + 109 - 0  ambari-agent/src/main/python/resource_management/libraries/providers/hdfs_directory.py
  3. + 2 - 1  ambari-agent/src/main/python/resource_management/libraries/resources/__init__.py
  4. + 44 - 0  ambari-agent/src/main/python/resource_management/libraries/resources/hdfs_directory.py
  5. + 18 - 5  ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/package/scripts/hbase.py
  6. + 1 - 1  ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/package/scripts/hbase_client.py
  7. + 1 - 2  ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/package/scripts/hbase_master.py
  8. + 1 - 2  ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/package/scripts/hbase_regionserver.py
  9. + 21 - 0  ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/package/scripts/params.py
  10. + 22 - 87  ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs_namenode.py
  11. + 22 - 33  ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py
  12. + 13 - 0  ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hive.py
  13. + 1 - 1  ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hive_server.py
  14. + 22 - 0  ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/params.py
  15. + 2 - 3  ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/historyserver.py
  16. + 2 - 3  ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/jobtracker.py
  17. + 30 - 2  ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/mapreduce.py
  18. + 22 - 1  ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/params.py
  19. + 6 - 0  ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/oozie.py
  20. + 1 - 1  ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/oozie_server.py
  21. + 21 - 0  ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/params.py
  22. + 25 - 0  ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/params.py
  23. + 17 - 0  ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/webhcat.py
  24. + 16 - 5  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/hbase.py
  25. + 1 - 1  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/hbase_client.py
  26. + 1 - 2  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/hbase_master.py
  27. + 1 - 2  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/hbase_regionserver.py
  28. + 21 - 0  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/params.py
  29. + 16 - 36  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
  30. + 13 - 0  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive.py
  31. + 1 - 1  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive_server.py
  32. + 24 - 1  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py
  33. + 7 - 0  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/oozie.py
  34. + 20 - 0  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/params.py
  35. + 24 - 0  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/params.py
  36. + 18 - 0  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat.py
  37. + 2 - 3  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/historyserver.py
  38. + 2 - 3  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/nodemanager.py
  39. + 25 - 1  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py
  40. + 34 - 1  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/yarn.py
  41. + 6 - 0  ambari-server/src/main/resources/stacks/HDP/2.1.1/services/FALCON/package/scripts/falcon.py
  42. + 19 - 0  ambari-server/src/main/resources/stacks/HDP/2.1.1/services/FALCON/package/scripts/params.py
  43. + 64 - 10  ambari-server/src/test/python/stacks/1.3.2/HBASE/test_hbase_master.py
  44. + 64 - 10  ambari-server/src/test/python/stacks/1.3.2/HBASE/test_hbase_regionserver.py
  45. + 56 - 0  ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_server.py
  46. + 116 - 2  ambari-server/src/test/python/stacks/1.3.2/MAPREDUCE/test_mapreduce_historyserver.py
  47. + 116 - 2  ambari-server/src/test/python/stacks/1.3.2/MAPREDUCE/test_mapreduce_jobtracker.py
  48. + 200 - 0  ambari-server/src/test/python/stacks/1.3.2/OOZIE/test_oozie_server.py
  49. + 56 - 0  ambari-server/src/test/python/stacks/1.3.2/WEBHCAT/test_webhcat_server.py
  50. + 64 - 10  ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
  51. + 64 - 10  ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py
  52. + 56 - 0  ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
  53. + 20 - 0  ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
  54. + 56 - 0  ambari-server/src/test/python/stacks/2.0.6/WEBHCAT/test_webhcat_server.py
  55. + 121 - 3  ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
  56. + 121 - 3  ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
  57. + 3 - 0  ambari-server/src/test/python/stacks/utils/RMFTestCase.py

+ 2 - 1
ambari-agent/src/main/python/resource_management/libraries/providers/__init__.py

@@ -31,6 +31,7 @@ PROVIDERS = dict(
     XmlConfig="resource_management.libraries.providers.xml_config.XmlConfigProvider",
     PropertiesFile="resource_management.libraries.providers.properties_file.PropertiesFileProvider",
     MonitorWebserver="resource_management.libraries.providers.monitor_webserver.MonitorWebserverProvider",
-    Repository="resource_management.libraries.providers.repository.RepositoryProvider"
+    Repository="resource_management.libraries.providers.repository.RepositoryProvider",
+    HdfsDirectory="resource_management.libraries.providers.hdfs_directory.HdfsDirectoryProvider"
   ),
 )

+ 109 - 0
ambari-agent/src/main/python/resource_management/libraries/providers/hdfs_directory.py

@@ -0,0 +1,109 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+
+from resource_management import *
+directories_list = [] #directories list for mkdir
+chmod_map = {} #(mode,recursive):dir_list map
+chown_map = {} #(owner,group,recursive):dir_list map
+class HdfsDirectoryProvider(Provider):
+  def action_create_delayed(self):
+    global directories_list
+    global chmod_map
+    global chown_map
+
+    if not self.resource.dir_name:
+      return
+
+    dir_name = self.resource.dir_name
+    dir_owner = self.resource.owner
+    dir_group = self.resource.group
+    dir_mode = oct(self.resource.mode)[1:] if self.resource.mode else None
+    directories_list.append(self.resource.dir_name)
+
+    recursive_chown_str = "-R" if self.resource.recursive_chown else ""
+    recursive_chmod_str = "-R" if self.resource.recursive_chmod else ""
+    # grouping directories by mode/owner/group to modify them in one 'chXXX' call
+    if dir_mode:
+      chmod_key = (dir_mode,recursive_chmod_str)
+      if chmod_map.has_key(chmod_key):
+        chmod_map[chmod_key].append(dir_name)
+      else:
+        chmod_map[chmod_key] = [dir_name]
+
+    if dir_owner:
+      owner_key = (dir_owner,dir_group,recursive_chown_str)
+      if chown_map.has_key(owner_key):
+        chown_map[owner_key].append(dir_name)
+      else:
+        chown_map[owner_key] = [dir_name]
+
+  def action_create(self):
+    global directories_list
+    global chmod_map
+    global chown_map
+
+    self.action_create_delayed()
+
+    hdp_conf_dir = self.resource.conf_dir
+    hdp_hdfs_user = self.resource.hdfs_user
+    secured = self.resource.security_enabled
+    keytab_file = self.resource.keytab
+    kinit_path = self.resource.kinit_path_local
+
+    chmod_commands = []
+    chown_commands = []
+
+    for chmod_key, chmod_dirs in chmod_map.items():
+      mode = chmod_key[0]
+      recursive = chmod_key[1]
+      chmod_dirs_str = ' '.join(chmod_dirs)
+      chmod_commands.append(format("hadoop fs -chmod {recursive} {mode} {chmod_dirs_str}"))
+
+    for chown_key, chown_dirs in chown_map.items():
+      owner = chown_key[0]
+      group = chown_key[1]
+      recursive = chown_key[2]
+      chown_dirs_str = ' '.join(chown_dirs)
+      if owner:
+        chown = owner
+        if group:
+          chown = format("{owner}:{group}")
+        chown_commands.append(format("hadoop fs -chown {recursive} {chown} {chown_dirs_str}"))
+
+    if secured:
+        Execute(format("{kinit_path} -kt {keytab_file} {hdp_hdfs_user}"),
+                user=hdp_hdfs_user)
+    #create all directories in one 'mkdir' call
+    dir_list_str = ' '.join(directories_list)
+    #for hadoop 2 we need to specify -p to create directories recursively
+    parent_flag = '`rpm -q hadoop | grep -q "hadoop-1" || echo "-p"`'
+
+    Execute(format('hadoop fs -mkdir {parent_flag} {dir_list_str} && {chmod_cmd} && {chown_cmd}',
+                   chmod_cmd=' && '.join(chmod_commands),
+                   chown_cmd=' && '.join(chown_commands)),
+            user=hdp_hdfs_user,
+            not_if=format("su - {hdp_hdfs_user} -c 'hadoop fs -ls {dir_list_str}'")
+    )
+
+    directories_list[:] = []
+    chmod_map.clear()
+    chown_map.clear()
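
For context: action_create_delayed above only queues work in the module-level maps, while action_create flushes everything in one batched hadoop fs invocation, grouping directories that share a mode or an owner/group so each group costs a single chmod or chown. A minimal standalone sketch of that grouping idea (the directory names and modes are made up for illustration, not taken from the commit):

  # Group directories by (mode, recursive flag); each group becomes one chmod.
  chmod_map = {}
  for dir_name, mode in [("/tmp", "777"), ("/user/ambari-qa", "770"), ("/mapred", "777")]:
    chmod_map.setdefault((mode, ""), []).append(dir_name)
  for (mode, recursive), dirs in chmod_map.items():
    print "hadoop fs -chmod %s %s %s" % (recursive, mode, ' '.join(dirs))
  # -> one chmod covers /tmp and /mapred (777), a second covers /user/ambari-qa (770)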

+ 2 - 1
ambari-agent/src/main/python/resource_management/libraries/resources/__init__.py

@@ -25,4 +25,5 @@ from resource_management.libraries.resources.template_config import *
 from resource_management.libraries.resources.xml_config import *
 from resource_management.libraries.resources.properties_file import *
 from resource_management.libraries.resources.repository import *
-from resource_management.libraries.resources.monitor_webserver import *
+from resource_management.libraries.resources.monitor_webserver import *
+from resource_management.libraries.resources.hdfs_directory import *

+ 44 - 0
ambari-agent/src/main/python/resource_management/libraries/resources/hdfs_directory.py

@@ -0,0 +1,44 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+
+__all__ = ["HdfsDirectory"]
+from resource_management.core.base import Resource, ForcedListArgument, ResourceArgument, BooleanArgument
+
+class HdfsDirectory(Resource):
+  action = ForcedListArgument()
+
+  dir_name = ResourceArgument(default=lambda obj: obj.name)
+  owner = ResourceArgument()
+  group = ResourceArgument()
+  mode = ResourceArgument()
+  recursive_chown = BooleanArgument(default=False)
+  recursive_chmod = BooleanArgument(default=False)
+
+  conf_dir = ResourceArgument()
+  security_enabled = BooleanArgument(default=False)
+  keytab = ResourceArgument()
+  kinit_path_local = ResourceArgument()
+  hdfs_user = ResourceArgument()
+
+  #action 'create' immediately creates all pending directories in an efficient manner
+  #action 'create_delayed' adds the directory to the list of pending directories
+  actions = Resource.actions + ["create","create_delayed"]
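
For orientation, the call pattern the service scripts below follow: queue each directory with 'create_delayed', then flush the whole batch with a single 'create'. A sketch of that pattern (the path and mode here are placeholders, not values from this commit):

  params.HdfsDirectory("/apps/example/dir",      # hypothetical path
                       action="create_delayed",  # only queues the directory
                       owner=params.hdfs_user,
                       mode=0755
  )
  params.HdfsDirectory(None, action="create")    # one batched hadoop fs call for the queue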

+ 18 - 5
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/package/scripts/hbase.py

@@ -22,10 +22,23 @@ import os
 from resource_management import *
 import sys
 
-def hbase(type=None # 'master' or 'regionserver' or 'client'
+def hbase(name=None # 'master' or 'regionserver' or 'client'
               ):
   import params
-  
+
+
+  if name in ["regionserver","master"]:
+    params.HdfsDirectory(params.hbase_hdfs_root_dir,
+                         action="create_delayed",
+                         owner=params.hbase_user
+    )
+    params.HdfsDirectory(params.hbase_staging_dir,
+                         action="create_delayed",
+                         owner=params.hbase_user,
+                         mode=0711
+    )
+    params.HdfsDirectory(None, action="create")
+
   Directory( params.conf_dir,
       owner = params.hbase_user,
       group = params.user_group,
@@ -67,15 +80,15 @@ def hbase(type=None # 'master' or 'regionserver' or 'client'
   hbase_TemplateConfig( 'hbase-env.sh')
 
   hbase_TemplateConfig( params.metric_prop_file_name,
-    tag = 'GANGLIA-MASTER' if type == 'master' else 'GANGLIA-RS'
+    tag = 'GANGLIA-MASTER' if name == 'master' else 'GANGLIA-RS'
   )
 
   hbase_TemplateConfig( 'regionservers')
 
   if params.security_enabled:
-    hbase_TemplateConfig( format("hbase_{type}_jaas.conf"))
+    hbase_TemplateConfig( format("hbase_{name}_jaas.conf"))
 
-  if type != "client":
+  if name != "client":
     Directory( params.pid_dir,
       owner = params.hbase_user,
       recursive = True

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/package/scripts/hbase_client.py

@@ -33,7 +33,7 @@ class HbaseClient(Script):
     import params
     env.set_params(params)
 
-    hbase(type='client')
+    hbase(name='client')
 
   def status(self, env):
     raise ClientComponentHasNoStatus()

+ 1 - 2
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/package/scripts/hbase_master.py

@@ -29,13 +29,12 @@ from hbase_decommission import hbase_decommission
 class HbaseMaster(Script):
   def install(self, env):
     self.install_packages(env)
-    self.configure(env)
 
   def configure(self, env):
     import params
     env.set_params(params)
 
-    hbase(type='master')
+    hbase(name='master')
 
   def start(self, env):
     import params

+ 1 - 2
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/package/scripts/hbase_regionserver.py

@@ -28,13 +28,12 @@ from hbase_service import hbase_service
 class HbaseRegionServer(Script):
   def install(self, env):
     self.install_packages(env)
-    self.configure(env)
 
   def configure(self, env):
     import params
     env.set_params(params)
 
-    hbase(type='regionserver')
+    hbase(name='regionserver')
 
   def start(self, env):
     import params

+ 21 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/package/scripts/params.py

@@ -98,3 +98,24 @@ if ('hbase-log4j' in config['configurations']):
   log4j_props = config['configurations']['hbase-log4j']
 else:
   log4j_props = None
+
+#hdfs directories
+hbase_hdfs_root_dir = config['configurations']['hbase-site']['hbase.rootdir']
+hbase_staging_dir = "/apps/hbase/staging"
+#for create_hdfs_directory
+hostname = config["hostname"]
+hadoop_conf_dir = "/etc/hadoop/conf"
+hdfs_user_keytab = config['configurations']['global']['hdfs_user_keytab']
+hdfs_user = config['configurations']['global']['hdfs_user']
+kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+import functools
+#create partial functions with common arguments for every HdfsDirectory call
+#to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
+  security_enabled = security_enabled,
+  keytab = hdfs_user_keytab,
+  kinit_path_local = kinit_path_local
+)
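
This functools.partial block is repeated in each service's params.py: the security- and environment-related keyword arguments are bound once, so call sites pass only the directory-specific ones. A self-contained illustration of the mechanism (toy function and values, not the real resource):

  import functools

  def hdfs_directory(dir_name, action=None, owner=None, mode=None,
                     conf_dir=None, hdfs_user=None):
    print dir_name, action, owner, mode, conf_dir, hdfs_user

  # Bind the common arguments once, as params.py does above.
  HdfsDirectory = functools.partial(hdfs_directory,
                                    conf_dir="/etc/hadoop/conf",
                                    hdfs_user="hdfs")
  # Call sites then look like the ones in hbase.py:
  HdfsDirectory("/apps/hbase/staging", action="create_delayed",
                owner="hbase", mode=0711)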

+ 22 - 87
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs_namenode.py

@@ -19,18 +19,16 @@ limitations under the License.
 
 from resource_management import *
 from utils import service
-from utils import hdfs_directory
-import urlparse
 
 
-def namenode(action=None, format=True):
+def namenode(action=None, do_format=True):
   import params
 
   if action == "configure":
     create_name_dirs(params.dfs_name_dir)
 
   if action == "start":
-    if format:
+    if do_format:
       format_namenode()
       pass
     service(
@@ -41,9 +39,15 @@ def namenode(action=None, format=True):
       principal=params.dfs_namenode_kerberos_principal
     )
 
-    # TODO: extract creating of dirs to different services
-    create_app_directories()
-    create_user_directories()
+    namenode_safe_mode_off = format("su - {hdfs_user} -c 'hadoop dfsadmin -safemode get' | grep 'Safe mode is OFF'")
+    if params.security_enabled:
+      Execute(format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_user}"),
+              user = params.hdfs_user)
+    Execute(namenode_safe_mode_off,
+            tries=40,
+            try_sleep=10
+    )
+    create_hdfs_directories()
 
   if action == "stop":
     service(
@@ -66,89 +70,20 @@ def create_name_dirs(directories):
             recursive=True
   )
 
-
-def create_app_directories():
+def create_hdfs_directories():
   import params
 
-  hdfs_directory(name="/tmp",
-                 owner=params.hdfs_user,
-                 mode="777"
+  params.HdfsDirectory("/tmp",
+                       action="create_delayed",
+                       owner=params.hdfs_user,
+                       mode=0777
   )
-  #mapred directories
-  if params.has_jobtracker:
-    hdfs_directory(name="/mapred",
-                   owner=params.mapred_user
-    )
-    hdfs_directory(name="/mapred/system",
-                   owner=params.mapred_user
-    )
-    #hbase directories
-  if len(params.hbase_master_hosts) != 0:
-    hdfs_directory(name=params.hbase_hdfs_root_dir,
-                   owner=params.hbase_user
-    )
-    hdfs_directory(name=params.hbase_staging_dir,
-                   owner=params.hbase_user,
-                   mode="711"
-    )
-    #hive directories
-  if len(params.hive_server_host) != 0:
-    hdfs_directory(name=params.hive_apps_whs_dir,
-                   owner=params.hive_user,
-                   mode="777"
-    )
-  if len(params.hcat_server_hosts) != 0:
-    hdfs_directory(name=params.webhcat_apps_dir,
-                   owner=params.webhcat_user,
-                   mode="755"
-    )
-  if len(params.hs_host) != 0:
-    hdfs_directory(name=params.mapreduce_jobhistory_intermediate_done_dir,
-                   owner=params.mapred_user,
-                   group=params.user_group,
-                   mode="777"
-    )
-
-    hdfs_directory(name=params.mapreduce_jobhistory_done_dir,
-                   owner=params.mapred_user,
-                   group=params.user_group,
-                   mode="777"
-    )
-
-  pass
-
-
-def create_user_directories():
-  import params
-
-  hdfs_directory(name=params.smoke_hdfs_user_dir,
-                 owner=params.smoke_user,
-                 mode=params.smoke_hdfs_user_mode
+  params.HdfsDirectory(params.smoke_hdfs_user_dir,
+                       action="create_delayed",
+                       owner=params.smoke_user,
+                       mode=params.smoke_hdfs_user_mode
   )
   )
-  if params.has_hive_server_host:
-    hdfs_directory(name=params.hive_hdfs_user_dir,
-                   owner=params.hive_user,
-                   mode=params.hive_hdfs_user_mode
-    )
-
-  if params.has_hcat_server_host:
-    if params.hcat_hdfs_user_dir != params.webhcat_hdfs_user_dir:
-      hdfs_directory(name=params.hcat_hdfs_user_dir,
-                     owner=params.hcat_user,
-                     mode=params.hcat_hdfs_user_mode
-      )
-    hdfs_directory(name=params.webhcat_hdfs_user_dir,
-                   owner=params.webhcat_user,
-                   mode=params.webhcat_hdfs_user_mode
-    )
-
-  if params.has_oozie_server:
-    hdfs_directory(name=params.oozie_hdfs_user_dir,
-                   owner=params.oozie_user,
-                   mode=params.oozie_hdfs_user_mode
-    )
-
+  params.HdfsDirectory(None, action="create")
 
 
 def format_namenode(force=None):
 def format_namenode(force=None):
   import params
   import params
@@ -191,4 +126,4 @@ def decommission():
                   user=hdfs_user,
                   conf_dir=conf_dir,
                   kinit_override=True)
-    pass
+    pass
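
Worth noting in the hunk above: directory creation now waits until the NameNode reports safe mode is off, re-running the grep up to 40 times with a 10-second pause between attempts (the tries/try_sleep arguments to Execute). A rough standalone equivalent of that retry behavior (assumed helper, not the resource_management implementation):

  import subprocess, time

  def wait_for(cmd, tries=40, try_sleep=10):
    # Retry the shell command until it exits 0, sleeping between attempts.
    for attempt in range(tries):
      if subprocess.call(cmd, shell=True) == 0:
        return
      time.sleep(try_sleep)
    raise Exception("command kept failing: %s" % cmd)

  wait_for("su - hdfs -c 'hadoop dfsadmin -safemode get' | grep 'Safe mode is OFF'")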

+ 22 - 33
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py

@@ -123,49 +123,38 @@ dfs_domain_socket_dir = os.path.dirname(dfs_domain_socket_path)
 
 hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
 
-jn_edits_dir = config['configurations']['hdfs-site']['dfs.journalnode.edits.dir']#"/grid/0/hdfs/journal"
+jn_edits_dir = config['configurations']['hdfs-site']['dfs.journalnode.edits.dir']
 
-# if stack_version[0] == "2":
-#dfs_name_dir = config['configurations']['hdfs-site']['dfs.namenode.name.dir']
-# else:
-dfs_name_dir = config['configurations']['hdfs-site']['dfs.name.dir']#","/tmp/hadoop-hdfs/dfs/name")
+dfs_name_dir = config['configurations']['hdfs-site']['dfs.name.dir']
 
 namenode_dirs_created_stub_dir = format("{hdfs_log_dir_prefix}/{hdfs_user}")
 namenode_dirs_stub_filename = "namenode_dirs_created"
 
-hbase_hdfs_root_dir = config['configurations']['hbase-site']['hbase.rootdir']#","/apps/hbase/data")
-hbase_staging_dir = "/apps/hbase/staging"
-hive_apps_whs_dir = config['configurations']['hive-site']["hive.metastore.warehouse.dir"] #, "/apps/hive/warehouse")
-webhcat_apps_dir = "/apps/webhcat"
-mapreduce_jobhistory_intermediate_done_dir = config['configurations']['mapred-site']['mapreduce.jobhistory.intermediate-done-dir']#","/app-logs")
-mapreduce_jobhistory_done_dir = config['configurations']['mapred-site']['mapreduce.jobhistory.done-dir']#","/mr-history/done")
-
-if has_oozie_server:
-  oozie_hdfs_user_dir = format("/user/{oozie_user}")
-  oozie_hdfs_user_mode = 775
-if has_hcat_server_host:
-  hcat_hdfs_user_dir = format("/user/{hcat_user}")
-  hcat_hdfs_user_mode = 755
-  webhcat_hdfs_user_dir = format("/user/{webhcat_user}")
-  webhcat_hdfs_user_mode = 755
-if has_hive_server_host:
-  hive_hdfs_user_dir = format("/user/{hive_user}")
-  hive_hdfs_user_mode = 700
 smoke_hdfs_user_dir = format("/user/{smoke_user}")
-smoke_hdfs_user_mode = 770
+smoke_hdfs_user_mode = 0770
 
 namenode_formatted_mark_dir = format("{hadoop_pid_dir_prefix}/hdfs/namenode/formatted/")
 
-# if stack_version[0] == "2":
-#fs_checkpoint_dir = config['configurations']['hdfs-site']['dfs.namenode.checkpoint.dir'] #","/tmp/hadoop-hdfs/dfs/namesecondary")
-# else:
-fs_checkpoint_dir = config['configurations']['core-site']['fs.checkpoint.dir']#","/tmp/hadoop-hdfs/dfs/namesecondary")
-
-# if stack_version[0] == "2":
-#dfs_data_dir = config['configurations']['hdfs-site']['dfs.datanode.data.dir']#,"/tmp/hadoop-hdfs/dfs/data")
-# else:
-dfs_data_dir = config['configurations']['hdfs-site']['dfs.data.dir']#,"/tmp/hadoop-hdfs/dfs/data")
+fs_checkpoint_dir = config['configurations']['core-site']['fs.checkpoint.dir']
 
+dfs_data_dir = config['configurations']['hdfs-site']['dfs.data.dir']
+#for create_hdfs_directory
+hostname = config["hostname"]
+hadoop_conf_dir = "/etc/hadoop/conf"
+hdfs_user_keytab = config['configurations']['global']['hdfs_user_keytab']
+hdfs_user = config['configurations']['global']['hdfs_user']
+kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+import functools
+#create partial functions with common arguments for every HdfsDirectory call
+#to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
+  security_enabled = security_enabled,
+  keytab = hdfs_user_keytab,
+  kinit_path_local = kinit_path_local
+)

+ 13 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hive.py

@@ -26,6 +26,19 @@ import sys
 def hive(name=None):
   import params
 
+
+  if name == "hiveserver2":
+    params.HdfsDirectory(params.hive_apps_whs_dir,
+                   action="create_delayed",
+                   owner=params.hive_user,
+                   mode=0777
+    )
+    params.HdfsDirectory(params.hive_hdfs_user_dir,
+                   action="create_delayed",
+                   owner=params.hive_user,
+                   mode=params.hive_hdfs_user_mode
+    )
+    params.HdfsDirectory(None, action="create")
   if name == 'metastore' or name == 'hiveserver2':
     hive_config_dir = params.hive_server_conf_dir
     config_file_mode = 0600

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hive_server.py

@@ -28,7 +28,7 @@ class HiveServer(Script):
 
   def install(self, env):
     self.install_packages(env)
-    self.configure(env)
+
   def configure(self, env):
     import params
     env.set_params(params)

+ 22 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/params.py

@@ -135,3 +135,25 @@ else:
   log4j_exec_props = None
 
 daemon_name = status_params.daemon_name
+
+#hdfs directories
+hive_apps_whs_dir = config['configurations']['hive-site']["hive.metastore.warehouse.dir"]
+hive_hdfs_user_dir = format("/user/{hive_user}")
+hive_hdfs_user_mode = 0700
+#for create_hdfs_directory
+hostname = config["hostname"]
+hadoop_conf_dir = "/etc/hadoop/conf"
+hdfs_user_keytab = config['configurations']['global']['hdfs_user_keytab']
+hdfs_user = config['configurations']['global']['hdfs_user']
+kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+import functools
+#create partial functions with common arguments for every HdfsDirectory call
+#to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
+  security_enabled = security_enabled,
+  keytab = hdfs_user_keytab,
+  kinit_path_local = kinit_path_local
+)

+ 2 - 3
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/historyserver.py

@@ -28,12 +28,11 @@ from service import service
 class Historyserver(Script):
   def install(self, env):
     self.install_packages(env)
-    self.configure(env)
 
   def configure(self, env):
     import params
     env.set_params(params)
-    mapreduce()
+    mapreduce(name="historyserver")
 
   def start(self, env):
     import params
@@ -57,4 +56,4 @@ class Historyserver(Script):
      check_process_status(status_params.historyserver_pid_file)
 
 if __name__ == "__main__":
-  Historyserver().execute()
+  Historyserver().execute()

+ 2 - 3
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/jobtracker.py

@@ -29,12 +29,11 @@ from service import service
 class Jobtracker(Script):
   def install(self, env):
     self.install_packages(env)
-    self.configure(env)
 
   def configure(self, env):
     import params
     env.set_params(params)
-    mapreduce()
+    mapreduce(name="jobtracker")
 
   def start(self, env):
     import params
@@ -82,4 +81,4 @@ class Jobtracker(Script):
     pass
 
 if __name__ == "__main__":
-  Jobtracker().execute()
+  Jobtracker().execute()

+ 30 - 2
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/mapreduce.py

@@ -24,9 +24,37 @@ from resource_management import *
 import sys
 
 
-def mapreduce():
+def mapreduce(name=None):
   import params
 
+
+  if name in ["jobtracker","historyserver"]:
+    params.HdfsDirectory("/mapred",
+                         action="create_delayed",
+                         owner=params.mapred_user
+    )
+    params.HdfsDirectory("/mapred/system",
+                         action="create_delayed",
+                         owner=params.mapred_user
+    )
+    params.HdfsDirectory("/mapred/history",
+                         action="create_delayed",
+                         owner=params.mapred_user
+    )
+    params.HdfsDirectory(params.mapreduce_jobhistory_intermediate_done_dir,
+                         action="create_delayed",
+                         owner=params.mapred_user,
+                         group=params.user_group,
+                         mode=0777
+    )
+    params.HdfsDirectory(params.mapreduce_jobhistory_done_dir,
+                         action="create_delayed",
+                         owner=params.mapred_user,
+                         group=params.user_group,
+                         mode=0777
+    )
+    params.HdfsDirectory(None, action="create")
+
   Directory([params.mapred_pid_dir,params.mapred_log_dir],
             owner=params.mapred_user,
             group=params.user_group,
@@ -47,4 +75,4 @@ def mapreduce():
   File(params.mapred_hosts_file_path,
             owner=params.mapred_user,
             group=params.user_group,
-  )
+  )

+ 22 - 1
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/params.py

@@ -52,4 +52,25 @@ kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "
 #exclude file
 mr_exclude_hosts = default("/clusterHostInfo/decom_tt_hosts", [])
 exclude_file_path = config['configurations']['mapred-site']['mapred.hosts.exclude']
-mapred_hosts_file_path = config['configurations']['mapred-site']['mapred.hosts']
+mapred_hosts_file_path = config['configurations']['mapred-site']['mapred.hosts']
+
+#hdfs directories
+mapreduce_jobhistory_intermediate_done_dir = config['configurations']['mapred-site']['mapreduce.jobhistory.intermediate-done-dir']
+mapreduce_jobhistory_done_dir = config['configurations']['mapred-site']['mapred.job.tracker.history.completed.location']
+#for create_hdfs_directory
+hostname = config["hostname"]
+hadoop_conf_dir = "/etc/hadoop/conf"
+hdfs_user_keytab = config['configurations']['global']['hdfs_user_keytab']
+hdfs_user = config['configurations']['global']['hdfs_user']
+kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+import functools
+#create partial functions with common arguments for every HdfsDirectory call
+#to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
+  security_enabled = security_enabled,
+  keytab = hdfs_user_keytab,
+  kinit_path_local = kinit_path_local
+)

+ 6 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/oozie.py

@@ -5,6 +5,12 @@ def oozie(is_server=False
               ):
   import params
 
+  if is_server:
+    params.HdfsDirectory(params.oozie_hdfs_user_dir,
+                         action="create",
+                         owner=params.oozie_user,
+                         mode=params.oozie_hdfs_user_mode
+    )
   XmlConfig( "oozie-site.xml",
     conf_dir = params.conf_dir,
     configurations = params.config['configurations']['oozie-site'],

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/oozie_server.py

@@ -28,7 +28,6 @@ from oozie_service import oozie_service
 class OozieServer(Script):
   def install(self, env):
     self.install_packages(env)
-    self.configure(env)
 
   def configure(self, env):
     import params
@@ -39,6 +38,7 @@ class OozieServer(Script):
   def start(self, env):
     import params
     env.set_params(params)
+    self.configure(env)
     oozie_service(action='start')
 
   def stop(self, env):

+ 21 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/params.py

@@ -68,3 +68,24 @@ if ('oozie-log4j' in config['configurations']):
   log4j_props = config['configurations']['oozie-log4j']
 else:
   log4j_props = None
+
+#hdfs directories
+oozie_hdfs_user_dir = format("/user/{oozie_user}")
+oozie_hdfs_user_mode = 0775
+#for create_hdfs_directory
+hostname = config["hostname"]
+hadoop_conf_dir = "/etc/hadoop/conf"
+hdfs_user_keytab = config['configurations']['global']['hdfs_user_keytab']
+hdfs_user = config['configurations']['global']['hdfs_user']
+kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+import functools
+#create partial functions with common arguments for every HdfsDirectory call
+#to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
+  security_enabled = security_enabled,
+  keytab = hdfs_user_keytab,
+  kinit_path_local = kinit_path_local
+)

+ 25 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/params.py

@@ -26,6 +26,7 @@ import status_params
 # server configurations
 config = Script.get_config()
 
+hcat_user = config['configurations']['global']['hcat_user']
 webhcat_user = config['configurations']['global']['webhcat_user']
 download_url = config['configurations']['global']['apache_artifacts_download_url']
 
@@ -49,3 +50,27 @@ smoke_user_keytab = config['configurations']['global']['smokeuser_keytab']
 smokeuser = config['configurations']['global']['smokeuser']
 security_enabled = config['configurations']['global']['security_enabled']
 kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+
+#hdfs directories
+webhcat_apps_dir = "/apps/webhcat"
+hcat_hdfs_user_dir = format("/user/{hcat_user}")
+hcat_hdfs_user_mode = 0755
+webhcat_hdfs_user_dir = format("/user/{webhcat_user}")
+webhcat_hdfs_user_mode = 0755
+#for create_hdfs_directory
+hostname = config["hostname"]
+hadoop_conf_dir = "/etc/hadoop/conf"
+hdfs_user_keytab = config['configurations']['global']['hdfs_user_keytab']
+hdfs_user = config['configurations']['global']['hdfs_user']
+kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+import functools
+#create partial functions with common arguments for every HdfsDirectory call
+#to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
+  security_enabled = security_enabled,
+  keytab = hdfs_user_keytab,
+  kinit_path_local = kinit_path_local
+)

+ 17 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/webhcat.py

@@ -26,6 +26,23 @@ import sys
 def webhcat():
   import params
 
+  if params.hcat_hdfs_user_dir != params.webhcat_hdfs_user_dir:
+    params.HdfsDirectory(params.hcat_hdfs_user_dir,
+                         action="create_delayed",
+                         owner=params.hcat_user,
+                         mode=params.hcat_hdfs_user_mode
+    )
+  params.HdfsDirectory(params.webhcat_hdfs_user_dir,
+                       action="create_delayed",
+                       owner=params.webhcat_user,
+                       mode=params.webhcat_hdfs_user_mode
+  )
+  params.HdfsDirectory(params.webhcat_apps_dir,
+                       action="create_delayed",
+                       owner=params.webhcat_user,
+                       mode=0755
+  )
+  params.HdfsDirectory(None, action="create")
   Directory(params.templeton_pid_dir,
             owner=params.webhcat_user,
             mode=0755,

+ 16 - 5
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/hbase.py

@@ -22,10 +22,21 @@ import os
 from resource_management import *
 import sys
 
-def hbase(type=None # 'master' or 'regionserver' or 'client'
+def hbase(name=None # 'master' or 'regionserver' or 'client'
               ):
   import params
-  
+
+  if name in ["master","regionserver"]:
+    params.HdfsDirectory(params.hbase_hdfs_root_dir,
+                         action="create_delayed",
+                         owner=params.hbase_user
+    )
+    params.HdfsDirectory(params.hbase_staging_dir,
+                         action="create_delayed",
+                         owner=params.hbase_user,
+                         mode=0711
+    )
+    params.HdfsDirectory(None, action="create")
   Directory( params.conf_dir,
   Directory( params.conf_dir,
       owner = params.hbase_user,
       group = params.user_group,
   hbase_TemplateConfig( 'hbase-env.sh')     
   hbase_TemplateConfig( 'hbase-env.sh')
 
   hbase_TemplateConfig( params.metric_prop_file_name,
-    tag = 'GANGLIA-MASTER' if type == 'master' else 'GANGLIA-RS'
+    tag = 'GANGLIA-MASTER' if name == 'master' else 'GANGLIA-RS'
   )
 
   hbase_TemplateConfig( 'regionservers')
 
   if params.security_enabled:
-    hbase_TemplateConfig( format("hbase_{type}_jaas.conf"))
+    hbase_TemplateConfig( format("hbase_{name}_jaas.conf"))
 
-  if type != "client":
+  if name != "client":
     Directory( params.pid_dir,
       owner = params.hbase_user,
       recursive = True

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/hbase_client.py

@@ -33,7 +33,7 @@ class HbaseClient(Script):
     import params
     env.set_params(params)
 
-    hbase(type='client')
+    hbase(name='client')
 
   def status(self, env):
     raise ClientComponentHasNoStatus()

+ 1 - 2
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/hbase_master.py

@@ -29,13 +29,12 @@ from hbase_decommission import hbase_decommission
 class HbaseMaster(Script):
   def install(self, env):
     self.install_packages(env)
-    self.configure(env)
 
   def configure(self, env):
     import params
     env.set_params(params)
 
-    hbase(type='master')
+    hbase(name='master')
 
   def start(self, env):
     import params

+ 1 - 2
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/hbase_regionserver.py

@@ -28,13 +28,12 @@ from hbase_service import hbase_service
 class HbaseRegionServer(Script):
   def install(self, env):
     self.install_packages(env)
-    self.configure(env)
 
   def configure(self, env):
     import params
     env.set_params(params)
 
-    hbase(type='regionserver')
+    hbase(name='regionserver')
 
   def start(self, env):
     import params

+ 21 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/params.py

@@ -97,3 +97,24 @@ if ('hbase-log4j' in config['configurations']):
   log4j_props = config['configurations']['hbase-log4j']
 else:
   log4j_props = None
+
+
+hbase_hdfs_root_dir = config['configurations']['hbase-site']['hbase.rootdir']
+hbase_staging_dir = "/apps/hbase/staging"
+#for create_hdfs_directory
+hostname = config["hostname"]
+hadoop_conf_dir = "/etc/hadoop/conf"
+hdfs_user_keytab = config['configurations']['global']['hdfs_user_keytab']
+hdfs_user = config['configurations']['global']['hdfs_user']
+kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+import functools
+#create partial functions with common arguments for every HdfsDirectory call
+#to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
+  security_enabled = security_enabled,
+  keytab = hdfs_user_keytab,
+  kinit_path_local = kinit_path_local
+)

+ 16 - 36
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py

@@ -126,55 +126,26 @@ dfs_domain_socket_dir = os.path.dirname(dfs_domain_socket_path)
 
 hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
 
-jn_edits_dir = config['configurations']['hdfs-site']['dfs.journalnode.edits.dir']#"/grid/0/hdfs/journal"
+jn_edits_dir = config['configurations']['hdfs-site']['dfs.journalnode.edits.dir']
 
-# if stack_version[0] == "2":
 dfs_name_dir = config['configurations']['hdfs-site']['dfs.namenode.name.dir']
-# else:
-#   dfs_name_dir = default("/configurations/hdfs-site/dfs.name.dir","/tmp/hadoop-hdfs/dfs/name")
 
 namenode_dirs_created_stub_dir = format("{hdfs_log_dir_prefix}/{hdfs_user}")
 namenode_dirs_stub_filename = "namenode_dirs_created"
 
-hbase_hdfs_root_dir = config['configurations']['hbase-site']['hbase.rootdir']#","/apps/hbase/data")
-hbase_staging_dir = "/apps/hbase/staging"
-hive_apps_whs_dir = config['configurations']['hive-site']["hive.metastore.warehouse.dir"] #, "/apps/hive/warehouse")
-webhcat_apps_dir = "/apps/webhcat"
-yarn_log_aggregation_enabled = config['configurations']['yarn-site']['yarn.log-aggregation-enable']#","true")
-yarn_nm_app_log_dir =  config['configurations']['yarn-site']['yarn.nodemanager.remote-app-log-dir']#","/app-logs")
-mapreduce_jobhistory_intermediate_done_dir = config['configurations']['mapred-site']['mapreduce.jobhistory.intermediate-done-dir']#","/app-logs")
-mapreduce_jobhistory_done_dir = config['configurations']['mapred-site']['mapreduce.jobhistory.done-dir']#","/mr-history/done")
-
-if has_oozie_server:
-  oozie_hdfs_user_dir = format("/user/{oozie_user}")
-  oozie_hdfs_user_mode = 775
-if has_hcat_server_host:
-  hcat_hdfs_user_dir = format("/user/{hcat_user}")
-  hcat_hdfs_user_mode = 755
-  webhcat_hdfs_user_dir = format("/user/{webhcat_user}")
-  webhcat_hdfs_user_mode = 755
-if has_hive_server_host:
-  hive_hdfs_user_dir = format("/user/{hive_user}")
-  hive_hdfs_user_mode = 700
 smoke_hdfs_user_dir = format("/user/{smoke_user}")
-smoke_hdfs_user_mode = 770
+smoke_hdfs_user_mode = 0770
 
 namenode_formatted_mark_dir = format("{hadoop_pid_dir_prefix}/hdfs/namenode/formatted/")
 
-# if stack_version[0] == "2":
-fs_checkpoint_dir = config['configurations']['hdfs-site']['dfs.namenode.checkpoint.dir'] #","/tmp/hadoop-hdfs/dfs/namesecondary")
-# else:
-#   fs_checkpoint_dir = default("/configurations/core-site/fs.checkpoint.dir","/tmp/hadoop-hdfs/dfs/namesecondary")
-
-# if stack_version[0] == "2":
-dfs_data_dir = config['configurations']['hdfs-site']['dfs.datanode.data.dir']#,"/tmp/hadoop-hdfs/dfs/data")
-# else:
-#   dfs_data_dir = default('/configurations/hdfs-site/dfs.data.dir',"/tmp/hadoop-hdfs/dfs/data")
+fs_checkpoint_dir = config['configurations']['hdfs-site']['dfs.namenode.checkpoint.dir']
 
+dfs_data_dir = config['configurations']['hdfs-site']['dfs.datanode.data.dir']
 # HDFS High Availability properties
 dfs_ha_enabled = False
 dfs_ha_nameservices = default("/configurations/hdfs-site/dfs.nameservices", None)
 dfs_ha_namenode_ids = default(format("/configurations/hdfs-site/dfs.ha.namenodes.{dfs_ha_nameservices}"), None)
+namenode_id = None
 if dfs_ha_namenode_ids:
   dfs_ha_namemodes_ids_list = dfs_ha_namenode_ids.split(",")
   dfs_ha_namenode_ids_array_len = len(dfs_ha_namemodes_ids_list)
@@ -185,10 +156,19 @@ if dfs_ha_enabled:
     nn_host = config['configurations']['hdfs-site'][format('dfs.namenode.rpc-address.{dfs_ha_nameservices}.{nn_id}')]
     if hostname in nn_host:
       namenode_id = nn_id
-  namenode_id = None
 
 journalnode_address = default('/configurations/hdfs-site/dfs.journalnode.http-address', None)
 if journalnode_address:
   journalnode_port = journalnode_address.split(":")[1]
 
-falcon_store_uri = config['configurations']['global']['falcon_store_uri']
+import functools
+#create partial functions with common arguments for every HdfsDirectory call
+#to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
+  security_enabled = security_enabled,
+  keytab = hdfs_user_keytab,
+  kinit_path_local = kinit_path_local
+)

+ 13 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive.py

@@ -26,6 +26,19 @@ import os
 def hive(name=None):
   import params
 
+  if name == 'hiveserver2':
+
+    params.HdfsDirectory(params.hive_apps_whs_dir,
+                         action="create_delayed",
+                         owner=params.hive_user,
+                         mode=0777
+    )
+    params.HdfsDirectory(params.hive_hdfs_user_dir,
+                         action="create_delayed",
+                         owner=params.hive_user,
+                         mode=params.hive_hdfs_user_mode
+    )
+    params.HdfsDirectory(None, action="create")
   if name == 'metastore' or name == 'hiveserver2':
     hive_config_dir = params.hive_server_conf_dir
     config_file_mode = 0600

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive_server.py

@@ -28,7 +28,7 @@ class HiveServer(Script):
 
   def install(self, env):
     self.install_packages(env)
-    self.configure(env)
+
   def configure(self, env):
     import params
     env.set_params(params)

+ 24 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py

@@ -122,7 +122,7 @@ hcat_user = config['configurations']['global']['hcat_user']
 webhcat_user = config['configurations']['global']['webhcat_user']
 
 hcat_pid_dir = status_params.hcat_pid_dir
-hcat_log_dir = config['configurations']['global']['hcat_log_dir']   #hcat_log_dir
+hcat_log_dir = config['configurations']['global']['hcat_log_dir']
 
 hadoop_conf_dir = '/etc/hadoop/conf'
 
@@ -139,3 +139,26 @@ else:
   log4j_exec_props = None
 
 daemon_name = status_params.daemon_name
+
+hive_hdfs_user_dir = format("/user/{hive_user}")
+hive_hdfs_user_mode = 0700
+hive_apps_whs_dir = config['configurations']['hive-site']["hive.metastore.warehouse.dir"]
+#for create_hdfs_directory
+hostname = config["hostname"]
+hadoop_conf_dir = "/etc/hadoop/conf"
+hdfs_user_keytab = config['configurations']['global']['hdfs_user_keytab']
+hdfs_user = config['configurations']['global']['hdfs_user']
+kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+import functools
+#create partial functions with common arguments for every HdfsDirectory call
+#to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
+  security_enabled = security_enabled,
+  keytab = hdfs_user_keytab,
+  kinit_path_local = kinit_path_local
+)

+ 7 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/oozie.py

@@ -24,6 +24,13 @@ from resource_management import *
 def oozie(is_server=False # TODO: see if we can remove this
               ):
   import params
+
+  if is_server:
+    params.HdfsDirectory(params.oozie_hdfs_user_dir,
+                         action="create",
+                         owner=params.oozie_user,
+                         mode=params.oozie_hdfs_user_mode
+    )
   #TODO hack for falcon el
   oozie_site = dict(params.config['configurations']['oozie-site'])
   oozie_site["oozie.services.ext"] = 'org.apache.oozie.service.JMSAccessorService,' + oozie_site["oozie.services.ext"]

+ 20 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/params.py

@@ -81,3 +81,23 @@ if ('oozie-log4j' in config['configurations']):
   log4j_props = config['configurations']['oozie-log4j']
 else:
   log4j_props = None
+
+oozie_hdfs_user_dir = format("/user/{oozie_user}")
+oozie_hdfs_user_mode = 0775
+#for create_hdfs_directory
+hostname = config["hostname"]
+hadoop_conf_dir = "/etc/hadoop/conf"
+hdfs_user_keytab = config['configurations']['global']['hdfs_user_keytab']
+hdfs_user = config['configurations']['global']['hdfs_user']
+kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+import functools
+#create partial functions with common arguments for every HdfsDirectory call
+#to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
+  security_enabled = security_enabled,
+  keytab = hdfs_user_keytab,
+  kinit_path_local = kinit_path_local
+)

+ 24 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/params.py

@@ -26,6 +26,7 @@ import status_params
 # server configurations
 config = Script.get_config()
 
+hcat_user = config['configurations']['global']['hcat_user']
 webhcat_user = config['configurations']['global']['webhcat_user']
 download_url = config['configurations']['global']['apache_artifacts_download_url']
 
@@ -54,3 +55,26 @@ smoke_user_keytab = config['configurations']['global']['smokeuser_keytab']
 smokeuser = config['configurations']['global']['smokeuser']
 security_enabled = config['configurations']['global']['security_enabled']
 kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+
+hcat_hdfs_user_dir = format("/user/{hcat_user}")
+hcat_hdfs_user_mode = 0755
+webhcat_hdfs_user_dir = format("/user/{webhcat_user}")
+webhcat_hdfs_user_mode = 0755
+webhcat_apps_dir = "/apps/webhcat"
+#for create_hdfs_directory
+hostname = config["hostname"]
+hadoop_conf_dir = "/etc/hadoop/conf"
+hdfs_user_keytab = config['configurations']['global']['hdfs_user_keytab']
+hdfs_user = config['configurations']['global']['hdfs_user']
+kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+import functools
+#create partial functions with common arguments for every HdfsDirectory call
+#to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
+  security_enabled = security_enabled,
+  keytab = hdfs_user_keytab,
+  kinit_path_local = kinit_path_local
+)

+ 18 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat.py

@@ -26,6 +26,24 @@ import sys
 def webhcat():
   import params
 
+  params.HdfsDirectory(params.webhcat_apps_dir,
+                       action="create_delayed",
+                       owner=params.webhcat_user,
+                       mode=0755
+  )
+  if params.hcat_hdfs_user_dir != params.webhcat_hdfs_user_dir:
+    params.HdfsDirectory(params.hcat_hdfs_user_dir,
+                         action="create_delayed",
+                         owner=params.hcat_user,
+                         mode=params.hcat_hdfs_user_mode
+    )
+  params.HdfsDirectory(params.webhcat_hdfs_user_dir,
+                       action="create_delayed",
+                       owner=params.webhcat_user,
+                       mode=params.webhcat_hdfs_user_mode
+  )
+  params.HdfsDirectory(None, action="create")
+
   Directory(params.templeton_pid_dir,
             owner=params.webhcat_user,
             mode=0755,

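Note: webhcat.py shows the batching idiom that comes with the new resource: calls with action="create_delayed" only queue their directories, and the closing HdfsDirectory(None, action="create") flushes the whole queue, so the script pays for one hadoop client invocation rather than one per directory. A toy sketch of the queue-and-flush idea (hypothetical; it only mimics the behaviour, not the real provider):

    _pending = []

    def hdfs_directory(dir_name, action="create", **attrs):
        if dir_name is not None:
            _pending.append(dir_name)
        if action == "create":
            # one hadoop client call for everything queued so far
            print "hadoop fs -mkdir -p " + " ".join(_pending)
            del _pending[:]

    hdfs_directory("/apps/webhcat", action="create_delayed")
    hdfs_directory("/user/hcat", action="create_delayed")
    hdfs_directory(None, action="create")  # flushes both in a single call
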
+ 2 - 3
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/historyserver.py

@@ -28,12 +28,11 @@ from service import service
 class Histroryserver(Script):
   def install(self, env):
     self.install_packages(env)
-    self.configure(env)
 
   def configure(self, env):
     import params
     env.set_params(params)
-    yarn()
+    yarn(name="historyserver")
 
   def start(self, env):
     import params
@@ -52,4 +51,4 @@ class Histroryserver(Script):
     check_process_status(status_params.mapred_historyserver_pid_file)
 
 if __name__ == "__main__":
-  Histroryserver().execute()
+  Histroryserver().execute()

+ 2 - 3
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/nodemanager.py

@@ -29,12 +29,11 @@ from service import service
 class Nodemanager(Script):
   def install(self, env):
     self.install_packages(env)
-    self.configure(env)
 
   def configure(self, env):
     import params
     env.set_params(params)
-    yarn()
+    yarn(name="nodemanager")
 
   def start(self, env):
     import params
@@ -58,4 +57,4 @@ class Nodemanager(Script):
     check_process_status(status_params.nodemanager_pid_file)
 
 if __name__ == "__main__":
-  Nodemanager().execute()
+  Nodemanager().execute()

+ 25 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py

@@ -88,4 +88,28 @@ yarn_container_bin = "/usr/lib/hadoop-yarn/bin"
 #exclude file
 exclude_hosts = default("/clusterHostInfo/decom_nm_hosts", [])
 exclude_file_path = config['configurations']['yarn-site']['yarn.resourcemanager.nodes.exclude-path']
-update_exclude_file_only = config['commandParams']['update_exclude_file_only']
+
+
+yarn_log_aggregation_enabled = config['configurations']['yarn-site']['yarn.log-aggregation-enable']
+yarn_nm_app_log_dir =  config['configurations']['yarn-site']['yarn.nodemanager.remote-app-log-dir']
+mapreduce_jobhistory_intermediate_done_dir = config['configurations']['mapred-site']['mapreduce.jobhistory.intermediate-done-dir']
+mapreduce_jobhistory_done_dir = config['configurations']['mapred-site']['mapreduce.jobhistory.done-dir']
+
+#for create_hdfs_directory
+hostname = config["hostname"]
+hadoop_conf_dir = "/etc/hadoop/conf"
+hdfs_user_keytab = config['configurations']['global']['hdfs_user_keytab']
+hdfs_user = config['configurations']['global']['hdfs_user']
+kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+import functools
+#create partial functions with common arguments for every HdfsDirectory call
+#to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
+  security_enabled = security_enabled,
+  keytab = hdfs_user_keytab,
+  kinit_path_local = kinit_path_local
+)
+update_exclude_file_only = config['commandParams']['update_exclude_file_only']

+ 34 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/yarn.py

@@ -24,9 +24,42 @@ from resource_management import *
 import sys
 
 
-def yarn():
+def yarn(name = None):
   import params
 
+
+  if name in ["nodemanager","historyserver"]:
+    if params.yarn_log_aggregation_enabled:
+      params.HdfsDirectory(params.yarn_nm_app_log_dir,
+                           action="create_delayed",
+                           owner=params.yarn_user,
+                           group=params.user_group,
+                           mode=0777,
+                           recursive_chmod=True
+      )
+    params.HdfsDirectory("/mapred",
+                         action="create_delayed",
+                         owner=params.mapred_user
+    )
+    params.HdfsDirectory("/mapred/system",
+                         action="create_delayed",
+                         owner=params.hdfs_user
+    )
+    params.HdfsDirectory(params.mapreduce_jobhistory_intermediate_done_dir,
+                         action="create_delayed",
+                         owner=params.mapred_user,
+                         group=params.user_group,
+                         mode=0777
+    )
+
+    params.HdfsDirectory(params.mapreduce_jobhistory_done_dir,
+                         action="create_delayed",
+                         owner=params.mapred_user,
+                         group=params.user_group,
+                         mode=01777
+    )
+    params.HdfsDirectory(None, action="create")
+
   Directory([params.yarn_pid_dir, params.yarn_log_dir],
             owner=params.yarn_user,
             group=params.user_group,

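Note: the modes in yarn.py are Python 2 octal literals: 0777 is mode 777, and 01777 on the job-history done directory adds the sticky bit, so a world-writable directory still only lets owners remove their own entries. (Python 3 would spell these 0o777 and 0o1777.) For example:

    import stat

    # 01777 (Python 2 octal) == mode 0777 plus the sticky bit
    assert 01777 == stat.S_ISVTX | 0777
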
+ 6 - 0
ambari-server/src/main/resources/stacks/HDP/2.1.1/services/FALCON/package/scripts/falcon.py

@@ -29,6 +29,12 @@ def falcon(type, action = None):
            mode=0644)
   elif type == 'server':
     if action == 'config':
+      if params.store_uri[0:4] == "hdfs":
+        params.HdfsDirectory(params.store_uri,
+                             action="create",
+                             owner=params.falcon_user,
+                             mode=0755
+        )
       Directory(params.falcon_local_dir,
                 owner=params.falcon_user,
                 recursive=True

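Note: params.store_uri[0:4] == "hdfs" is a plain string-prefix test that decides whether the falcon store lives on HDFS and therefore needs an HdfsDirectory call. An equivalent, slightly more explicit alternative (a sketch, not what the commit uses) would parse the URI scheme:

    from urlparse import urlparse  # Python 2, matching these scripts

    store_uri = "hdfs://namenode:8020/apps/falcon/store"  # illustrative value
    if urlparse(store_uri).scheme == "hdfs":
        print "store is on HDFS; create it via params.HdfsDirectory"
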
+ 19 - 0
ambari-server/src/main/resources/stacks/HDP/2.1.1/services/FALCON/package/scripts/params.py

@@ -34,3 +34,22 @@ falcon_data_dir = format('{falcon_local_dir}/activemq')
 store_uri = config['configurations']['global']['falcon_store_uri']
 falcon_host = config['clusterHostInfo']['falcon_server_hosts'][0]
 falcon_port = config['configurations']['global']['falcon_port']
+
+#for create_hdfs_directory
+security_enabled = config['configurations']['global']['security_enabled']
+hostname = config["hostname"]
+hadoop_conf_dir = "/etc/hadoop/conf"
+hdfs_user_keytab = config['configurations']['global']['hdfs_user_keytab']
+hdfs_user = config['configurations']['global']['hdfs_user']
+kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+import functools
+#create partial functions with common arguments for every HdfsDirectory call
+#to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
+  security_enabled = security_enabled,
+  keytab = hdfs_user_keytab,
+  kinit_path_local = kinit_path_local
+)

+ 64 - 10
ambari-server/src/test/python/stacks/1.3.2/HBASE/test_hbase_master.py

@@ -164,11 +164,42 @@ class TestHBaseMaster(RMFTestCase):
     self.assertNoMoreResources()
 
   def assert_configure_default(self):
+    self.assertResourceCalled('HdfsDirectory', 'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              owner = 'hbase',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              mode = 0711,
+                              owner = 'hbase',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              action = ['create'],
+                              )
     self.assertResourceCalled('Directory', '/etc/hbase/conf',
       owner = 'hbase',
       group = 'hadoop',
       recursive = True,
-    )   
+    )
+    self.assertResourceCalled('Directory', '/hadoop/hbase',
+      owner = 'hbase',
+      recursive = True,
+    )
     self.assertResourceCalled('XmlConfig', 'hbase-site.xml',
       owner = 'hbase',
       group = 'hadoop',
@@ -200,11 +231,7 @@ class TestHBaseMaster(RMFTestCase):
     self.assertResourceCalled('Directory', '/var/run/hbase',
       owner = 'hbase',
       recursive = True,
-    )   
-    self.assertResourceCalled('Directory', '/hadoop/hbase',
-      owner = 'hbase',
-      recursive = True,
-    )    
+    )
     self.assertResourceCalled('Directory', '/var/log/hbase',
       owner = 'hbase',
       recursive = True,
@@ -219,11 +246,42 @@ class TestHBaseMaster(RMFTestCase):
     )
  
   def assert_configure_secured(self):
+    self.assertResourceCalled('HdfsDirectory', 'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              owner = 'hbase',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0711,
+                              owner = 'hbase',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              action = ['create'],
+                              )
     self.assertResourceCalled('Directory', '/etc/hbase/conf',
       owner = 'hbase',
       group = 'hadoop',
       recursive = True,
     )
+    self.assertResourceCalled('Directory', '/hadoop/hbase',
+      owner = 'hbase',
+      recursive = True,
+    )
     self.assertResourceCalled('XmlConfig', 'hbase-site.xml',
       owner = 'hbase',
       group = 'hadoop',
@@ -260,10 +318,6 @@ class TestHBaseMaster(RMFTestCase):
       owner = 'hbase',
       recursive = True,
     )
-    self.assertResourceCalled('Directory', '/hadoop/hbase',
-      owner = 'hbase',
-      recursive = True,
-    )
     self.assertResourceCalled('Directory', '/var/log/hbase',
       owner = 'hbase',
       recursive = True,

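Note: RMFTestCase asserts resources in the exact order the script created them, which is why the three new HdfsDirectory assertions (two create_delayed, then the None/create flush) sit in front of the existing Directory assertions, and why the relocated /hadoop/hbase assertion moves with the code it verifies. A hypothetical sketch of that ordered-consumption idea:

    class OrderedAssertions(object):
        def __init__(self, recorded):
            # (resource_type, name, kwargs) tuples, in creation order
            self.recorded = list(recorded)

        def assertResourceCalled(self, rtype, name, **kwargs):
            actual = self.recorded.pop(0)
            assert actual == (rtype, name, kwargs), "out of order: %r" % (actual,)

        def assertNoMoreResources(self):
            assert not self.recorded, "leftover resources: %r" % (self.recorded,)
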
+ 64 - 10
ambari-server/src/test/python/stacks/1.3.2/HBASE/test_hbase_regionserver.py

@@ -98,11 +98,42 @@ class TestHbaseRegionServer(RMFTestCase):
     self.assertNoMoreResources()
 
   def assert_configure_default(self):
+    self.assertResourceCalled('HdfsDirectory', 'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              owner = 'hbase',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              mode = 0711,
+                              owner = 'hbase',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              action = ['create'],
+                              )
     self.assertResourceCalled('Directory', '/etc/hbase/conf',
       owner = 'hbase',
       group = 'hadoop',
       recursive = True,
-    )   
+    )
+    self.assertResourceCalled('Directory', '/hadoop/hbase',
+      owner = 'hbase',
+      recursive = True,
+    )
     self.assertResourceCalled('XmlConfig', 'hbase-site.xml',
       owner = 'hbase',
       group = 'hadoop',
@@ -134,11 +165,7 @@ class TestHbaseRegionServer(RMFTestCase):
     self.assertResourceCalled('Directory', '/var/run/hbase',
       owner = 'hbase',
       recursive = True,
-    )   
-    self.assertResourceCalled('Directory', '/hadoop/hbase',
-      owner = 'hbase',
-      recursive = True,
-    )    
+    )
     self.assertResourceCalled('Directory', '/var/log/hbase',
       owner = 'hbase',
       recursive = True,
@@ -154,11 +181,42 @@
  
  
   def assert_configure_secured(self):
+    self.assertResourceCalled('HdfsDirectory', 'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              owner = 'hbase',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0711,
+                              owner = 'hbase',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              action = ['create'],
+                              )
     self.assertResourceCalled('Directory', '/etc/hbase/conf',
       owner = 'hbase',
       group = 'hadoop',
       recursive = True,
     )
+    self.assertResourceCalled('Directory', '/hadoop/hbase',
+      owner = 'hbase',
+      recursive = True,
+    )
     self.assertResourceCalled('XmlConfig', 'hbase-site.xml',
       owner = 'hbase',
       group = 'hadoop',
@@ -195,10 +253,6 @@ class TestHbaseRegionServer(RMFTestCase):
       owner = 'hbase',
       recursive = True,
     )
-    self.assertResourceCalled('Directory', '/hadoop/hbase',
-      owner = 'hbase',
-      recursive = True,
-    )
     self.assertResourceCalled('Directory', '/var/log/hbase',
       owner = 'hbase',
       recursive = True,

+ 56 - 0
ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_server.py

@@ -164,6 +164,34 @@ class TestHiveServer(RMFTestCase):
     self.assertNoMoreResources()
 
   def assert_configure_default(self):
+    self.assertResourceCalled('HdfsDirectory', '/apps/hive/warehouse',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              mode = 0777,
+                              owner = 'hive',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/user/hive',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              mode = 0700,
+                              owner = 'hive',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              action = ['create'],
+                              )
     self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/HDP-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
       creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
       path = ['/bin', 'usr/bin/'],
@@ -221,6 +249,34 @@ class TestHiveServer(RMFTestCase):
     )
 
   def assert_configure_secured(self):
+    self.assertResourceCalled('HdfsDirectory', '/apps/hive/warehouse',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0777,
+                              owner = 'hive',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/user/hive',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0700,
+                              owner = 'hive',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              action = ['create'],
+                              )
     self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/HDP-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
       creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
       path = ['/bin', 'usr/bin/'],

+ 116 - 2
ambari-server/src/test/python/stacks/1.3.2/MAPREDUCE/test_mapreduce_historyserver.py

@@ -81,7 +81,7 @@ class TestHistoryServer(RMFTestCase):
                          config_file="secured.json"
     )
 
-    self.assert_configure_default()
+    self.assert_configure_secured()
     self.assertResourceCalled('Execute', 'export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start historyserver',
                               user = 'mapred',
                               not_if = 'ls /var/run/hadoop/mapred/hadoop-mapred-historyserver.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/mapred/hadoop-mapred-historyserver.pid` >/dev/null 2>&1'
@@ -107,6 +107,63 @@ class TestHistoryServer(RMFTestCase):
     self.assertNoMoreResources()
 
   def assert_configure_default(self):
+    self.assertResourceCalled('HdfsDirectory', '/mapred',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              owner = 'mapred',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mapred/system',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              owner = 'mapred',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mapred/history',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              owner = 'mapred',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mr-history/tmp',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              mode = 0777,
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mapred/history/done',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              mode = 0777,
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              action = ['create'],
+                              )
     self.assertResourceCalled('Directory', '/var/run/hadoop/mapred',
       owner = 'mapred',
       group = 'hadoop',
@@ -132,6 +189,63 @@ class TestHistoryServer(RMFTestCase):
     )
 
   def assert_configure_secured(self):
+    self.assertResourceCalled('HdfsDirectory', '/mapred',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              owner = 'mapred',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mapred/system',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              owner = 'mapred',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mapred/history',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              owner = 'mapred',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mr-history/tmp',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0777,
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mapred/history/done',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0777,
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              action = ['create'],
+                              )
     self.assertResourceCalled('Directory', '/var/run/hadoop/mapred',
       owner = 'mapred',
       group = 'hadoop',
@@ -154,4 +268,4 @@ class TestHistoryServer(RMFTestCase):
     self.assertResourceCalled('File', '/etc/hadoop/conf/mapred.include',
       owner = 'mapred',
       group = 'hadoop',
-    )
+    )

+ 116 - 2
ambari-server/src/test/python/stacks/1.3.2/MAPREDUCE/test_mapreduce_jobtracker.py

@@ -115,7 +115,7 @@ class TestJobtracker(RMFTestCase):
                        config_file="secured.json"
     )
 
-    self.assert_configure_default()
+    self.assert_configure_secured()
     self.assertResourceCalled('Execute', 'export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start jobtracker',
                        user = 'mapred',
                        not_if = 'ls /var/run/hadoop/mapred/hadoop-mapred-jobtracker.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/mapred/hadoop-mapred-jobtracker.pid` >/dev/null 2>&1'
@@ -161,6 +161,63 @@ class TestJobtracker(RMFTestCase):
     self.assertNoMoreResources()
 
   def assert_configure_default(self):
+    self.assertResourceCalled('HdfsDirectory', '/mapred',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              owner = 'mapred',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mapred/system',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              owner = 'mapred',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mapred/history',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              owner = 'mapred',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mr-history/tmp',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              mode = 0777,
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mapred/history/done',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              mode = 0777,
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              action = ['create'],
+                              )
     self.assertResourceCalled('Directory', '/var/run/hadoop/mapred',
       owner = 'mapred',
       group = 'hadoop',
@@ -186,6 +243,63 @@ class TestJobtracker(RMFTestCase):
     )
 
   def assert_configure_secured(self):
+    self.assertResourceCalled('HdfsDirectory', '/mapred',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              owner = 'mapred',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mapred/system',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              owner = 'mapred',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mapred/history',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              owner = 'mapred',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mr-history/tmp',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0777,
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mapred/history/done',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0777,
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              action = ['create'],
+                              )
     self.assertResourceCalled('Directory', '/var/run/hadoop/mapred',
       owner = 'mapred',
       group = 'hadoop',
@@ -208,4 +322,4 @@ class TestJobtracker(RMFTestCase):
     self.assertResourceCalled('File', '/etc/hadoop/conf/mapred.include',
       owner = 'mapred',
       group = 'hadoop',
-    )
+    )

+ 200 - 0
ambari-server/src/test/python/stacks/1.3.2/OOZIE/test_oozie_server.py

@@ -28,6 +28,16 @@ class TestOozieServer(RMFTestCase):
                        command = "configure",
                        config_file="default.json"
     )
+    self.assertResourceCalled('HdfsDirectory', '/user/oozie',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0775,
+                              owner = 'oozie',
+                              action = ['create'],
+    )
     self.assertResourceCalled('XmlConfig', 'oozie-site.xml',
       owner = 'oozie',
       group = 'hadoop',
@@ -114,6 +124,7 @@ class TestOozieServer(RMFTestCase):
                          command = "start",
                          config_file="default.json"
     )
+    self.configure_default()
     self.assertResourceCalled('Execute', 'cd /var/tmp/oozie && /usr/lib/oozie/bin/ooziedb.sh create -sqlfile oozie.sql -run',
       not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
       ignore_failures = True,
@@ -148,6 +159,16 @@ class TestOozieServer(RMFTestCase):
                        command = "configure",
                        config_file="secured.json"
     )
+    self.assertResourceCalled('HdfsDirectory', '/user/oozie',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0775,
+                              owner = 'oozie',
+                              action = ['create'],
+    )
     self.assertResourceCalled('XmlConfig', 'oozie-site.xml',
       owner = 'oozie',
       group = 'hadoop',
@@ -234,6 +255,7 @@ class TestOozieServer(RMFTestCase):
                          command = "start",
                          config_file="secured.json"
     )
+    self.configure_secured()
     self.assertResourceCalled('Execute', 'cd /var/tmp/oozie && /usr/lib/oozie/bin/ooziedb.sh create -sqlfile oozie.sql -run',
       not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
       ignore_failures = True,
@@ -260,3 +282,181 @@ class TestOozieServer(RMFTestCase):
       only_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
     )
     self.assertNoMoreResources()
+
+  def configure_default(self):
+    self.assertResourceCalled('HdfsDirectory', '/user/oozie',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0775,
+                              owner = 'oozie',
+                              action = ['create'],
+                              )
+    self.assertResourceCalled('XmlConfig', 'oozie-site.xml',
+                              owner = 'oozie',
+                              group = 'hadoop',
+                              mode = 0664,
+                              conf_dir = '/etc/oozie/conf',
+                              configurations = self.getConfig()['configurations']['oozie-site'],
+                              )
+    self.assertResourceCalled('Directory', '/etc/oozie/conf',
+                              owner = 'oozie',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('TemplateConfig', '/etc/oozie/conf/oozie-env.sh',
+                              owner = 'oozie',
+                              )
+    self.assertResourceCalled('File', '/etc/oozie/conf/adminusers.txt',
+                              owner = 'oozie',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('File', '/etc/oozie/conf/hadoop-config.xml',
+                              owner = 'oozie',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('File', '/etc/oozie/conf/oozie-default.xml',
+                              owner = 'oozie',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('Directory', '/etc/oozie/conf/action-conf',
+                              owner = 'oozie',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('File', '/etc/oozie/conf/action-conf/hive.xml',
+                              owner = 'oozie',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('Directory', '/var/run/oozie',
+                              owner = 'oozie',
+                              recursive = True,
+                              mode = 0755,
+                              )
+    self.assertResourceCalled('Directory', '/var/log/oozie',
+                              owner = 'oozie',
+                              recursive = True,
+                              mode = 0755,
+                              )
+    self.assertResourceCalled('Directory', '/var/tmp/oozie',
+                              owner = 'oozie',
+                              recursive = True,
+                              mode = 0755,
+                              )
+    self.assertResourceCalled('Directory', '/hadoop/oozie/data',
+                              owner = 'oozie',
+                              recursive = True,
+                              mode = 0755,
+                              )
+    self.assertResourceCalled('Directory', '/var/lib/oozie/',
+                              owner = 'oozie',
+                              recursive = True,
+                              mode = 0755,
+                              )
+    self.assertResourceCalled('Directory', '/var/lib/oozie/oozie-server/webapps/',
+                              owner = 'oozie',
+                              recursive = True,
+                              mode = 0755,
+                              )
+    self.assertResourceCalled('Execute', 'cd /usr/lib/oozie && tar -xvf oozie-sharelib.tar.gz',
+                              not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
+                              )
+    self.assertResourceCalled('Execute', 'cd /usr/lib/oozie && mkdir -p /var/tmp/oozie',
+                              not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
+                              )
+    self.assertResourceCalled('Execute', 'cd /usr/lib/oozie && chown oozie:hadoop /var/tmp/oozie',
+                              not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
+                              )
+    self.assertResourceCalled('Execute', 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-setup.sh -hadoop 0.20.200 /usr/lib/hadoop/ -extjs /usr/share/HDP-oozie/ext.zip -jars /usr/lib/hadoop/lib/hadoop-lzo-0.5.0.jar:',
+                              not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
+                              user = 'oozie',
+                              )
+
+  def configure_secured(self):
+    self.assertResourceCalled('HdfsDirectory', '/user/oozie',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0775,
+                              owner = 'oozie',
+                              action = ['create'],
+                              )
+    self.assertResourceCalled('XmlConfig', 'oozie-site.xml',
+                              owner = 'oozie',
+                              group = 'hadoop',
+                              mode = 0664,
+                              conf_dir = '/etc/oozie/conf',
+                              configurations = self.getConfig()['configurations']['oozie-site'],
+                              )
+    self.assertResourceCalled('Directory', '/etc/oozie/conf',
+                              owner = 'oozie',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('TemplateConfig', '/etc/oozie/conf/oozie-env.sh',
+                              owner = 'oozie',
+                              )
+    self.assertResourceCalled('File', '/etc/oozie/conf/adminusers.txt',
+                              owner = 'oozie',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('File', '/etc/oozie/conf/hadoop-config.xml',
+                              owner = 'oozie',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('File', '/etc/oozie/conf/oozie-default.xml',
+                              owner = 'oozie',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('Directory', '/etc/oozie/conf/action-conf',
+                              owner = 'oozie',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('File', '/etc/oozie/conf/action-conf/hive.xml',
+                              owner = 'oozie',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('Directory', '/var/run/oozie',
+                              owner = 'oozie',
+                              recursive = True,
+                              mode = 0755,
+                              )
+    self.assertResourceCalled('Directory', '/var/log/oozie',
+                              owner = 'oozie',
+                              recursive = True,
+                              mode = 0755,
+                              )
+    self.assertResourceCalled('Directory', '/var/tmp/oozie',
+                              owner = 'oozie',
+                              recursive = True,
+                              mode = 0755,
+                              )
+    self.assertResourceCalled('Directory', '/hadoop/oozie/data',
+                              owner = 'oozie',
+                              recursive = True,
+                              mode = 0755,
+                              )
+    self.assertResourceCalled('Directory', '/var/lib/oozie/',
+                              owner = 'oozie',
+                              recursive = True,
+                              mode = 0755,
+                              )
+    self.assertResourceCalled('Directory', '/var/lib/oozie/oozie-server/webapps/',
+                              owner = 'oozie',
+                              recursive = True,
+                              mode = 0755,
+                              )
+    self.assertResourceCalled('Execute', 'cd /usr/lib/oozie && tar -xvf oozie-sharelib.tar.gz',
+                              not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
+                              )
+    self.assertResourceCalled('Execute', 'cd /usr/lib/oozie && mkdir -p /var/tmp/oozie',
+                              not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
+                              )
+    self.assertResourceCalled('Execute', 'cd /usr/lib/oozie && chown oozie:hadoop /var/tmp/oozie',
+                              not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
+                              )
+    self.assertResourceCalled('Execute', 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-setup.sh -hadoop 0.20.200 /usr/lib/hadoop/ -extjs /usr/share/HDP-oozie/ext.zip -jars /usr/lib/hadoop/lib/hadoop-lzo-0.5.0.jar:',
+                              not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
+                              user = 'oozie',
+                              )

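The assertions in these tests come from Ambari's RMFTestCase harness: executeScript() runs a real stack script against a mocked environment, recording every resource the script declares, and each assertResourceCalled() then pops the next recorded resource and compares its type, name, and keyword arguments in order. A minimal sketch of the idiom, assuming the executeScript() helper these files use and a hypothetical default.json config fixture:

  class TestOozieServer(RMFTestCase):
    def test_configure_default(self):
      # Run the real oozie_server.py script with mocked shell/config access.
      self.executeScript("1.3.2/services/OOZIE/package/scripts/oozie_server.py",
                         classname = "OozieServer",
                         command = "configure",
                         config_file = "default.json")
      # Resources must be asserted in the exact order the script declared them.
      self.assertResourceCalled('Directory', '/var/run/oozie',
                                owner = 'oozie',
                                recursive = True,
                                mode = 0755,
                                )
      self.assertNoMoreResources()  # fails if the script declared anything further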
+ 56 - 0
ambari-server/src/test/python/stacks/1.3.2/WEBHCAT/test_webhcat_server.py

@@ -96,6 +96,34 @@ class TestWebHCatServer(RMFTestCase):
     self.assertNoMoreResources()
 
   def assert_configure_default(self):
+    self.assertResourceCalled('HdfsDirectory', '/user/hcat',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              mode = 0755,
+                              owner = 'hcat',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/apps/webhcat',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              mode = 0755,
+                              owner = 'hcat',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              action = ['create'],
+                              )
     self.assertResourceCalled('Directory', '/etc/run/webhcat',
       owner = 'hcat',
       group = 'hadoop',
@@ -140,6 +168,34 @@ class TestWebHCatServer(RMFTestCase):
     )
 
   def assert_configure_secured(self):
+    self.assertResourceCalled('HdfsDirectory', '/user/hcat',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0755,
+                              owner = 'hcat',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/apps/webhcat',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0755,
+                              owner = 'hcat',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              action = ['create'],
+                              )
     self.assertResourceCalled('Directory', '/etc/run/webhcat',
       owner = 'hcat',
       group = 'hadoop',

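The HdfsDirectory blocks that now open each assert_configure_* method exercise the batching contract of the new resource: every path queued with action=['create_delayed'] is only remembered, and the trailing HdfsDirectory(None, action=['create']) flushes the whole queue, so the hdfs user authenticates and invokes hadoop fs once per batch rather than once per directory. A rough sketch of that contract as a service script would use it (a sketch only, not the provider implementation; params.webhcat_user stands in for the usual params import):

  import params

  HdfsDirectory("/apps/webhcat",
                action = ['create_delayed'],   # queued, nothing executes yet
                owner = params.webhcat_user,
                mode = 0755,
                )
  HdfsDirectory("/user/hcat",
                action = ['create_delayed'],   # queued behind the first path
                owner = params.webhcat_user,
                mode = 0755,
                )
  HdfsDirectory(None,
                action = ['create'],           # flush: one kinit + one fs call for the batch
                )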
+ 64 - 10
ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py

@@ -164,11 +164,42 @@ class TestHBaseMaster(RMFTestCase):
     self.assertNoMoreResources()
 
   def assert_configure_default(self):
+    self.assertResourceCalled('HdfsDirectory', 'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              owner = 'hbase',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              mode = 0711,
+                              owner = 'hbase',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              action = ['create'],
+                              )
     self.assertResourceCalled('Directory', '/etc/hbase/conf',
       owner = 'hbase',
       group = 'hadoop',
       recursive = True,
-    )   
+    )
+    self.assertResourceCalled('Directory', '/hadoop/hbase',
+      owner = 'hbase',
+      recursive = True,
+    )
     self.assertResourceCalled('XmlConfig', 'hbase-site.xml',
       owner = 'hbase',
       group = 'hadoop',
@@ -200,11 +231,7 @@ class TestHBaseMaster(RMFTestCase):
     self.assertResourceCalled('Directory', '/var/run/hbase',
       owner = 'hbase',
       recursive = True,
-    )   
-    self.assertResourceCalled('Directory', '/hadoop/hbase',
-      owner = 'hbase',
-      recursive = True,
-    )    
+    )
     self.assertResourceCalled('Directory', '/var/log/hbase',
       owner = 'hbase',
       recursive = True,
@@ -219,11 +246,42 @@ class TestHBaseMaster(RMFTestCase):
     )
 
   def assert_configure_secured(self):
+    self.assertResourceCalled('HdfsDirectory', 'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              owner = 'hbase',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0711,
+                              owner = 'hbase',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              action = ['create'],
+                              )
     self.assertResourceCalled('Directory', '/etc/hbase/conf',
       owner = 'hbase',
       group = 'hadoop',
       recursive = True,
     )
+    self.assertResourceCalled('Directory', '/hadoop/hbase',
+      owner = 'hbase',
+      recursive = True,
+    )
     self.assertResourceCalled('XmlConfig', 'hbase-site.xml',
       owner = 'hbase',
       group = 'hadoop',
@@ -260,10 +318,6 @@ class TestHBaseMaster(RMFTestCase):
       owner = 'hbase',
       recursive = True,
     )
-    self.assertResourceCalled('Directory', '/hadoop/hbase',
-      owner = 'hbase',
-      recursive = True,
-    )
     self.assertResourceCalled('Directory', '/var/log/hbase',
       owner = 'hbase',
       recursive = True,

+ 64 - 10
ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py

@@ -97,11 +97,42 @@ class TestHbaseRegionServer(RMFTestCase):
     self.assertNoMoreResources()
 
   def assert_configure_default(self):
+    self.assertResourceCalled('HdfsDirectory', 'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              owner = 'hbase',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              mode = 0711,
+                              owner = 'hbase',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              action = ['create'],
+                              )
     self.assertResourceCalled('Directory', '/etc/hbase/conf',
       owner = 'hbase',
       group = 'hadoop',
       recursive = True,
-    )   
+    )
+    self.assertResourceCalled('Directory', '/hadoop/hbase',
+      owner = 'hbase',
+      recursive = True,
+    )
     self.assertResourceCalled('XmlConfig', 'hbase-site.xml',
       owner = 'hbase',
       group = 'hadoop',
@@ -133,11 +164,7 @@ class TestHbaseRegionServer(RMFTestCase):
     self.assertResourceCalled('Directory', '/var/run/hbase',
       owner = 'hbase',
       recursive = True,
-    )   
-    self.assertResourceCalled('Directory', '/hadoop/hbase',
-      owner = 'hbase',
-      recursive = True,
-    )    
+    )
     self.assertResourceCalled('Directory', '/var/log/hbase',
       owner = 'hbase',
       recursive = True,
@@ -152,11 +179,42 @@ class TestHbaseRegionServer(RMFTestCase):
     )
 
   def assert_configure_secured(self):
+    self.assertResourceCalled('HdfsDirectory', 'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              owner = 'hbase',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0711,
+                              owner = 'hbase',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              action = ['create'],
+                              )
     self.assertResourceCalled('Directory', '/etc/hbase/conf',
       owner = 'hbase',
       group = 'hadoop',
       recursive = True,
     )
+    self.assertResourceCalled('Directory', '/hadoop/hbase',
+      owner = 'hbase',
+      recursive = True,
+    )
     self.assertResourceCalled('XmlConfig', 'hbase-site.xml',
       owner = 'hbase',
       group = 'hadoop',
@@ -193,10 +251,6 @@ class TestHbaseRegionServer(RMFTestCase):
       owner = 'hbase',
       recursive = True,
     )
-    self.assertResourceCalled('Directory', '/hadoop/hbase',
-      owner = 'hbase',
-      recursive = True,
-    )
     self.assertResourceCalled('Directory', '/var/log/hbase',
       owner = 'hbase',
       recursive = True,

+ 56 - 0
ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py

@@ -164,6 +164,34 @@ class TestHiveServer(RMFTestCase):
     self.assertNoMoreResources()
 
   def assert_configure_default(self):
+    self.assertResourceCalled('HdfsDirectory', '/apps/hive/warehouse',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              mode = 0777,
+                              owner = 'hive',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/user/hive',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              mode = 0700,
+                              owner = 'hive',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              action = ['create'],
+                              )
     self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/HDP-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
       creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
       path = ['/bin', 'usr/bin/'],
@@ -221,6 +249,34 @@ class TestHiveServer(RMFTestCase):
     )
 
   def assert_configure_secured(self):
+    self.assertResourceCalled('HdfsDirectory', '/apps/hive/warehouse',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0777,
+                              owner = 'hive',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/user/hive',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0700,
+                              owner = 'hive',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              action = ['create'],
+                              )
     self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/HDP-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
       creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
       path = ['/bin', 'usr/bin/'],

+ 20 - 0
ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py

@@ -114,6 +114,16 @@ class TestOozieServer(RMFTestCase):
     # Hack for oozie.py changing conf on fly
     oozie_site = self.getConfig()['configurations']['oozie-site'].copy()
     oozie_site["oozie.services.ext"] = 'org.apache.oozie.service.JMSAccessorService,' + oozie_site["oozie.services.ext"]
+    self.assertResourceCalled('HdfsDirectory', '/user/oozie',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0775,
+                              owner = 'oozie',
+                              action = ['create'],
+    )
     self.assertResourceCalled('XmlConfig', 'oozie-site.xml',
                               owner = 'oozie',
                               group = 'hadoop',
@@ -197,6 +207,16 @@ class TestOozieServer(RMFTestCase):
     # Hack for oozie.py changing conf on fly
     oozie_site = self.getConfig()['configurations']['oozie-site'].copy()
     oozie_site["oozie.services.ext"] = 'org.apache.oozie.service.JMSAccessorService,' + oozie_site["oozie.services.ext"]
+    self.assertResourceCalled('HdfsDirectory', '/user/oozie',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0775,
+                              owner = 'oozie',
+                              action = ['create'],
+                              )
     self.assertResourceCalled('XmlConfig', 'oozie-site.xml',
                               owner = 'oozie',
                               group = 'hadoop',

+ 56 - 0
ambari-server/src/test/python/stacks/2.0.6/WEBHCAT/test_webhcat_server.py

@@ -99,6 +99,34 @@ class TestWebHCatServer(RMFTestCase):
     self.assertNoMoreResources()
 
   def assert_configure_default(self):
+    self.assertResourceCalled('HdfsDirectory', '/apps/webhcat',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              mode = 0755,
+                              owner = 'hcat',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/user/hcat',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              mode = 0755,
+                              owner = 'hcat',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              action = ['create'],
+                              )
     self.assertResourceCalled('Directory', '/var/run/webhcat',
       owner = 'hcat',
       group = 'hadoop',
@@ -143,6 +171,34 @@ class TestWebHCatServer(RMFTestCase):
     )
 
   def assert_configure_secured(self):
+    self.assertResourceCalled('HdfsDirectory', '/apps/webhcat',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0755,
+                              owner = 'hcat',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/user/hcat',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0755,
+                              owner = 'hcat',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              action = ['create'],
+                              )
     self.assertResourceCalled('Directory', '/var/run/webhcat',
       owner = 'hcat',
       group = 'hadoop',

+ 121 - 3
ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py

@@ -110,7 +110,66 @@ class TestHistoryServer(RMFTestCase):
     self.assertNoMoreResources()
 
   def assert_configure_default(self):
-
+    self.assertResourceCalled('HdfsDirectory', '/app-logs',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              recursive_chmod = True,
+                              owner = 'yarn',
+                              group = 'hadoop',
+                              action = ['create_delayed'],
+                              mode = 0777,
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mapred',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              owner = 'mapred',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mapred/system',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              owner = 'hdfs',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mr-history/tmp',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              mode = 0777,
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mr-history/done',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              mode = 01777,
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              action = ['create'],
+                              )
     self.assertResourceCalled('Directory', '/var/run/hadoop-yarn/yarn',
       owner = 'yarn',
       group = 'hadoop',
@@ -195,7 +254,66 @@ class TestHistoryServer(RMFTestCase):
     )
 
   def assert_configure_secured(self):
-
+    self.assertResourceCalled('HdfsDirectory', '/app-logs',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              recursive_chmod = True,
+                              owner = 'yarn',
+                              group = 'hadoop',
+                              action = ['create_delayed'],
+                              mode = 0777,
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mapred',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              owner = 'mapred',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mapred/system',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              owner = 'hdfs',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mr-history/tmp',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0777,
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mr-history/done',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 01777,
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              action = ['create'],
+                              )
     self.assertResourceCalled('Directory', '/var/run/hadoop-yarn/yarn',
       owner = 'yarn',
       group = 'hadoop',
@@ -286,4 +404,4 @@ class TestHistoryServer(RMFTestCase):
       content = Template('container-executor.cfg.j2'),
       group = 'hadoop',
       mode = 420,
-    )
+    )

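A small reading aid for the modes recorded above: the mock stores keyword arguments as plain Python values, so mode = 420 is simply the decimal form of octal 0644, while 0777 and 01777 are Python 2 octal literals (01777 being rwxrwxrwx plus the sticky bit on /mr-history/done). The equivalences, if in doubt:

  # Python 2 octal literals vs. the decimal values the mock records
  assert 0644 == 420     # container-executor.cfg mode
  assert 01777 == 1023   # /mr-history/done: sticky bit + rwxrwxrwx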
+ 121 - 3
ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py

@@ -110,7 +110,66 @@ class TestNodeManager(RMFTestCase):
     self.assertNoMoreResources()
 
   def assert_configure_default(self):
-
+    self.assertResourceCalled('HdfsDirectory', '/app-logs',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              recursive_chmod = True,
+                              owner = 'yarn',
+                              group = 'hadoop',
+                              action = ['create_delayed'],
+                              mode = 0777,
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mapred',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              owner = 'mapred',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mapred/system',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              owner = 'hdfs',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mr-history/tmp',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              mode = 0777,
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mr-history/done',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              mode = 01777,
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              action = ['create'],
+                              )
     self.assertResourceCalled('Directory', '/var/run/hadoop-yarn/yarn',
       owner = 'yarn',
       group = 'hadoop',
@@ -195,7 +254,66 @@ class TestNodeManager(RMFTestCase):
     )
 
   def assert_configure_secured(self):
-
+    self.assertResourceCalled('HdfsDirectory', '/app-logs',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              recursive_chmod = True,
+                              owner = 'yarn',
+                              group = 'hadoop',
+                              action = ['create_delayed'],
+                              mode = 0777,
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mapred',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              owner = 'mapred',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mapred/system',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              owner = 'hdfs',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mr-history/tmp',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0777,
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mr-history/done',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 01777,
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              action = ['create'],
+                              )
     self.assertResourceCalled('Directory', '/var/run/hadoop-yarn/yarn',
       owner = 'yarn',
       group = 'hadoop',
@@ -287,4 +405,4 @@ class TestNodeManager(RMFTestCase):
       group = 'hadoop',
       mode = 420,
     )
-    
+    

+ 3 - 0
ambari-server/src/test/python/stacks/utils/RMFTestCase.py

@@ -161,3 +161,6 @@ class UnknownConfigurationMock():
   def __eq__(self, other):
     return isinstance(other, UnknownConfiguration)
 
+  def __ne__(self, other):
+    return not self.__eq__(other)
+
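The RMFTestCase.py change closes a standard Python 2 gap: defining __eq__ does not implicitly define __ne__, so a != comparison against UnknownConfigurationMock would ignore the custom equality and fall back to default, identity-based comparison. A minimal illustration of why the added method matters, using a stand-in class rather than the mock itself:

  class Marker(object):
    def __eq__(self, other):
      return isinstance(other, Marker)

    def __ne__(self, other):
      # Without this, Python 2 evaluates 'a != b' by identity,
      # contradicting the __eq__ above.
      return not self.__eq__(other)

  assert Marker() == Marker()
  assert not (Marker() != Marker())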