Browse Source

AMBARI-12837. Propagate service type information to dictionary (Vijay Srinivasaraghavan via smohanty)

Sumit Mohanty 9 years ago
parent
commit
93a2106a6e
43 changed files with 272 additions and 45 deletions
  1. 9 5
      ambari-common/src/main/python/resource_management/libraries/providers/hdfs_resource.py
  2. 3 0
      ambari-common/src/main/python/resource_management/libraries/resources/hdfs_resource.py
  3. 14 0
      ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
  4. 14 5
      ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
  5. 5 1
      ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
  6. 4 2
      ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/hbase.py
  7. 3 0
      ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params_linux.py
  8. 4 1
      ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py
  9. 5 2
      ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
  10. 4 1
      ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
  11. 4 1
      ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
  12. 4 1
      ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
  13. 5 1
      ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
  14. 4 1
      ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py
  15. 3 1
      ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
  16. 4 1
      ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
  17. 2 0
      ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapred_service_check.py
  18. 5 2
      ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
  19. 2 2
      ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
  20. 2 2
      ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
  21. 1 1
      ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/hook.py
  22. 2 2
      ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
  23. 2 2
      ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/shared_initialization.py
  24. 3 2
      ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
  25. 6 5
      ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py
  26. 2 0
      ambari-server/src/test/python/stacks/2.0.6/AMBARI_METRICS/test_metrics_collector.py
  27. 9 0
      ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
  28. 24 0
      ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
  29. 4 0
      ambari-server/src/test/python/stacks/2.0.6/HDFS/test_service_check.py
  30. 14 0
      ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
  31. 6 0
      ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
  32. 17 1
      ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
  33. 9 0
      ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_service_check.py
  34. 6 0
      ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_service_check.py
  35. 15 1
      ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
  36. 6 0
      ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_service_check.py
  37. 6 0
      ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
  38. 3 0
      ambari-server/src/test/python/stacks/2.1/TEZ/test_service_check.py
  39. 6 0
      ambari-server/src/test/python/stacks/2.2/PIG/test_pig_service_check.py
  40. 6 0
      ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py
  41. 4 0
      ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py
  42. 15 0
      ambari-web/app/data/HDP2/site_properties.js
  43. 6 2
      contrib/fast-hdfs-resource/src/main/java/org/apache/ambari/fast_hdfs_resource/Runner.java

+ 9 - 5
ambari-common/src/main/python/resource_management/libraries/providers/hdfs_resource.py

@@ -50,7 +50,8 @@ RESOURCE_TO_JSON_FIELDS = {
   'mode': 'mode',
   'mode': 'mode',
   'recursive_chown': 'recursiveChown',
   'recursive_chown': 'recursiveChown',
   'recursive_chmod': 'recursiveChmod',
   'recursive_chmod': 'recursiveChmod',
-  'change_permissions_for_parents': 'changePermissionforParents'
+  'change_permissions_for_parents': 'changePermissionforParents',
+  'dfs_type': 'dfs_type'
 }
 }
 
 
 class HdfsResourceJar:
 class HdfsResourceJar:
@@ -381,9 +382,10 @@ class HdfsResourceWebHDFS:
 class HdfsResourceProvider(Provider):
 class HdfsResourceProvider(Provider):
   def __init__(self, resource):
   def __init__(self, resource):
     super(HdfsResourceProvider,self).__init__(resource)
     super(HdfsResourceProvider,self).__init__(resource)
-    self.assert_parameter_is_set('hdfs_site')
-    
-    self.webhdfs_enabled = self.resource.hdfs_site['dfs.webhdfs.enabled']
+    self.fsType = getattr(resource, 'dfs_type')
+    if self.fsType != 'HCFS':
+      self.assert_parameter_is_set('hdfs_site')
+      self.webhdfs_enabled = self.resource.hdfs_site['dfs.webhdfs.enabled']
     
     
   def action_delayed(self, action_name):
   def action_delayed(self, action_name):
     self.assert_parameter_is_set('type')
     self.assert_parameter_is_set('type')
@@ -400,7 +402,9 @@ class HdfsResourceProvider(Provider):
     self.get_hdfs_resource_executor().action_execute(self)
     self.get_hdfs_resource_executor().action_execute(self)
 
 
   def get_hdfs_resource_executor(self):
   def get_hdfs_resource_executor(self):
-    if WebHDFSUtil.is_webhdfs_available(self.webhdfs_enabled, self.resource.default_fs):
+    if self.fsType == 'HCFS':
+      return HdfsResourceJar()
+    elif WebHDFSUtil.is_webhdfs_available(self.webhdfs_enabled, self.resource.default_fs):
       return HdfsResourceWebHDFS()
       return HdfsResourceWebHDFS()
     else:
     else:
       return HdfsResourceJar()
       return HdfsResourceJar()

+ 3 - 0
ambari-common/src/main/python/resource_management/libraries/resources/hdfs_resource.py

@@ -80,6 +80,9 @@ class HdfsResource(Resource):
   hdfs_site = ResourceArgument()
   hdfs_site = ResourceArgument()
   default_fs = ResourceArgument()
   default_fs = ResourceArgument()
 
 
+  # To support HCFS
+  dfs_type = ResourceArgument(default="")
+
   #action 'execute' immediately creates all pending files/directories in efficient manner
   #action 'execute' immediately creates all pending files/directories in efficient manner
   #action 'create_delayed/delete_delayed' adds file/directory to list of pending directories
   #action 'create_delayed/delete_delayed' adds file/directory to list of pending directories
   actions = Resource.actions + ["create_on_execute", "delete_on_execute", "execute"]
   actions = Resource.actions + ["create_on_execute", "delete_on_execute", "execute"]

+ 14 - 0
ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java

@@ -48,6 +48,7 @@ import java.util.HashMap;
 import java.util.HashSet;
 import java.util.HashSet;
 import java.util.List;
 import java.util.List;
 import java.util.Map;
 import java.util.Map;
+import java.util.Iterator;
 import java.util.Set;
 import java.util.Set;
 import java.util.TreeMap;
 import java.util.TreeMap;
 
 
@@ -1059,6 +1060,19 @@ public class AmbariCustomCommandExecutionHelper {
         hostParamsStage.put(CLIENTS_TO_UPDATE_CONFIGS, clientsToUpdateConfigs);
         hostParamsStage.put(CLIENTS_TO_UPDATE_CONFIGS, clientsToUpdateConfigs);
       }
       }
       clusterHostInfoJson = StageUtils.getGson().toJson(clusterHostInfo);
       clusterHostInfoJson = StageUtils.getGson().toJson(clusterHostInfo);
+
+      //Propagate HCFS service type info to command params
+      Iterator<Service> it = cluster.getServices().values().iterator();
+      while(it.hasNext()) {
+          ServiceInfo serviceInfoInstance = ambariMetaInfo.getService(stackId.getStackName(),stackId.getStackVersion(), it.next().getName());
+          LOG.info("Iterating service type Instance in getCommandJson:: " + serviceInfoInstance.getName());
+          if(serviceInfoInstance.getServiceType() != null) {
+              LOG.info("Adding service type info in getCommandJson:: " + serviceInfoInstance.getServiceType());
+              commandParamsStage.put("dfs_type",serviceInfoInstance.getServiceType());
+              break;
+          }
+      }      
+
     }
     }
 
 
     String hostParamsStageJson = StageUtils.getGson().toJson(hostParamsStage);
     String hostParamsStageJson = StageUtils.getGson().toJson(hostParamsStage);

+ 14 - 5
ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java

@@ -50,6 +50,7 @@ import java.util.Collections;
 import java.util.EnumMap;
 import java.util.EnumMap;
 import java.util.HashMap;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.HashSet;
+import java.util.Iterator;
 import java.util.LinkedHashSet;
 import java.util.LinkedHashSet;
 import java.util.LinkedList;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.List;
@@ -1882,20 +1883,28 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
       }
       }
     }
     }
 
 
-    LOG.info("Adding service type info in createHostAction:: " + serviceInfo.getServiceType());
-    execCmd.setServiceType(serviceInfo.getServiceType());
-
     execCmd.setConfigurations(configurations);
     execCmd.setConfigurations(configurations);
     execCmd.setConfigurationAttributes(configurationAttributes);
     execCmd.setConfigurationAttributes(configurationAttributes);
     execCmd.setConfigurationTags(configTags);
     execCmd.setConfigurationTags(configTags);
 
 
-
-
     // Create a local copy for each command
     // Create a local copy for each command
     Map<String, String> commandParams = new TreeMap<String, String>();
     Map<String, String> commandParams = new TreeMap<String, String>();
     if (commandParamsInp != null) { // if not defined
     if (commandParamsInp != null) { // if not defined
       commandParams.putAll(commandParamsInp);
       commandParams.putAll(commandParamsInp);
     }
     }
+
+    //Propagate HCFS service type info
+    Iterator<Service> it = cluster.getServices().values().iterator();
+    while(it.hasNext()) {
+    	ServiceInfo serviceInfoInstance = ambariMetaInfo.getService(stackId.getStackName(),stackId.getStackVersion(), it.next().getName());
+    	LOG.info("Iterating service type Instance in createHostAction:: " + serviceInfoInstance.getName());
+    	if(serviceInfoInstance.getServiceType() != null) {
+    	    LOG.info("Adding service type info in createHostAction:: " + serviceInfoInstance.getServiceType());
+            commandParams.put("dfs_type",serviceInfoInstance.getServiceType());
+    	    break;
+    	}
+    }
+
     boolean isInstallCommand = roleCommand.equals(RoleCommand.INSTALL);
     boolean isInstallCommand = roleCommand.equals(RoleCommand.INSTALL);
     String agentDefaultCommandTimeout = configs.getDefaultAgentTaskTimeout(isInstallCommand);
     String agentDefaultCommandTimeout = configs.getDefaultAgentTaskTimeout(isInstallCommand);
     String scriptCommandTimeout = "";
     String scriptCommandTimeout = "";

+ 5 - 1
ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py

@@ -158,6 +158,9 @@ hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_nam
 
 
 hdfs_site = config['configurations']['hdfs-site']
 hdfs_site = config['configurations']['hdfs-site']
 default_fs = config['configurations']['core-site']['fs.defaultFS']
 default_fs = config['configurations']['core-site']['fs.defaultFS']
+
+dfs_type = default("/commandParams/dfs_type", "")
+
 # dfs.namenode.https-address
 # dfs.namenode.https-address
 import functools
 import functools
 #create partial functions with common arguments for every HdfsResource call
 #create partial functions with common arguments for every HdfsResource call
@@ -172,5 +175,6 @@ HdfsResource = functools.partial(
   hadoop_conf_dir = hadoop_conf_dir,
   hadoop_conf_dir = hadoop_conf_dir,
   principal_name = hdfs_principal_name,
   principal_name = hdfs_principal_name,
   hdfs_site = hdfs_site,
   hdfs_site = hdfs_site,
-  default_fs = default_fs
+  default_fs = default_fs,
+  dfs_type = dfs_type
 )
 )

+ 4 - 2
ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/hbase.py

@@ -183,14 +183,16 @@ def hbase(name=None # 'master' or 'regionserver' or 'client'
                              type="directory",
                              type="directory",
                              action="create_on_execute",
                              action="create_on_execute",
                              owner=params.hbase_user,
                              owner=params.hbase_user,
-                             mode=0775
+                             mode=0775,
+                             dfs_type=params.dfs_type
         )
         )
 
 
         params.HdfsResource(params.hbase_staging_dir,
         params.HdfsResource(params.hbase_staging_dir,
                              type="directory",
                              type="directory",
                              action="create_on_execute",
                              action="create_on_execute",
                              owner=params.hbase_user,
                              owner=params.hbase_user,
-                             mode=0711
+                             mode=0711,
+                             dfs_type=params.dfs_type
         )
         )
 
 
         params.HdfsResource(None, action="execute")
         params.HdfsResource(None, action="execute")

+ 3 - 0
ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params_linux.py

@@ -48,3 +48,6 @@ hbase_conf_dir = "/etc/ams-hbase/conf"
 
 
 limits_conf_dir = "/etc/security/limits.d"
 limits_conf_dir = "/etc/security/limits.d"
 sudo = AMBARI_SUDO_BINARY
 sudo = AMBARI_SUDO_BINARY
+
+dfs_type = default("/commandParams/dfs_type", "")
+

+ 4 - 1
ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py

@@ -111,6 +111,8 @@ dfs_data_mirroring_dir = "/apps/data-mirroring"
 hdfs_site = config['configurations']['hdfs-site']
 hdfs_site = config['configurations']['hdfs-site']
 default_fs = config['configurations']['core-site']['fs.defaultFS']
 default_fs = config['configurations']['core-site']['fs.defaultFS']
 
 
+dfs_type = default("/commandParams/dfs_type", "")
+
 import functools
 import functools
 #create partial functions with common arguments for every HdfsResource call
 #create partial functions with common arguments for every HdfsResource call
 #to create/delete hdfs directory/file/copyfromlocal we need to call params.HdfsResource in code
 #to create/delete hdfs directory/file/copyfromlocal we need to call params.HdfsResource in code
@@ -124,6 +126,7 @@ HdfsResource = functools.partial(
   hadoop_conf_dir = hadoop_conf_dir,
   hadoop_conf_dir = hadoop_conf_dir,
   principal_name = hdfs_principal_name,
   principal_name = hdfs_principal_name,
   hdfs_site = hdfs_site,
   hdfs_site = hdfs_site,
-  default_fs = default_fs
+  default_fs = default_fs,
+  dfs_type = dfs_type
  )
  )
 
 

+ 5 - 2
ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py

@@ -35,7 +35,6 @@ from resource_management.libraries.functions import is_empty
 from resource_management.libraries.functions import get_unique_id_and_date
 from resource_management.libraries.functions import get_unique_id_and_date
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.script.script import Script
 
 
-
 from resource_management.libraries.functions.substitute_vars import substitute_vars
 from resource_management.libraries.functions.substitute_vars import substitute_vars
 
 
 # server configurations
 # server configurations
@@ -188,6 +187,9 @@ hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_nam
 
 
 hdfs_site = config['configurations']['hdfs-site']
 hdfs_site = config['configurations']['hdfs-site']
 default_fs = config['configurations']['core-site']['fs.defaultFS']
 default_fs = config['configurations']['core-site']['fs.defaultFS']
+
+dfs_type = default("/commandParams/dfs_type", "")
+
 import functools
 import functools
 #create partial functions with common arguments for every HdfsResource call
 #create partial functions with common arguments for every HdfsResource call
 #to create/delete hdfs directory/file/copyfromlocal we need to call params.HdfsResource in code
 #to create/delete hdfs directory/file/copyfromlocal we need to call params.HdfsResource in code
@@ -201,7 +203,8 @@ HdfsResource = functools.partial(
   hadoop_conf_dir = hadoop_conf_dir,
   hadoop_conf_dir = hadoop_conf_dir,
   principal_name = hdfs_principal_name,
   principal_name = hdfs_principal_name,
   hdfs_site = hdfs_site,
   hdfs_site = hdfs_site,
-  default_fs = default_fs
+  default_fs = default_fs,
+  dfs_type = dfs_type
 )
 )
 
 
 # ranger host
 # ranger host

+ 4 - 1
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py

@@ -313,6 +313,8 @@ else:
 hdfs_site = config['configurations']['hdfs-site']
 hdfs_site = config['configurations']['hdfs-site']
 default_fs = config['configurations']['core-site']['fs.defaultFS']
 default_fs = config['configurations']['core-site']['fs.defaultFS']
 
 
+dfs_type = default("/commandParams/dfs_type", "")
+
 import functools
 import functools
 #create partial functions with common arguments for every HdfsResource call
 #create partial functions with common arguments for every HdfsResource call
 #to create/delete/copyfromlocal hdfs directories/files we need to call params.HdfsResource in code
 #to create/delete/copyfromlocal hdfs directories/files we need to call params.HdfsResource in code
@@ -326,7 +328,8 @@ HdfsResource = functools.partial(
   hadoop_conf_dir = hadoop_conf_dir,
   hadoop_conf_dir = hadoop_conf_dir,
   principal_name = hdfs_principal_name,
   principal_name = hdfs_principal_name,
   hdfs_site = hdfs_site,
   hdfs_site = hdfs_site,
-  default_fs = default_fs
+  default_fs = default_fs,
+  dfs_type = dfs_type
 )
 )
 
 
 
 

+ 4 - 1
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py

@@ -443,6 +443,8 @@ security_param = "true" if security_enabled else "false"
 hdfs_site = config['configurations']['hdfs-site']
 hdfs_site = config['configurations']['hdfs-site']
 default_fs = config['configurations']['core-site']['fs.defaultFS']
 default_fs = config['configurations']['core-site']['fs.defaultFS']
 
 
+dfs_type = default("/commandParams/dfs_type", "")
+
 import functools
 import functools
 #create partial functions with common arguments for every HdfsResource call
 #create partial functions with common arguments for every HdfsResource call
 #to create hdfs directory we need to call params.HdfsResource in code
 #to create hdfs directory we need to call params.HdfsResource in code
@@ -456,7 +458,8 @@ HdfsResource = functools.partial(
   hadoop_conf_dir = hadoop_conf_dir,
   hadoop_conf_dir = hadoop_conf_dir,
   principal_name = hdfs_principal_name,
   principal_name = hdfs_principal_name,
   hdfs_site = hdfs_site,
   hdfs_site = hdfs_site,
-  default_fs = default_fs
+  default_fs = default_fs,
+  dfs_type = dfs_type
  )
  )
 
 
 
 

+ 4 - 1
ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py

@@ -71,6 +71,8 @@ log4j_props = config['configurations']['mahout-log4j']['content']
 hdfs_site = config['configurations']['hdfs-site']
 hdfs_site = config['configurations']['hdfs-site']
 default_fs = config['configurations']['core-site']['fs.defaultFS']
 default_fs = config['configurations']['core-site']['fs.defaultFS']
 
 
+dfs_type = default("/commandParams/dfs_type", "")
+
 import functools
 import functools
 #create partial functions with common arguments for every HdfsResource call
 #create partial functions with common arguments for every HdfsResource call
 #to create/delete hdfs directory/file/copyfromlocal we need to call params.HdfsResource in code
 #to create/delete hdfs directory/file/copyfromlocal we need to call params.HdfsResource in code
@@ -84,5 +86,6 @@ HdfsResource = functools.partial(
   hadoop_conf_dir = hadoop_conf_dir,
   hadoop_conf_dir = hadoop_conf_dir,
   principal_name = hdfs_principal_name,
   principal_name = hdfs_principal_name,
   hdfs_site = hdfs_site,
   hdfs_site = hdfs_site,
-  default_fs = default_fs
+  default_fs = default_fs,
+  dfs_type = dfs_type
 )
 )

+ 5 - 1
ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py

@@ -246,6 +246,9 @@ hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_nam
 
 
 hdfs_site = config['configurations']['hdfs-site']
 hdfs_site = config['configurations']['hdfs-site']
 default_fs = config['configurations']['core-site']['fs.defaultFS']
 default_fs = config['configurations']['core-site']['fs.defaultFS']
+
+dfs_type = default("/commandParams/dfs_type", "")
+
 import functools
 import functools
 #create partial functions with common arguments for every HdfsResource call
 #create partial functions with common arguments for every HdfsResource call
 #to create/delete hdfs directory/file/copyfromlocal we need to call params.HdfsResource in code
 #to create/delete hdfs directory/file/copyfromlocal we need to call params.HdfsResource in code
@@ -259,7 +262,8 @@ HdfsResource = functools.partial(
   hadoop_conf_dir = hadoop_conf_dir,
   hadoop_conf_dir = hadoop_conf_dir,
   principal_name = hdfs_principal_name,
   principal_name = hdfs_principal_name,
   hdfs_site = hdfs_site,
   hdfs_site = hdfs_site,
-  default_fs = default_fs
+  default_fs = default_fs,
+  dfs_type = dfs_type
 )
 )
 
 
 is_webhdfs_enabled = config['configurations']['hdfs-site']['dfs.webhdfs.enabled']
 is_webhdfs_enabled = config['configurations']['hdfs-site']['dfs.webhdfs.enabled']

+ 4 - 1
ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py

@@ -75,6 +75,8 @@ log4j_props = config['configurations']['pig-log4j']['content']
 hdfs_site = config['configurations']['hdfs-site']
 hdfs_site = config['configurations']['hdfs-site']
 default_fs = config['configurations']['core-site']['fs.defaultFS']
 default_fs = config['configurations']['core-site']['fs.defaultFS']
 
 
+dfs_type = default("/commandParams/dfs_type", "")
+
 import functools
 import functools
 #create partial functions with common arguments for every HdfsResource call
 #create partial functions with common arguments for every HdfsResource call
 #to create hdfs directory we need to call params.HdfsResource in code
 #to create hdfs directory we need to call params.HdfsResource in code
@@ -88,6 +90,7 @@ HdfsResource = functools.partial(
   hadoop_conf_dir = hadoop_conf_dir,
   hadoop_conf_dir = hadoop_conf_dir,
   principal_name = hdfs_principal_name,
   principal_name = hdfs_principal_name,
   hdfs_site = hdfs_site,
   hdfs_site = hdfs_site,
-  default_fs = default_fs
+  default_fs = default_fs,
+  dfs_type = dfs_type
  )
  )
 
 

+ 3 - 1
ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py

@@ -156,6 +156,7 @@ if security_enabled:
 default_fs = config['configurations']['core-site']['fs.defaultFS']
 default_fs = config['configurations']['core-site']['fs.defaultFS']
 hdfs_site = config['configurations']['hdfs-site']
 hdfs_site = config['configurations']['hdfs-site']
 
 
+dfs_type = default("/commandParams/dfs_type", "")
 
 
 import functools
 import functools
 #create partial functions with common arguments for every HdfsResource call
 #create partial functions with common arguments for every HdfsResource call
@@ -170,5 +171,6 @@ HdfsResource = functools.partial(
   hadoop_conf_dir = hadoop_conf_dir,
   hadoop_conf_dir = hadoop_conf_dir,
   principal_name = hdfs_principal_name,
   principal_name = hdfs_principal_name,
   hdfs_site = hdfs_site,
   hdfs_site = hdfs_site,
-  default_fs = default_fs
+  default_fs = default_fs,
+  dfs_type = dfs_type
  )
  )

+ 4 - 1
ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py

@@ -79,6 +79,8 @@ tez_env_sh_template = config['configurations']['tez-env']['content']
 hdfs_site = config['configurations']['hdfs-site']
 hdfs_site = config['configurations']['hdfs-site']
 default_fs = config['configurations']['core-site']['fs.defaultFS']
 default_fs = config['configurations']['core-site']['fs.defaultFS']
 
 
+dfs_type = default("/commandParams/dfs_type", "")
+
 import functools
 import functools
 #create partial functions with common arguments for every HdfsResource call
 #create partial functions with common arguments for every HdfsResource call
 #to create/delete/copyfromlocal hdfs directories/files we need to call params.HdfsResource in code
 #to create/delete/copyfromlocal hdfs directories/files we need to call params.HdfsResource in code
@@ -92,7 +94,8 @@ HdfsResource = functools.partial(
   hadoop_conf_dir = hadoop_conf_dir,
   hadoop_conf_dir = hadoop_conf_dir,
   principal_name = hdfs_principal_name,
   principal_name = hdfs_principal_name,
   hdfs_site = hdfs_site,
   hdfs_site = hdfs_site,
-  default_fs = default_fs
+  default_fs = default_fs,
+  dfs_type = dfs_type
 )
 )
 
 
 
 

+ 2 - 0
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapred_service_check.py

@@ -123,11 +123,13 @@ class MapReduce2ServiceCheckDefault(MapReduce2ServiceCheck):
     params.HdfsResource(output_file,
     params.HdfsResource(output_file,
                         action = "delete_on_execute",
                         action = "delete_on_execute",
                         type = "directory",
                         type = "directory",
+                        dfs_type = params.dfs_type,
     )
     )
     params.HdfsResource(input_file,
     params.HdfsResource(input_file,
                         action = "create_on_execute",
                         action = "create_on_execute",
                         type = "file",
                         type = "file",
                         source = "/etc/passwd",
                         source = "/etc/passwd",
+                        dfs_type = params.dfs_type,
     )
     )
     params.HdfsResource(None, action="execute")
     params.HdfsResource(None, action="execute")
 
 

+ 5 - 2
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py

@@ -30,7 +30,6 @@ from resource_management.libraries.functions.version import format_hdp_stack_ver
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions.default import default
 from resource_management.libraries import functions
 from resource_management.libraries import functions
 
 
-
 import status_params
 import status_params
 
 
 # a map of the Ambari role to the component name
 # a map of the Ambari role to the component name
@@ -250,6 +249,9 @@ hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_nam
 hdfs_site = config['configurations']['hdfs-site']
 hdfs_site = config['configurations']['hdfs-site']
 default_fs = config['configurations']['core-site']['fs.defaultFS']
 default_fs = config['configurations']['core-site']['fs.defaultFS']
 
 
+dfs_type = default("/commandParams/dfs_type", "")
+
+
 import functools
 import functools
 #create partial functions with common arguments for every HdfsResource call
 #create partial functions with common arguments for every HdfsResource call
 #to create/delete hdfs directory/file/copyfromlocal we need to call params.HdfsResource in code
 #to create/delete hdfs directory/file/copyfromlocal we need to call params.HdfsResource in code
@@ -263,7 +265,8 @@ HdfsResource = functools.partial(
   hadoop_conf_dir = hadoop_conf_dir,
   hadoop_conf_dir = hadoop_conf_dir,
   principal_name = hdfs_principal_name,
   principal_name = hdfs_principal_name,
   hdfs_site = hdfs_site,
   hdfs_site = hdfs_site,
-  default_fs = default_fs
+  default_fs = default_fs,
+  dfs_type = dfs_type
  )
  )
 update_exclude_file_only = default("/commandParams/update_exclude_file_only",False)
 update_exclude_file_only = default("/commandParams/update_exclude_file_only",False)
 
 

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py

@@ -30,7 +30,7 @@ from ambari_commons.os_check import OSCheck
 
 
 config = Script.get_config()
 config = Script.get_config()
 
 
-service_type = default("serviceType","")
+dfs_type = default("/commandParams/dfs_type", "")
 
 
 sudo = AMBARI_SUDO_BINARY
 sudo = AMBARI_SUDO_BINARY
 
 
@@ -91,5 +91,5 @@ user_group = config['configurations']['cluster-env']['user_group']
 namenode_host = default("/clusterHostInfo/namenode_host", [])
 namenode_host = default("/clusterHostInfo/namenode_host", [])
 has_namenode = not len(namenode_host) == 0
 has_namenode = not len(namenode_host) == 0
 
 
-if has_namenode:
+if has_namenode or dfs_type == 'HCFS':
   hadoop_conf_dir = conf_select.get_hadoop_conf_dir(force_latest_on_upgrade=True)
   hadoop_conf_dir = conf_select.get_hadoop_conf_dir(force_latest_on_upgrade=True)

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py

@@ -45,8 +45,8 @@ def setup_hdp_install_directory():
 def setup_config():
 def setup_config():
   import params
   import params
   stackversion = params.stack_version_unformatted
   stackversion = params.stack_version_unformatted
-  Logger.info("Service Type: {0}".format(params.service_type))
-  if params.has_namenode or stackversion.find('Gluster') >= 0 or params.service_type == 'HCFS':
+  Logger.info("FS Type: {0}".format(params.dfs_type))
+  if params.has_namenode or stackversion.find('Gluster') >= 0 or params.dfs_type == 'HCFS':
    # create core-site only if the hadoop config directory exists
     # create core-site only if the hadoop config diretory exists
     XmlConfig("core-site.xml",
     XmlConfig("core-site.xml",
               conf_dir=params.hadoop_conf_dir,
               conf_dir=params.hadoop_conf_dir,

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/hook.py

@@ -27,7 +27,7 @@ class BeforeAnyHook(Hook):
     env.set_params(params)
     env.set_params(params)
 
 
     setup_users()
     setup_users()
-    if params.has_namenode:
+    if params.has_namenode or params.dfs_type == 'HCFS':
       setup_hadoop_env()
       setup_hadoop_env()
     setup_java()
     setup_java()
 
 

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py

@@ -38,7 +38,7 @@ from ambari_commons.constants import AMBARI_SUDO_BINARY
 config = Script.get_config()
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 tmp_dir = Script.get_tmp_dir()
 
 
-service_type = default("serviceType","")
+dfs_type = default("/commandParams/dfs_type", "")
 
 
 artifact_dir = format("{tmp_dir}/AMBARI-artifacts/")
 artifact_dir = format("{tmp_dir}/AMBARI-artifacts/")
 jdk_name = default("/hostLevelParams/jdk_name", None)
 jdk_name = default("/hostLevelParams/jdk_name", None)
@@ -180,7 +180,7 @@ has_oozie_server = not len(oozie_servers) == 0
 has_falcon_server_hosts = not len(falcon_server_hosts) == 0
 has_falcon_server_hosts = not len(falcon_server_hosts) == 0
 has_ranger_admin = not len(ranger_admin_hosts) == 0
 has_ranger_admin = not len(ranger_admin_hosts) == 0
 
 
-if has_namenode:
+if has_namenode or dfs_type == 'HCFS':
   hadoop_conf_dir = conf_select.get_hadoop_conf_dir(force_latest_on_upgrade=True)
   hadoop_conf_dir = conf_select.get_hadoop_conf_dir(force_latest_on_upgrade=True)
 
 
 hbase_tmp_dir = "/tmp/hbase-hbase"
 hbase_tmp_dir = "/tmp/hbase-hbase"

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/shared_initialization.py

@@ -132,8 +132,8 @@ def set_uid(user, user_dirs):
 def setup_hadoop_env():
 def setup_hadoop_env():
   import params
   import params
   stackversion = params.stack_version_unformatted
   stackversion = params.stack_version_unformatted
-  Logger.info("Service Type: {0}".format(params.service_type))
-  if params.has_namenode or stackversion.find('Gluster') >= 0 or params.service_type == 'HCFS':
+  Logger.info("FS Type: {0}".format(params.dfs_type))
+  if params.has_namenode or stackversion.find('Gluster') >= 0 or params.dfs_type == 'HCFS':
     if params.security_enabled:
     if params.security_enabled:
       tc_owner = "root"
       tc_owner = "root"
     else:
     else:

+ 3 - 2
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py

@@ -28,12 +28,13 @@ from resource_management.libraries.functions.version import format_hdp_stack_ver
 from ambari_commons.os_check import OSCheck
 from ambari_commons.os_check import OSCheck
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.script.script import Script
 
 
-
 config = Script.get_config()
 config = Script.get_config()
 
 
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
 hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
 
 
+dfs_type = default("/commandParams/dfs_type", "")
+
 # hadoop default params
 # hadoop default params
 mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
 mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
 
 
@@ -108,7 +109,7 @@ metrics_collection_period = default("/configurations/ams-site/timeline.metrics.s
 
 
 #hadoop params
 #hadoop params
 
 
-if has_namenode:
+if has_namenode or dfs_type == 'HCFS':
   hadoop_tmp_dir = format("/tmp/hadoop-{hdfs_user}")
   hadoop_tmp_dir = format("/tmp/hadoop-{hdfs_user}")
   hadoop_conf_dir = conf_select.get_hadoop_conf_dir(force_latest_on_upgrade=True)
   hadoop_conf_dir = conf_select.get_hadoop_conf_dir(force_latest_on_upgrade=True)
   task_log4j_properties_location = os.path.join(hadoop_conf_dir, "task-log4j.properties")
   task_log4j_properties_location = os.path.join(hadoop_conf_dir, "task-log4j.properties")

+ 6 - 5
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py

@@ -35,7 +35,7 @@ def setup_hadoop():
   )
   )
 
 
   #directories
   #directories
-  if params.has_namenode:
+  if params.has_namenode or params.dfs_type == 'HCFS':
     Directory(params.hdfs_log_dir_prefix,
     Directory(params.hdfs_log_dir_prefix,
               recursive=True,
               recursive=True,
               owner='root',
               owner='root',
@@ -43,12 +43,13 @@ def setup_hadoop():
               mode=0775,
               mode=0775,
               cd_access='a',
               cd_access='a',
     )
     )
-    Directory(params.hadoop_pid_dir_prefix,
+    if params.has_namenode:
+      Directory(params.hadoop_pid_dir_prefix,
               recursive=True,
               recursive=True,
               owner='root',
               owner='root',
               group='root',
               group='root',
               cd_access='a',
               cd_access='a',
-    )
+      )
     Directory(params.hadoop_tmp_dir,
     Directory(params.hadoop_tmp_dir,
               recursive=True,
               recursive=True,
               owner=params.hdfs_user,
               owner=params.hdfs_user,
@@ -61,7 +62,7 @@ def setup_hadoop():
       tc_owner = params.hdfs_user
       tc_owner = params.hdfs_user
       
       
     # if WebHDFS is not enabled we need this jar to create hadoop folders.
     # if WebHDFS is not enabled we need this jar to create hadoop folders.
-    if not WebHDFSUtil.is_webhdfs_available(params.is_webhdfs_enabled, params.default_fs):
+    if params.dfs_type == 'HCFS' or not WebHDFSUtil.is_webhdfs_available(params.is_webhdfs_enabled, params.default_fs):
       # for source-code of jar goto contrib/fast-hdfs-resource
       # for source-code of jar goto contrib/fast-hdfs-resource
       File(format("{ambari_libs_dir}/fast-hdfs-resource.jar"),
       File(format("{ambari_libs_dir}/fast-hdfs-resource.jar"),
            mode=0644,
            mode=0644,
@@ -107,7 +108,7 @@ def setup_configs():
   """
   """
   import params
   import params
 
 
-  if params.has_namenode:
+  if params.has_namenode or params.dfs_type == 'HCFS':
     if os.path.exists(params.hadoop_conf_dir):
     if os.path.exists(params.hadoop_conf_dir):
       File(params.task_log4j_properties_location,
       File(params.task_log4j_properties_location,
            content=StaticFile("task-log4j.properties"),
            content=StaticFile("task-log4j.properties"),

+ 2 - 0
ambari-server/src/test/python/stacks/2.0.6/AMBARI_METRICS/test_metrics_collector.py

@@ -217,6 +217,7 @@ class TestOozieClient(RMFTestCase):
                                 keytab = UnknownConfigurationMock(),
                                 keytab = UnknownConfigurationMock(),
                                 kinit_path_local = '/usr/bin/kinit',
                                 kinit_path_local = '/usr/bin/kinit',
                                 user = 'hdfs',
                                 user = 'hdfs',
+                                dfs_type = '',
                                 owner = 'ams',
                                 owner = 'ams',
                                 mode = 0775,
                                 mode = 0775,
                                 hadoop_conf_dir = '/etc/hadoop/conf',
                                 hadoop_conf_dir = '/etc/hadoop/conf',
@@ -232,6 +233,7 @@ class TestOozieClient(RMFTestCase):
                                 keytab = UnknownConfigurationMock(),
                                 keytab = UnknownConfigurationMock(),
                                 kinit_path_local = '/usr/bin/kinit',
                                 kinit_path_local = '/usr/bin/kinit',
                                 user = 'hdfs',
                                 user = 'hdfs',
+                                dfs_type = '',
                                 owner = 'ams',
                                 owner = 'ams',
                                 mode = 0711,
                                 mode = 0711,
                                 hadoop_conf_dir = '/etc/hadoop/conf',
                                 hadoop_conf_dir = '/etc/hadoop/conf',

+ 9 - 0
ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py

@@ -281,6 +281,7 @@ class TestHBaseMaster(RMFTestCase):
         
         
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         owner = 'hbase',
         owner = 'hbase',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         type = 'directory',
@@ -293,6 +294,7 @@ class TestHBaseMaster(RMFTestCase):
         
         
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         owner = 'hbase',
         owner = 'hbase',
         hadoop_bin_dir = '/usr/bin',
         hadoop_bin_dir = '/usr/bin',
         type = 'directory',
         type = 'directory',
@@ -306,6 +308,7 @@ class TestHBaseMaster(RMFTestCase):
         
         
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     )
@@ -397,6 +400,7 @@ class TestHBaseMaster(RMFTestCase):
         
         
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         owner = 'hbase',
         owner = 'hbase',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         type = 'directory',
@@ -409,6 +413,7 @@ class TestHBaseMaster(RMFTestCase):
         
         
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         owner = 'hbase',
         owner = 'hbase',
         hadoop_bin_dir = '/usr/bin',
         hadoop_bin_dir = '/usr/bin',
         type = 'directory',
         type = 'directory',
@@ -422,6 +427,7 @@ class TestHBaseMaster(RMFTestCase):
         
         
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
         action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     )
@@ -524,6 +530,7 @@ class TestHBaseMaster(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         principal_name = UnknownConfigurationMock(),
         principal_name = UnknownConfigurationMock(),
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         owner = 'hbase',
         owner = 'hbase',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         type = 'directory',
         type = 'directory',
@@ -538,6 +545,7 @@ class TestHBaseMaster(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         principal_name = UnknownConfigurationMock(),
         principal_name = UnknownConfigurationMock(),
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         owner = 'hbase',
         owner = 'hbase',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         type = 'directory',
         type = 'directory',
@@ -553,6 +561,7 @@ class TestHBaseMaster(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         principal_name = UnknownConfigurationMock(),
         principal_name = UnknownConfigurationMock(),
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         action = ['execute'],
         action = ['execute'],
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
     )
     )

+ 24 - 0
ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py

@@ -104,6 +104,7 @@ class TestNamenode(RMFTestCase):
         principal_name = None,
         principal_name = None,
         user = 'hdfs',
         user = 'hdfs',
         owner = 'hdfs',
         owner = 'hdfs',
+        dfs_type = '',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         type = 'directory',
         action = ['create_on_execute'],
         action = ['create_on_execute'],
@@ -119,6 +120,7 @@ class TestNamenode(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         principal_name = None,
         principal_name = None,
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         owner = 'ambari-qa',
         owner = 'ambari-qa',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         type = 'directory',
@@ -135,6 +137,7 @@ class TestNamenode(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         principal_name = None,
         principal_name = None,
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         action = ['execute'],
         action = ['execute'],
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     )
@@ -214,6 +217,7 @@ class TestNamenode(RMFTestCase):
         principal_name = None,
         principal_name = None,
         user = 'hdfs',
         user = 'hdfs',
         owner = 'hdfs',
         owner = 'hdfs',
+        dfs_type = '',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         type = 'directory',
         action = ['create_on_execute'],
         action = ['create_on_execute'],
@@ -230,6 +234,7 @@ class TestNamenode(RMFTestCase):
         principal_name = None,
         principal_name = None,
         user = 'hdfs',
         user = 'hdfs',
         owner = 'ambari-qa',
         owner = 'ambari-qa',
+        dfs_type = '',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         type = 'directory',
         action = ['create_on_execute'],
         action = ['create_on_execute'],
@@ -245,6 +250,7 @@ class TestNamenode(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         principal_name = None,
         principal_name = None,
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         action = ['execute'],
         action = ['execute'],
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     )
@@ -343,6 +349,7 @@ class TestNamenode(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
         owner = 'hdfs',
         owner = 'hdfs',
+        dfs_type = '',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         type = 'directory',
         action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
         action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
@@ -355,6 +362,7 @@ class TestNamenode(RMFTestCase):
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         owner = 'ambari-qa',
         owner = 'ambari-qa',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         type = 'directory',
@@ -369,6 +377,7 @@ class TestNamenode(RMFTestCase):
         hadoop_bin_dir = '/usr/bin',
         hadoop_bin_dir = '/usr/bin',
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
         action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     )
@@ -446,6 +455,7 @@ class TestNamenode(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         principal_name = None,
         principal_name = None,
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         owner = 'hdfs',
         owner = 'hdfs',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         type = 'directory',
@@ -462,6 +472,7 @@ class TestNamenode(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         principal_name = None,
         principal_name = None,
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         owner = 'ambari-qa',
         owner = 'ambari-qa',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         type = 'directory',
@@ -478,6 +489,7 @@ class TestNamenode(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         principal_name = None,
         principal_name = None,
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         action = ['execute'],
         action = ['execute'],
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     )
@@ -537,6 +549,7 @@ class TestNamenode(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         principal_name = 'hdfs',
         principal_name = 'hdfs',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         owner = 'hdfs',
         owner = 'hdfs',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         type = 'directory',
@@ -553,6 +566,7 @@ class TestNamenode(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         principal_name = 'hdfs',
         principal_name = 'hdfs',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         owner = 'ambari-qa',
         owner = 'ambari-qa',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         type = 'directory',
@@ -569,6 +583,7 @@ class TestNamenode(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         principal_name = 'hdfs',
         principal_name = 'hdfs',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         action = ['execute'],
         action = ['execute'],
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     )
@@ -637,6 +652,7 @@ class TestNamenode(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         principal_name = None,
         principal_name = None,
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         owner = 'hdfs',
         owner = 'hdfs',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         type = 'directory',
@@ -653,6 +669,7 @@ class TestNamenode(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         principal_name = None,
         principal_name = None,
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         owner = 'ambari-qa',
         owner = 'ambari-qa',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         type = 'directory',
@@ -669,6 +686,7 @@ class TestNamenode(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         principal_name = None,
         principal_name = None,
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         action = ['execute'],
         action = ['execute'],
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     )
@@ -736,6 +754,7 @@ class TestNamenode(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         principal_name = None,
         principal_name = None,
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         owner = 'hdfs',
         owner = 'hdfs',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         type = 'directory',
@@ -752,6 +771,7 @@ class TestNamenode(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         principal_name = None,
         principal_name = None,
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         owner = 'ambari-qa',
         owner = 'ambari-qa',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         type = 'directory',
@@ -768,6 +788,7 @@ class TestNamenode(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         principal_name = None,
         principal_name = None,
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         action = ['execute'],
         action = ['execute'],
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     )
@@ -843,6 +864,7 @@ class TestNamenode(RMFTestCase):
                               kinit_path_local = '/usr/bin/kinit',
                               kinit_path_local = '/usr/bin/kinit',
                               principal_name = None,
                               principal_name = None,
                               user = 'hdfs',
                               user = 'hdfs',
+                              dfs_type = '',
                               owner = 'hdfs',
                               owner = 'hdfs',
                               hadoop_conf_dir = '/etc/hadoop/conf',
                               hadoop_conf_dir = '/etc/hadoop/conf',
                               type = 'directory',
                               type = 'directory',
@@ -859,6 +881,7 @@ class TestNamenode(RMFTestCase):
                               kinit_path_local = '/usr/bin/kinit',
                               kinit_path_local = '/usr/bin/kinit',
                               principal_name = None,
                               principal_name = None,
                               user = 'hdfs',
                               user = 'hdfs',
+                              dfs_type = '',
                               owner = 'ambari-qa',
                               owner = 'ambari-qa',
                               hadoop_conf_dir = '/etc/hadoop/conf',
                               hadoop_conf_dir = '/etc/hadoop/conf',
                               type = 'directory',
                               type = 'directory',
@@ -875,6 +898,7 @@ class TestNamenode(RMFTestCase):
                               kinit_path_local = '/usr/bin/kinit',
                               kinit_path_local = '/usr/bin/kinit',
                               principal_name = None,
                               principal_name = None,
                               user = 'hdfs',
                               user = 'hdfs',
+                              dfs_type = '',
                               action = ['execute'],
                               action = ['execute'],
                               hadoop_conf_dir = '/etc/hadoop/conf',
                               hadoop_conf_dir = '/etc/hadoop/conf',
                               )
                               )

+ 4 - 0
ambari-server/src/test/python/stacks/2.0.6/HDFS/test_service_check.py

@@ -68,6 +68,7 @@ class TestServiceCheck(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         principal_name = None,
         principal_name = None,
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         action = ['create_on_execute'],
         action = ['create_on_execute'],
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         type = 'directory',
@@ -82,6 +83,7 @@ class TestServiceCheck(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         principal_name = None,
         principal_name = None,
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         action = ['delete_on_execute'],
         action = ['delete_on_execute'],
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'file',
         type = 'file',
@@ -96,6 +98,7 @@ class TestServiceCheck(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         principal_name = None,
         principal_name = None,
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         action = ['create_on_execute'],
         action = ['create_on_execute'],
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'file',
         type = 'file',
@@ -109,6 +112,7 @@ class TestServiceCheck(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         principal_name = None,
         principal_name = None,
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         action = ['execute'],
         action = ['execute'],
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     )

+ 14 - 0
ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py

@@ -299,6 +299,7 @@ class TestHiveServer(RMFTestCase):
         keytab = UnknownConfigurationMock(),
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         owner = 'hcat',
         owner = 'hcat',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         type = 'directory',
@@ -311,6 +312,7 @@ class TestHiveServer(RMFTestCase):
         keytab = UnknownConfigurationMock(),
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         owner = 'hcat',
         owner = 'hcat',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         type = 'directory',
@@ -328,6 +330,7 @@ class TestHiveServer(RMFTestCase):
         keytab = UnknownConfigurationMock(),
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         owner = 'hive',
         owner = 'hive',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         type = 'directory',
@@ -340,6 +343,7 @@ class TestHiveServer(RMFTestCase):
         keytab = UnknownConfigurationMock(),
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         owner = 'hive',
         owner = 'hive',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         type = 'directory',
@@ -353,6 +357,7 @@ class TestHiveServer(RMFTestCase):
           keytab = UnknownConfigurationMock(),
           keytab = UnknownConfigurationMock(),
           kinit_path_local = '/usr/bin/kinit',
           kinit_path_local = '/usr/bin/kinit',
           user = 'hdfs',
           user = 'hdfs',
+          dfs_type = '',
           owner = 'hive',
           owner = 'hive',
           group = 'hdfs',
           group = 'hdfs',
           hadoop_bin_dir = '/usr/bin',
           hadoop_bin_dir = '/usr/bin',
@@ -366,6 +371,7 @@ class TestHiveServer(RMFTestCase):
         keytab = UnknownConfigurationMock(),
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs='hdfs://c6401.ambari.apache.org:8020',
         action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     )
@@ -481,6 +487,7 @@ class TestHiveServer(RMFTestCase):
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         owner = 'hcat',
         owner = 'hcat',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         type = 'directory',
@@ -493,6 +500,7 @@ class TestHiveServer(RMFTestCase):
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         owner = 'hcat',
         owner = 'hcat',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         type = 'directory',
@@ -506,6 +514,7 @@ class TestHiveServer(RMFTestCase):
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         owner = 'hive',
         owner = 'hive',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         type = 'directory',
@@ -518,6 +527,7 @@ class TestHiveServer(RMFTestCase):
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         owner = 'hive',
         owner = 'hive',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         type = 'directory',
@@ -530,6 +540,7 @@ class TestHiveServer(RMFTestCase):
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         owner = 'hive',
         owner = 'hive',
         group = 'hdfs',
         group = 'hdfs',
         hadoop_bin_dir = '/usr/bin',
         hadoop_bin_dir = '/usr/bin',
@@ -543,6 +554,7 @@ class TestHiveServer(RMFTestCase):
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
         action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     )
@@ -876,6 +888,7 @@ From source with checksum 150f554beae04f76f814f59549dead8b"""
         keytab = UnknownConfigurationMock(),
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs='hdfs://c6401.ambari.apache.org:8020',
         action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
     )
     )
@@ -914,6 +927,7 @@ From source with checksum 150f554beae04f76f814f59549dead8b"""
         keytab = UnknownConfigurationMock(),
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs='hdfs://c6401.ambari.apache.org:8020',
         action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
     )
     )

+ 6 - 0
ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py

@@ -99,6 +99,7 @@ class TestServiceCheck(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         principal_name = 'missing_principal',
         principal_name = 'missing_principal',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         owner = 'ambari-qa',
         owner = 'ambari-qa',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'file',
         type = 'file',
@@ -114,6 +115,7 @@ class TestServiceCheck(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         principal_name = 'missing_principal',
         principal_name = 'missing_principal',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         owner = 'ambari-qa',
         owner = 'ambari-qa',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'file',
         type = 'file',
@@ -128,6 +130,7 @@ class TestServiceCheck(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         principal_name = 'missing_principal',
         principal_name = 'missing_principal',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         action = ['execute'],
         action = ['execute'],
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     )
@@ -209,6 +212,7 @@ class TestServiceCheck(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         principal_name = 'hdfs',
         principal_name = 'hdfs',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         owner = 'ambari-qa',
         owner = 'ambari-qa',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'file',
         type = 'file',
@@ -224,6 +228,7 @@ class TestServiceCheck(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         principal_name = 'hdfs',
         principal_name = 'hdfs',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         owner = 'ambari-qa',
         owner = 'ambari-qa',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'file',
         type = 'file',
@@ -238,6 +243,7 @@ class TestServiceCheck(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         principal_name = 'hdfs',
         principal_name = 'hdfs',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         action = ['execute'],
         action = ['execute'],
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     )

+ 17 - 1
ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py

@@ -69,6 +69,7 @@ class TestOozieServer(RMFTestCase):
         keytab = UnknownConfigurationMock(),
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         owner = 'oozie',
         owner = 'oozie',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         type = 'directory',
@@ -81,6 +82,7 @@ class TestOozieServer(RMFTestCase):
         keytab = UnknownConfigurationMock(),
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     )
@@ -272,6 +274,7 @@ class TestOozieServer(RMFTestCase):
                               kinit_path_local = '/usr/bin/kinit',
                               kinit_path_local = '/usr/bin/kinit',
                               principal_name = UnknownConfigurationMock(),
                               principal_name = UnknownConfigurationMock(),
                               user = 'hdfs',
                               user = 'hdfs',
+                              dfs_type = '',
                               owner = 'oozie',
                               owner = 'oozie',
                               hadoop_conf_dir = '/etc/hadoop/conf',
                               hadoop_conf_dir = '/etc/hadoop/conf',
                               type = 'directory',
                               type = 'directory',
@@ -287,6 +290,7 @@ class TestOozieServer(RMFTestCase):
                               kinit_path_local = '/usr/bin/kinit',
                               kinit_path_local = '/usr/bin/kinit',
                               principal_name = UnknownConfigurationMock(),
                               principal_name = UnknownConfigurationMock(),
                               user = 'hdfs',
                               user = 'hdfs',
+                              dfs_type = '',
                               action = ['execute'],
                               action = ['execute'],
                               hadoop_conf_dir = '/etc/hadoop/conf',
                               hadoop_conf_dir = '/etc/hadoop/conf',
                               )
                               )
@@ -487,6 +491,7 @@ class TestOozieServer(RMFTestCase):
         keytab = UnknownConfigurationMock(),
         keytab = UnknownConfigurationMock(),
         default_fs = 'hdfs://c6401.ambari.apache.org:8020',
         default_fs = 'hdfs://c6401.ambari.apache.org:8020',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         hdfs_site = self.getConfig()['configurations']['hdfs-site'],
         hdfs_site = self.getConfig()['configurations']['hdfs-site'],
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         principal_name = UnknownConfigurationMock(),
         principal_name = UnknownConfigurationMock(),
@@ -505,6 +510,7 @@ class TestOozieServer(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         principal_name = UnknownConfigurationMock(),
         principal_name = UnknownConfigurationMock(),
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         action = ['execute'],
         action = ['execute'],
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     )
@@ -576,6 +582,7 @@ class TestOozieServer(RMFTestCase):
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
         default_fs = 'hdfs://c6401.ambari.apache.org:8020',
         default_fs = 'hdfs://c6401.ambari.apache.org:8020',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         hdfs_site = self.getConfig()['configurations']['hdfs-site'],
         hdfs_site = self.getConfig()['configurations']['hdfs-site'],
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         principal_name = 'hdfs',
         principal_name = 'hdfs',
@@ -594,6 +601,7 @@ class TestOozieServer(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         principal_name = 'hdfs',
         principal_name = 'hdfs',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         action = ['execute'],
         action = ['execute'],
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     )
@@ -630,6 +638,7 @@ class TestOozieServer(RMFTestCase):
         keytab = UnknownConfigurationMock(),
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         owner = 'oozie',
         owner = 'oozie',
         hadoop_bin_dir = '/usr/bin',
         hadoop_bin_dir = '/usr/bin',
         type = 'directory',
         type = 'directory',
@@ -642,6 +651,7 @@ class TestOozieServer(RMFTestCase):
         keytab = UnknownConfigurationMock(),
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     )
@@ -806,6 +816,7 @@ class TestOozieServer(RMFTestCase):
         
         
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         owner = 'oozie',
         owner = 'oozie',
         hadoop_bin_dir = '/usr/bin',
         hadoop_bin_dir = '/usr/bin',
         type = 'directory',
         type = 'directory',
@@ -819,6 +830,7 @@ class TestOozieServer(RMFTestCase):
         
         
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
         action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     )
@@ -1339,6 +1351,7 @@ class TestOozieServer(RMFTestCase):
       keytab = UnknownConfigurationMock(),
       keytab = UnknownConfigurationMock(),
       default_fs = 'hdfs://c6401.ambari.apache.org:8020',
       default_fs = 'hdfs://c6401.ambari.apache.org:8020',
       user = 'hdfs',
       user = 'hdfs',
+      dfs_type = '',
       hdfs_site = UnknownConfigurationMock(),
       hdfs_site = UnknownConfigurationMock(),
       kinit_path_local = '/usr/bin/kinit',
       kinit_path_local = '/usr/bin/kinit',
       principal_name = UnknownConfigurationMock(),
       principal_name = UnknownConfigurationMock(),
@@ -1358,7 +1371,8 @@ class TestOozieServer(RMFTestCase):
       hdfs_site = UnknownConfigurationMock(),
       hdfs_site = UnknownConfigurationMock(),
       kinit_path_local = '/usr/bin/kinit',
       kinit_path_local = '/usr/bin/kinit',
       principal_name = UnknownConfigurationMock(),
       principal_name = UnknownConfigurationMock(),
-      user = 'hdfs',
+      user = 'hdfs', 
+      dfs_type = '',
       action = ['execute'],
       action = ['execute'],
       hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf' )
       hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf' )
 
 
@@ -1410,6 +1424,7 @@ class TestOozieServer(RMFTestCase):
       keytab = UnknownConfigurationMock(),
       keytab = UnknownConfigurationMock(),
       default_fs = 'hdfs://c6401.ambari.apache.org:8020',
       default_fs = 'hdfs://c6401.ambari.apache.org:8020',
       user = 'hdfs',
       user = 'hdfs',
+      dfs_type = '',
       hdfs_site = UnknownConfigurationMock(),
       hdfs_site = UnknownConfigurationMock(),
       kinit_path_local = '/usr/bin/kinit',
       kinit_path_local = '/usr/bin/kinit',
       principal_name = UnknownConfigurationMock(),
       principal_name = UnknownConfigurationMock(),
@@ -1430,6 +1445,7 @@ class TestOozieServer(RMFTestCase):
       kinit_path_local = '/usr/bin/kinit',
       kinit_path_local = '/usr/bin/kinit',
       principal_name = UnknownConfigurationMock(),
       principal_name = UnknownConfigurationMock(),
       user = 'hdfs',
       user = 'hdfs',
+      dfs_type = '',
       action = ['execute'],
       action = ['execute'],
       hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf' )
       hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf' )
 
 

+ 9 - 0
ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_service_check.py

@@ -88,8 +88,10 @@ class TestServiceCheck(RMFTestCase):
         keytab = UnknownConfigurationMock(),
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         action = ['delete_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         action = ['delete_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
+        dfs_type = '',
         type = 'directory',
         type = 'directory',
     )
     )
     self.assertResourceCalled('HdfsResource', '/user/ambari-qa/examples',
     self.assertResourceCalled('HdfsResource', '/user/ambari-qa/examples',
@@ -99,10 +101,12 @@ class TestServiceCheck(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         source = '//examples',
         source = '//examples',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         type = 'directory',
         owner = 'ambari-qa',
         owner = 'ambari-qa',
+        dfs_type = '',
         group = 'hadoop'
         group = 'hadoop'
     )
     )
     self.assertResourceCalled('HdfsResource', '/user/ambari-qa/input-data',
     self.assertResourceCalled('HdfsResource', '/user/ambari-qa/input-data',
@@ -111,8 +115,10 @@ class TestServiceCheck(RMFTestCase):
         keytab = UnknownConfigurationMock(),
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         action = ['delete_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         action = ['delete_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
+        dfs_type = '',
         type = 'directory',
         type = 'directory',
     )
     )
     self.assertResourceCalled('HdfsResource', '/user/ambari-qa/input-data',
     self.assertResourceCalled('HdfsResource', '/user/ambari-qa/input-data',
@@ -122,10 +128,12 @@ class TestServiceCheck(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         source = '//examples/input-data',
         source = '//examples/input-data',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         type = 'directory',
         owner = 'ambari-qa',
         owner = 'ambari-qa',
+        dfs_type = '',
         group = 'hadoop'
         group = 'hadoop'
     )
     )
     self.assertResourceCalled('HdfsResource', None,
     self.assertResourceCalled('HdfsResource', None,
@@ -134,6 +142,7 @@ class TestServiceCheck(RMFTestCase):
         keytab = UnknownConfigurationMock(),
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     )

+ 6 - 0
ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_service_check.py

@@ -43,6 +43,7 @@ class TestPigServiceCheck(RMFTestCase):
         user = 'hdfs',
         user = 'hdfs',
         owner = 'ambari-qa',
         owner = 'ambari-qa',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
+        dfs_type = '',
         type = 'directory',
         type = 'directory',
         action = ['delete_on_execute'],
         action = ['delete_on_execute'],
     )
     )
@@ -58,6 +59,7 @@ class TestPigServiceCheck(RMFTestCase):
         user = 'hdfs',
         user = 'hdfs',
         owner = 'ambari-qa',
         owner = 'ambari-qa',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
+        dfs_type = '',
         type = 'file',
         type = 'file',
         action = ['create_on_execute'],
         action = ['create_on_execute'],
     )
     )
@@ -70,6 +72,7 @@ class TestPigServiceCheck(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         principal_name = UnknownConfigurationMock(),
         principal_name = UnknownConfigurationMock(),
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         action = ['execute'],
         action = ['execute'],
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     )
@@ -111,6 +114,7 @@ class TestPigServiceCheck(RMFTestCase):
         hdfs_site = self.getConfig()['configurations']['hdfs-site'],
         hdfs_site = self.getConfig()['configurations']['hdfs-site'],
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         principal_name = 'hdfs',
         principal_name = 'hdfs',
+        dfs_type = '',
         user = 'hdfs',
         user = 'hdfs',
         owner = 'ambari-qa',
         owner = 'ambari-qa',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
@@ -128,6 +132,7 @@ class TestPigServiceCheck(RMFTestCase):
         principal_name = 'hdfs',
         principal_name = 'hdfs',
         user = 'hdfs',
         user = 'hdfs',
         owner = 'ambari-qa',
         owner = 'ambari-qa',
+        dfs_type = '',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'file',
         type = 'file',
         action = ['create_on_execute'],
         action = ['create_on_execute'],
@@ -141,6 +146,7 @@ class TestPigServiceCheck(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         principal_name = 'hdfs',
         principal_name = 'hdfs',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         action = ['execute'],
         action = ['execute'],
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     )

+ 15 - 1
ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py

@@ -61,6 +61,7 @@ class TestHistoryServer(RMFTestCase):
                           type="directory",
                           type="directory",
                           action=["create_on_execute"],
                           action=["create_on_execute"],
                           user=u"hdfs",
                           user=u"hdfs",
+                          dfs_type = '',
                           owner=u"tez",
                           owner=u"tez",
                           mode=493,
                           mode=493,
                           hadoop_bin_dir="/usr/bin",
                           hadoop_bin_dir="/usr/bin",
@@ -78,6 +79,7 @@ class TestHistoryServer(RMFTestCase):
                               action=["create_on_execute"],
                               action=["create_on_execute"],
                               user=u'hdfs',
                               user=u'hdfs',
                               owner=u"tez",
                               owner=u"tez",
+                              dfs_type = '',
                               mode=493,
                               mode=493,
                               hadoop_bin_dir="/usr/bin",
                               hadoop_bin_dir="/usr/bin",
                               hadoop_conf_dir="/etc/hadoop/conf",
                               hadoop_conf_dir="/etc/hadoop/conf",
@@ -93,6 +95,7 @@ class TestHistoryServer(RMFTestCase):
                               action=['execute'],
                               action=['execute'],
                               user=u'hdfs',
                               user=u'hdfs',
                               hadoop_bin_dir="/usr/bin",
                               hadoop_bin_dir="/usr/bin",
+                              dfs_type = '',
                               hadoop_conf_dir="/etc/hadoop/conf",
                               hadoop_conf_dir="/etc/hadoop/conf",
                               hdfs_site=self.getConfig()["configurations"]["hdfs-site"],
                               hdfs_site=self.getConfig()["configurations"]["hdfs-site"],
                               default_fs=u'hdfs://c6401.ambari.apache.org:8020',
                               default_fs=u'hdfs://c6401.ambari.apache.org:8020',
@@ -193,6 +196,7 @@ class TestHistoryServer(RMFTestCase):
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
         keytab = UnknownConfigurationMock(),
         keytab = UnknownConfigurationMock(),
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         recursive_chmod = True,
         recursive_chmod = True,
         owner = 'yarn',
         owner = 'yarn',
@@ -207,7 +211,8 @@ class TestHistoryServer(RMFTestCase):
         hadoop_bin_dir = '/usr/bin',
         hadoop_bin_dir = '/usr/bin',
         keytab = UnknownConfigurationMock(),
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
+        user = 'hdfs', 
+        dfs_type = '',
         owner = 'mapred',
         owner = 'mapred',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         type = 'directory',
@@ -219,6 +224,7 @@ class TestHistoryServer(RMFTestCase):
         keytab = UnknownConfigurationMock(),
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         owner = 'hdfs',
         owner = 'hdfs',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         type = 'directory',
@@ -231,6 +237,7 @@ class TestHistoryServer(RMFTestCase):
         change_permissions_for_parents = True,
         change_permissions_for_parents = True,
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         owner = 'mapred',
         owner = 'mapred',
         group = 'hadoop',
         group = 'hadoop',
         hadoop_bin_dir = '/usr/bin',
         hadoop_bin_dir = '/usr/bin',
@@ -244,6 +251,7 @@ class TestHistoryServer(RMFTestCase):
         keytab = UnknownConfigurationMock(),
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     )
@@ -416,6 +424,7 @@ class TestHistoryServer(RMFTestCase):
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         recursive_chmod = True,
         recursive_chmod = True,
         owner = 'yarn',
         owner = 'yarn',
@@ -431,6 +440,7 @@ class TestHistoryServer(RMFTestCase):
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         owner = 'mapred',
         owner = 'mapred',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         type = 'directory',
@@ -442,6 +452,7 @@ class TestHistoryServer(RMFTestCase):
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         owner = 'hdfs',
         owner = 'hdfs',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         type = 'directory',
@@ -454,6 +465,7 @@ class TestHistoryServer(RMFTestCase):
         change_permissions_for_parents = True,
         change_permissions_for_parents = True,
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         owner = 'mapred',
         owner = 'mapred',
         group = 'hadoop',
         group = 'hadoop',
         hadoop_bin_dir = '/usr/bin',
         hadoop_bin_dir = '/usr/bin',
@@ -467,6 +479,7 @@ class TestHistoryServer(RMFTestCase):
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
         action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     )
@@ -775,6 +788,7 @@ class TestHistoryServer(RMFTestCase):
         keytab = UnknownConfigurationMock(),
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
     )
     )

+ 6 - 0
ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_service_check.py

@@ -45,6 +45,7 @@ class TestServiceCheck(RMFTestCase):
         keytab = UnknownConfigurationMock(),
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         action = ['delete_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         action = ['delete_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         type = 'directory',
@@ -56,6 +57,7 @@ class TestServiceCheck(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         source = '/etc/passwd',
         source = '/etc/passwd',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'file',
         type = 'file',
@@ -66,6 +68,7 @@ class TestServiceCheck(RMFTestCase):
         keytab = UnknownConfigurationMock(),
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     )
@@ -99,6 +102,7 @@ class TestServiceCheck(RMFTestCase):
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         action = ['delete_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
         action = ['delete_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         type = 'directory',
@@ -110,6 +114,7 @@ class TestServiceCheck(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         source = '/etc/passwd',
         source = '/etc/passwd',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
         action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'file',
         type = 'file',
@@ -120,6 +125,7 @@ class TestServiceCheck(RMFTestCase):
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
         action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     )

+ 6 - 0
ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py

@@ -133,6 +133,7 @@ class TestFalconServer(RMFTestCase):
         keytab = UnknownConfigurationMock(),
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         owner = 'falcon',
         owner = 'falcon',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         type = 'directory',
@@ -149,6 +150,7 @@ class TestFalconServer(RMFTestCase):
         keytab = UnknownConfigurationMock(),
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         owner = 'falcon',
         owner = 'falcon',
         group='users',
         group='users',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
@@ -166,6 +168,7 @@ class TestFalconServer(RMFTestCase):
         keytab = UnknownConfigurationMock(),
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     )
@@ -312,6 +315,7 @@ class TestFalconServer(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         principal_name = UnknownConfigurationMock(),
         principal_name = UnknownConfigurationMock(),
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         owner = 'falcon',
         owner = 'falcon',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         type = 'directory',
         type = 'directory',
@@ -329,6 +333,7 @@ class TestFalconServer(RMFTestCase):
         source = '/usr/hdp/current/falcon-server/data-mirroring',
         source = '/usr/hdp/current/falcon-server/data-mirroring',
         default_fs = 'hdfs://c6401.ambari.apache.org:8020',
         default_fs = 'hdfs://c6401.ambari.apache.org:8020',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         hdfs_site = self.getConfig()['configurations']['hdfs-site'],
         hdfs_site = self.getConfig()['configurations']['hdfs-site'],
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         principal_name = UnknownConfigurationMock(),
         principal_name = UnknownConfigurationMock(),
@@ -350,6 +355,7 @@ class TestFalconServer(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         principal_name = UnknownConfigurationMock(),
         principal_name = UnknownConfigurationMock(),
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         action = ['execute'],
         action = ['execute'],
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
     )
     )

+ 3 - 0
ambari-server/src/test/python/stacks/2.1/TEZ/test_service_check.py

@@ -43,6 +43,7 @@ class TestTezServiceCheck(RMFTestCase):
         keytab = UnknownConfigurationMock(),
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         owner = 'ambari-qa',
         owner = 'ambari-qa',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         type = 'directory',
@@ -55,6 +56,7 @@ class TestTezServiceCheck(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         source = '/tmp/sample-tez-test',
         source = '/tmp/sample-tez-test',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         owner = 'ambari-qa',
         owner = 'ambari-qa',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'file',
         type = 'file',
@@ -66,6 +68,7 @@ class TestTezServiceCheck(RMFTestCase):
         keytab = UnknownConfigurationMock(),
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/etc/hadoop/conf',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     )

+ 6 - 0
ambari-server/src/test/python/stacks/2.2/PIG/test_pig_service_check.py

@@ -46,6 +46,7 @@ class TestPigServiceCheck(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         principal_name = 'hdfs@EXAMPLE.COM',
         principal_name = 'hdfs@EXAMPLE.COM',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         owner = 'ambari-qa',
         owner = 'ambari-qa',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         type = 'directory',
         type = 'directory',
@@ -61,6 +62,7 @@ class TestPigServiceCheck(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         principal_name = 'hdfs@EXAMPLE.COM',
         principal_name = 'hdfs@EXAMPLE.COM',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         owner = 'ambari-qa',
         owner = 'ambari-qa',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         type = 'file',
         type = 'file',
@@ -75,6 +77,7 @@ class TestPigServiceCheck(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         principal_name = 'hdfs@EXAMPLE.COM',
         principal_name = 'hdfs@EXAMPLE.COM',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         action = ['execute'],
         action = ['execute'],
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
     )
     )
@@ -106,6 +109,7 @@ class TestPigServiceCheck(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         principal_name = 'hdfs@EXAMPLE.COM',
         principal_name = 'hdfs@EXAMPLE.COM',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         owner = 'ambari-qa',
         owner = 'ambari-qa',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         type = 'directory',
         type = 'directory',
@@ -121,6 +125,7 @@ class TestPigServiceCheck(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         principal_name = 'hdfs@EXAMPLE.COM',
         principal_name = 'hdfs@EXAMPLE.COM',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         owner = 'ambari-qa',
         owner = 'ambari-qa',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         type = 'file',
         type = 'file',
@@ -137,6 +142,7 @@ class TestPigServiceCheck(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         principal_name = 'hdfs@EXAMPLE.COM',
         principal_name = 'hdfs@EXAMPLE.COM',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         action = ['execute'],
         action = ['execute'],
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
     )
     )

+ 6 - 0
ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py

@@ -62,6 +62,7 @@ class TestJobHistoryServer(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         principal_name = UnknownConfigurationMock(),
         principal_name = UnknownConfigurationMock(),
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         action = ['execute'],
         action = ['execute'],
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
     )
     )
@@ -125,6 +126,7 @@ class TestJobHistoryServer(RMFTestCase):
         kinit_path_local='/usr/bin/kinit',
         kinit_path_local='/usr/bin/kinit',
         principal_name=UnknownConfigurationMock(),
         principal_name=UnknownConfigurationMock(),
         security_enabled=True,
         security_enabled=True,
+        dfs_type = '',
         user=UnknownConfigurationMock()
         user=UnknownConfigurationMock()
     )
     )
 
 
@@ -172,6 +174,7 @@ class TestJobHistoryServer(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         principal_name = UnknownConfigurationMock(),
         principal_name = UnknownConfigurationMock(),
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         owner = 'spark',
         owner = 'spark',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         type = 'directory',
         type = 'directory',
@@ -187,6 +190,7 @@ class TestJobHistoryServer(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         principal_name = UnknownConfigurationMock(),
         principal_name = UnknownConfigurationMock(),
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         action = ['execute'],
         action = ['execute'],
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
     )
     )
@@ -237,6 +241,7 @@ class TestJobHistoryServer(RMFTestCase):
         user = UnknownConfigurationMock(),
         user = UnknownConfigurationMock(),
         owner = 'spark',
         owner = 'spark',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        dfs_type = '',
         type = 'directory',
         type = 'directory',
         action = ['create_on_execute'],
         action = ['create_on_execute'],
         mode = 0775,
         mode = 0775,
@@ -250,6 +255,7 @@ class TestJobHistoryServer(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         principal_name = UnknownConfigurationMock(),
         principal_name = UnknownConfigurationMock(),
         user = UnknownConfigurationMock(),
         user = UnknownConfigurationMock(),
+        dfs_type = '',
         action = ['execute'],
         action = ['execute'],
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
     )
     )

+ 4 - 0
ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py

@@ -46,6 +46,7 @@ class TestMahoutClient(RMFTestCase):
         keytab = UnknownConfigurationMock(),
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         action = ['delete_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         action = ['delete_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         type = 'directory',
         type = 'directory',
@@ -56,6 +57,7 @@ class TestMahoutClient(RMFTestCase):
         keytab = UnknownConfigurationMock(),
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         owner = 'ambari-qa',
         owner = 'ambari-qa',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         type = 'directory',
         type = 'directory',
@@ -68,6 +70,7 @@ class TestMahoutClient(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         source = '/tmp/sample-mahout-test.txt',
         source = '/tmp/sample-mahout-test.txt',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         owner = 'ambari-qa',
         owner = 'ambari-qa',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         type = 'file',
         type = 'file',
@@ -79,6 +82,7 @@ class TestMahoutClient(RMFTestCase):
         keytab = UnknownConfigurationMock(),
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         user = 'hdfs',
+        dfs_type = '',
         action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
     )
     )

+ 15 - 0
ambari-web/app/data/HDP2/site_properties.js

@@ -3118,6 +3118,21 @@ var hdp2properties = [
     "filename": "hbase-env.xml",
     "filename": "hbase-env.xml",
     "category": "Advanced hbase-env"
     "category": "Advanced hbase-env"
   },
   },
+  //***************************************** ECS stack********************************************
+  {
+    "id": "puppet var",
+    "name": "hdfs_log_dir_prefix",
+    "displayName": "Hadoop Log Dir Prefix",
+    "description": "The parent directory for Hadoop log files.  The HDFS log directory will be ${hadoop_log_dir_prefix} / ${hdfs_user} and the MapReduce log directory will be ${hadoop_log_dir_prefix} / ${mapred_user}.",
+    "recommendedValue": "/var/log/hadoop",
+    "isReconfigurable": false,
+    "displayType": "directory",
+    "isOverridable": false,
+    "isVisible": false,
+    "serviceName": "ECS",
+    "filename": "hadoop-env.xml",
+    "category": "General Hadoop"
+  },
   //***************************************** GLUSTERFS stack********************************************
   //***************************************** GLUSTERFS stack********************************************
   {
   {
     "id": "site property",
     "id": "site property",

+ 6 - 2
contrib/fast-hdfs-resource/src/main/java/org/apache/ambari/fast_hdfs_resource/Runner.java

@@ -82,8 +82,12 @@ public class Runner {
           dfs.delete(pathHadoop, true);
           dfs.delete(pathHadoop, true);
         }
         }
       }
       }
-
-    } finally {
+    } 
+    catch(Exception e) {
+       System.out.println("Exception occurred, Reason: " + e.getMessage());
+       e.printStackTrace();
+    }
+    finally {
       dfs.close();
       dfs.close();
     }
     }