AMBARI-7115. Stack Service Pluggability: Adding service to a stack requires editing hooks folder (aonishuk)

Andrew Onishuk 11 years ago
parent commit a940986517
56 changed files with 247 additions and 303 deletions
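
In brief, as reconstructed from the diffs below: service configuration XML now tags the properties that name OS users and groups with a new <property-type> element (USER, GROUP; PASSWORD replaces the old <type> tag), the server aggregates those values into user_list and group_list host-level parameters, and the before-INSTALL hooks create all accounts from those lists instead of hard-coding one User/Group block per service. Adding a new service to a stack therefore no longer requires editing the hooks folder.
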
  1. +2 -0    ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
  2. +11 -0   ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
  3. +10 -7   ambari-server/src/main/java/org/apache/ambari/server/controller/StackConfigurationResponse.java
  4. +5 -2    ambari-server/src/main/java/org/apache/ambari/server/controller/StackLevelConfigurationResponse.java
  5. +2 -1    ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintResourceProvider.java
  6. +10 -1   ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClientConfigResourceProvider.java
  7. +1 -1    ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java
  8. +6 -0    ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StackConfigurationResourceProvider.java
  9. +6 -0    ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StackLevelConfigurationResourceProvider.java
  10. +2 -2   ambari-server/src/main/java/org/apache/ambari/server/orm/entities/BlueprintEntity.java
  11. +28 -0  ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
  12. +22 -12 ambari-server/src/main/java/org/apache/ambari/server/state/PropertyInfo.java
  13. +2 -0   ambari-server/src/main/resources/properties.json
  14. +2 -0   ambari-server/src/main/resources/stacks/HDP/1.3.2/configuration/cluster-env.xml
  15. +18 -11 ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/params.py
  16. +12 -99 ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/shared_initialization.py
  17. +2 -0   ambari-server/src/main/resources/stacks/HDP/1.3.2/services/GANGLIA/configuration/ganglia-env.xml
  18. +1 -0   ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/configuration/hbase-env.xml
  19. +4 -0   ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/configuration/hadoop-env.xml
  20. +0 -1   ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py
  21. +3 -0   ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/configuration/hive-env.xml
  22. +1 -1   ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/configuration/hive-site.xml
  23. +1 -0   ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/configuration/mapred-env.xml
  24. +3 -1   ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/configuration/nagios-env.xml
  25. +1 -0   ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/configuration/oozie-env.xml
  26. +1 -1   ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/configuration/oozie-site.xml
  27. +1 -0   ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/configuration/sqoop-env.xml
  28. +1 -0   ambari-server/src/main/resources/stacks/HDP/1.3.2/services/ZOOKEEPER/configuration/zookeeper-env.xml
  29. +2 -0   ambari-server/src/main/resources/stacks/HDP/2.0.6/configuration/cluster-env.xml
  30. +21 -14 ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py
  31. +11 -125 ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/shared_initialization.py
  32. +1 -0   ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/configuration/flume-env.xml
  33. +2 -0   ambari-server/src/main/resources/stacks/HDP/2.0.6/services/GANGLIA/configuration/ganglia-env.xml
  34. +1 -0   ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/configuration/hbase-env.xml
  35. +4 -0   ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/configuration/hadoop-env.xml
  36. +0 -1   ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
  37. +3 -0   ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/configuration/hive-env.xml
  38. +1 -1   ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/configuration/hive-site.xml
  39. +3 -1   ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/configuration/nagios-env.xml
  40. +1 -0   ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/configuration/oozie-env.xml
  41. +1 -1   ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/configuration/oozie-site.xml
  42. +1 -0   ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/configuration/sqoop-env.xml
  43. +1 -0   ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/configuration-mapred/mapred-env.xml
  44. +1 -0   ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/configuration/yarn-env.xml
  45. +1 -0   ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/configuration/zookeeper-env.xml
  46. +1 -1   ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/HIVE/configuration/hive-site.xml
  47. +1 -0   ambari-server/src/main/resources/stacks/HDP/2.1/services/FALCON/configuration/falcon-env.xml
  48. +1 -1   ambari-server/src/main/resources/stacks/HDP/2.1/services/HIVE/configuration/hive-site.xml
  49. +1 -0   ambari-server/src/main/resources/stacks/HDP/2.1/services/STORM/configuration/storm-env.xml
  50. +1 -0   ambari-server/src/main/resources/stacks/HDP/2.1/services/TEZ/configuration/tez-env.xml
  51. +1 -0   ambari-server/src/main/resources/stacks/HDP/2.1/services/YARN/configuration/yarn-env.xml
  52. +2 -2   ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java
  53. +3 -3   ambari-server/src/test/java/org/apache/ambari/server/api/util/StackExtensionHelperTest.java
  54. +5 -5   ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterResourceProviderTest.java
  55. +17 -7  ambari-server/src/test/java/org/apache/ambari/server/orm/entities/BlueprintEntityTest.java
  56. +1 -1   ambari-server/src/test/resources/stacks/HDP/2.0.1/services/HIVE/configuration/hive-site.xml

+ 2 - 0
ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java

@@ -303,6 +303,8 @@ public class ExecutionCommand extends AgentCommand {
     String AMBARI_DB_RCA_PASSWORD = "ambari_db_rca_password";
     String COMPONENT_CATEGORY = "component_category";
     String REFRESH_ADITIONAL_COMPONENT_TAGS = "forceRefreshConfigTags";
+    String USER_LIST = "user_list";
+    String GROUP_LIST = "group_list";
 
     String SERVICE_CHECK = "SERVICE_CHECK"; // TODO: is it standart command? maybe add it to RoleCommand enum?
     String CUSTOM_COMMAND = "custom_command";

+ 11 - 0
ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java

@@ -40,6 +40,8 @@ import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SERVICE_P
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SERVICE_REPO_INFO;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.STACK_NAME;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.STACK_VERSION;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.USER_LIST;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.GROUP_LIST;
 
 import java.io.File;
 import java.io.IOException;
@@ -107,6 +109,7 @@ import org.apache.ambari.server.state.HostState;
 import org.apache.ambari.server.state.MaintenanceState;
 import org.apache.ambari.server.state.OperatingSystemInfo;
 import org.apache.ambari.server.state.PropertyInfo;
+import org.apache.ambari.server.state.PropertyInfo.PropertyType;
 import org.apache.ambari.server.state.RepositoryInfo;
 import org.apache.ambari.server.state.Service;
 import org.apache.ambari.server.state.ServiceComponent;
@@ -1626,6 +1629,14 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
     }
     String packageList = gson.toJson(packages);
     hostParams.put(PACKAGE_LIST, packageList);
+    
+    Set<String> userSet = configHelper.getPropertyValuesWithPropertyType(stackId, PropertyType.USER, cluster);
+    String userList = gson.toJson(userSet);
+    hostParams.put(USER_LIST, userList);
+    
+    Set<String> groupSet = configHelper.getPropertyValuesWithPropertyType(stackId, PropertyType.GROUP, cluster);
+    String groupList = gson.toJson(groupSet);
+    hostParams.put(GROUP_LIST, groupList);
 
     if (configs.getServerDBName().equalsIgnoreCase(Configuration
       .ORACLE_DB_NAME)) {

+ 10 - 7
ambari-server/src/main/java/org/apache/ambari/server/controller/StackConfigurationResponse.java

@@ -20,6 +20,9 @@ package org.apache.ambari.server.controller;
 
 
 import java.util.Map;
+import java.util.Set;
+
+import org.apache.ambari.server.state.PropertyInfo.PropertyType;
 
 public class StackConfigurationResponse {
 
@@ -52,13 +55,13 @@ public class StackConfigurationResponse {
    */
   public StackConfigurationResponse(String propertyName, String propertyValue,
                                     String propertyDescription, String type,
-                                    Boolean isRequired, String propertyType, Map<String, String> propertyAttributes) {
+                                    Boolean isRequired, Set<PropertyType> propertyTypes, Map<String, String> propertyAttributes) {
     setPropertyName(propertyName);
     setPropertyValue(propertyValue);
     setPropertyDescription(propertyDescription);
     setType(type);
     setRequired(isRequired);
-    setPropertyType(propertyType);
+    setPropertyType(propertyTypes);
     setPropertyAttributes(propertyAttributes);
   }
 
@@ -71,7 +74,7 @@ public class StackConfigurationResponse {
   private String type;
   private Map<String, String> propertyAttributes;
   private Boolean isRequired;
-  private String propertyType;
+  private Set<PropertyType> propertyTypes;
 
   public String getStackName() {
     return stackName;
@@ -171,11 +174,11 @@ public class StackConfigurationResponse {
    * Get type of property as set in the stack definition.
    * @return Property type.
    */
-  public String getPropertyType() {
-    return propertyType;
+  public Set<PropertyType> getPropertyType() {
+    return propertyTypes;
   }
 
-  public void setPropertyType(String propertyType) {
-    this.propertyType = propertyType;
+  public void setPropertyType(Set<PropertyType> propertyTypes) {
+    this.propertyTypes = propertyTypes;
   }
 }

+ 5 - 2
ambari-server/src/main/java/org/apache/ambari/server/controller/StackLevelConfigurationResponse.java

@@ -20,14 +20,17 @@ package org.apache.ambari.server.controller;
 
 
 import java.util.Map;
+import java.util.Set;
+
+import org.apache.ambari.server.state.PropertyInfo.PropertyType;
 
 public class StackLevelConfigurationResponse extends StackConfigurationResponse {
   public StackLevelConfigurationResponse(String propertyName,
       String propertyValue, String propertyDescription, String type,
-      Boolean isRequired, String propertyType,
+      Boolean isRequired, Set<PropertyType> propertyTypes,
       Map<String, String> propertyAttributes) {
     super(propertyName, propertyValue, propertyDescription, type, isRequired,
-        propertyType, propertyAttributes);
+        propertyTypes, propertyAttributes);
   }
   
   public StackLevelConfigurationResponse(String propertyName, String propertyValue, String propertyDescription,

+ 2 - 1
ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintResourceProvider.java

@@ -20,6 +20,7 @@ package org.apache.ambari.server.controller.internal;
 
 import com.google.gson.Gson;
 import com.google.inject.Inject;
+
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.DuplicateResourceException;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
@@ -557,7 +558,7 @@ public class BlueprintResourceProvider extends BaseBlueprintProcessor {
               blueprint.getBlueprintName());
         }
         Map<String, Map<String, Collection<String>>> missingProperties = blueprint.validateConfigurations(
-            stackInfo, PropertyInfo.PropertyType.DEFAULT);
+            stackInfo, false);
 
         if (! missingProperties.isEmpty()) {
           throw new IllegalArgumentException("Required configurations are missing from the specified host groups: " +

+ 10 - 1
ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClientConfigResourceProvider.java

@@ -22,6 +22,7 @@ import com.google.inject.Inject;
 import com.google.inject.assistedinject.Assisted;
 import com.google.inject.assistedinject.AssistedInject;
 import com.google.inject.persist.Transactional;
+
 import org.apache.ambari.server.*;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.configuration.Configuration;
@@ -29,6 +30,7 @@ import org.apache.ambari.server.controller.*;
 import org.apache.ambari.server.controller.spi.*;
 import org.apache.ambari.server.controller.utilities.PropertyHelper;
 import org.apache.ambari.server.state.*;
+import org.apache.ambari.server.state.PropertyInfo.PropertyType;
 import org.apache.ambari.server.utils.StageUtils;
 
 import java.io.File;
@@ -36,7 +38,6 @@ import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.PrintWriter;
 import java.util.*;
-import java.util.List;
 import java.util.concurrent.TimeoutException;
 
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.*;
@@ -241,6 +242,14 @@ public class ClientConfigResourceProvider extends AbstractControllerResourceProv
       }
       String packageList = gson.toJson(packages);
       hostLevelParams.put(PACKAGE_LIST, packageList);
+      
+      Set<String> userSet = configHelper.getPropertyValuesWithPropertyType(stackId, PropertyType.USER, cluster);
+      String userList = gson.toJson(userSet);
+      hostLevelParams.put(USER_LIST, userList);
+      
+      Set<String> groupSet = configHelper.getPropertyValuesWithPropertyType(stackId, PropertyType.GROUP, cluster);
+      String groupList = gson.toJson(groupSet);
+      hostLevelParams.put(GROUP_LIST, groupList);
 
       String jsonConfigurations = null;
       Map<String, Object> commandParams = new HashMap<String, Object>();

+ 1 - 1
ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java

@@ -454,7 +454,7 @@ public class ClusterResourceProvider extends BaseBlueprintProcessor {
                                           String defaultPassword) {
 
     Map<String, Map<String, Collection<String>>> missingPasswords = blueprint.validateConfigurations(
-        stackInfo, PropertyInfo.PropertyType.PASSWORD);
+        stackInfo, true);
 
     Iterator<Map.Entry<String, Map<String, Collection<String>>>> iter;
     for(iter = missingPasswords.entrySet().iterator(); iter.hasNext(); ) {

+ 6 - 0
ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StackConfigurationResourceProvider.java

@@ -59,6 +59,9 @@ public class StackConfigurationResourceProvider extends
 
   public static final String PROPERTY_DESCRIPTION_PROPERTY_ID = PropertyHelper
       .getPropertyId("StackConfigurations", "property_description");
+  
+  public static final String PROPERTY_PROPERTY_TYPE_PROPERTY_ID = PropertyHelper
+      .getPropertyId("StackConfigurations", "property_type");
 
   public static final String PROPERTY_TYPE_PROPERTY_ID = PropertyHelper
       .getPropertyId("StackConfigurations", "type");
@@ -125,6 +128,9 @@ public class StackConfigurationResourceProvider extends
       setResourceProperty(resource, PROPERTY_DESCRIPTION_PROPERTY_ID,
           response.getPropertyDescription(), requestedIds);
       
+      setResourceProperty(resource, PROPERTY_PROPERTY_TYPE_PROPERTY_ID, 
+          response.getPropertyType(), requestedIds);
+      
       setResourceProperty(resource, PROPERTY_TYPE_PROPERTY_ID,
           response.getType(), requestedIds);
 

+ 6 - 0
ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StackLevelConfigurationResourceProvider.java

@@ -56,6 +56,9 @@ public class StackLevelConfigurationResourceProvider extends
 
   public static final String PROPERTY_DESCRIPTION_PROPERTY_ID = PropertyHelper
       .getPropertyId("StackLevelConfigurations", "property_description");
+  
+  public static final String PROPERTY_PROPERTY_TYPE_PROPERTY_ID = PropertyHelper
+      .getPropertyId("StackConfigurations", "property_type");
 
   public static final String PROPERTY_TYPE_PROPERTY_ID = PropertyHelper
       .getPropertyId("StackLevelConfigurations", "type");
@@ -119,6 +122,9 @@ public class StackLevelConfigurationResourceProvider extends
       setResourceProperty(resource, PROPERTY_DESCRIPTION_PROPERTY_ID,
           response.getPropertyDescription(), requestedIds);
       
+      setResourceProperty(resource, PROPERTY_PROPERTY_TYPE_PROPERTY_ID, 
+          response.getPropertyType(), requestedIds);
+      
       setResourceProperty(resource, PROPERTY_TYPE_PROPERTY_ID,
           response.getType(), requestedIds);
 

+ 2 - 2
ambari-server/src/main/java/org/apache/ambari/server/orm/entities/BlueprintEntity.java

@@ -172,7 +172,7 @@ public class BlueprintEntity {
    * @throws IllegalArgumentException if blueprint contains invalid information
    */
   public Map<String, Map<String, Collection<String>>> validateConfigurations(
-      AmbariMetaInfo stackInfo, PropertyInfo.PropertyType type) {
+      AmbariMetaInfo stackInfo, boolean validatePasswords) {
 
     String stackName = getStackName();
     String stackVersion = getStackVersion();
@@ -203,7 +203,7 @@ public class BlueprintEntity {
                 stackName, stackVersion, service);
 
             for (PropertyInfo propertyInfo : serviceRequirements.values()) {
-              if (propertyInfo.getType() == type) {
+              if (! (validatePasswords ^ propertyInfo.getPropertyTypes().contains(PropertyInfo.PropertyType.PASSWORD))) {
                 String configCategory = propertyInfo.getFilename();
                 if (configCategory.endsWith(".xml")) {
                   configCategory = configCategory.substring(0, configCategory.indexOf(".xml"));
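
Note on the new predicate above: "! (a ^ b)" is true exactly when a and b agree, so with validatePasswords == true the loop visits only properties typed PASSWORD (the blueprint password check driven by ClusterResourceProvider), and with validatePasswords == false only the non-password properties (the general required-configuration check driven by BlueprintResourceProvider). This replaces the old equality test against a single PropertyType.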

+ 28 - 0
ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java

@@ -46,6 +46,7 @@ import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.controller.ConfigurationRequest;
 import org.apache.ambari.server.orm.dao.ClusterDAO;
 import org.apache.ambari.server.orm.entities.ClusterConfigEntity;
+import org.apache.ambari.server.state.PropertyInfo.PropertyType;
 import org.apache.ambari.server.upgrade.UpgradeCatalog170;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -437,6 +438,33 @@ public class ConfigHelper {
     return result;
   }
   
+  public Set<String> getPropertyValuesWithPropertyType(StackId stackId, PropertyType propertyType, Cluster cluster) throws AmbariException {
+    StackInfo stack = ambariMetaInfo.getStackInfo(stackId.getStackName(),
+        stackId.getStackVersion());
+    
+    Set<String> result = new HashSet<String>();
+
+    for(Service service : cluster.getServices().values()) {
+      Set<PropertyInfo> stackProperties = ambariMetaInfo.getProperties(stack.getName(), stack.getVersion(), service.getName());
+      
+      for (PropertyInfo stackProperty : stackProperties) {
+        if(stackProperty.getPropertyTypes().contains(propertyType)) {
+          result.add(stackProperty.getValue());
+        }
+      }
+    }
+    
+    Set<PropertyInfo> stackProperties = ambariMetaInfo.getStackProperties(stack.getName(), stack.getVersion());
+    
+    for (PropertyInfo stackProperty : stackProperties) {
+      if(stackProperty.getPropertyTypes().contains(propertyType)) {
+        result.add(stackProperty.getValue());
+      }
+    }
+    
+    return result;
+  }
+  
   public String getPropertyValueFromStackDefenitions(Cluster cluster, String configType, String propertyName) throws AmbariException {
     StackId stackId = cluster.getCurrentStackVersion();
     StackInfo stack = ambariMetaInfo.getStackInfo(stackId.getStackName(),

+ 22 - 12
ambari-server/src/main/java/org/apache/ambari/server/state/PropertyInfo.java

@@ -24,10 +24,15 @@ import org.w3c.dom.Element;
 
 import javax.xml.bind.annotation.XmlAnyElement;
 import javax.xml.bind.annotation.XmlAttribute;
+import javax.xml.bind.annotation.XmlElement;
+import javax.xml.bind.annotation.XmlList;
+
 import java.util.ArrayList;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 
 public class PropertyInfo {
   private String name;
@@ -36,7 +41,9 @@ public class PropertyInfo {
   private String filename;
   private boolean deleted;
   private boolean requireInput;
-  private PropertyType type = PropertyType.DEFAULT;
+  
+  private Set<PropertyType> propertyTypes = new HashSet<PropertyType>();
+
   @XmlAnyElement
   private List<Element> propertyAttributes = new ArrayList<Element>();
 
@@ -72,9 +79,19 @@ public class PropertyInfo {
     this.filename = filename;
   }
   
+  @XmlElement(name = "property-type")
+  @XmlList
+  public Set<PropertyType> getPropertyTypes() {
+    return propertyTypes;
+  }
+
+  public void setPropertyTypes(Set<PropertyType> propertyTypes) {
+    this.propertyTypes = propertyTypes;
+  }
+  
   public StackConfigurationResponse convertToResponse() {
     return new StackConfigurationResponse(getName(), getValue(),
-      getDescription() , getFilename(), isRequireInput(), getType().name(), getAttributesMap());
+      getDescription() , getFilename(), isRequireInput(), getPropertyTypes(), getAttributesMap());
   }
 
   public boolean isDeleted() {
@@ -102,14 +119,6 @@ public class PropertyInfo {
     this.requireInput = requireInput;
   }
 
-  public PropertyType getType() {
-    return type;
-  }
-
-  public void setType(PropertyType type) {
-    this.type = type;
-  }
-
   @Override
   public int hashCode() {
     final int prime = 31;
@@ -155,7 +164,8 @@ public class PropertyInfo {
   }
 
   public enum PropertyType {
-    DEFAULT,
-    PASSWORD
+    PASSWORD,
+    USER,
+    GROUP
   }
 }
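
The @XmlList annotation on getPropertyTypes() is what allows a stack definition to list several types in one whitespace-separated element, e.g. <property-type>USER GROUP</property-type> as in the ganglia-env.xml changes below.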

+ 2 - 0
ambari-server/src/main/resources/properties.json

@@ -247,6 +247,7 @@
         "StackConfigurations/property_description",
         "StackConfigurations/type",
         "StackConfigurations/final",
+        "StackConfigurations/property_type",
         "_"
     ],
     "StackServiceComponent":[
@@ -454,6 +455,7 @@
         "StackLevelConfigurations/property_description",
         "StackLevelConfigurations/type",
         "StackLevelConfigurations/final",
+        "StackConfigurations/property_type",
         "_"
     ]
 }
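
For context: properties.json enumerates the property IDs each REST resource supports, so these additions make property_type requestable on the stack configuration endpoints (both blocks use the StackConfigurations prefix for the new field, consistent with the constants defined in the resource providers above).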

+ 2 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/configuration/cluster-env.xml

@@ -39,11 +39,13 @@
     <property>
         <name>smokeuser</name>
         <value>ambari-qa</value>
+        <property-type>USER</property-type>
         <description>User executing service checks</description>
     </property>
     <property>
         <name>user_group</name>
         <value>hadoop</value>
+        <property-type>GROUP</property-type>
         <description>Hadoop user group.</description>
     </property>
 </configuration>

+ 18 - 11
ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/params.py

@@ -20,6 +20,8 @@ limitations under the License.
 from resource_management import *
 from resource_management.core.system import System
 import os
+import json
+import collections
 
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
@@ -70,23 +72,13 @@ hadoop_pid_dir_prefix = config['configurations']['hadoop-env']['hadoop_pid_dir_p
 #users and groups
 hbase_user = config['configurations']['hbase-env']['hbase_user']
 nagios_user = config['configurations']['nagios-env']['nagios_user']
-oozie_user = config['configurations']['oozie-env']['oozie_user']
-webhcat_user = config['configurations']['hive-env']['hcat_user']
-hcat_user = config['configurations']['hive-env']['hcat_user']
-hive_user = config['configurations']['hive-env']['hive_user']
 smoke_user =  config['configurations']['hadoop-env']['smokeuser']
-mapred_user = config['configurations']['mapred-env']['mapred_user']
-hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
-zk_user = config['configurations']['zookeeper-env']['zk_user']
 gmetad_user = config['configurations']['ganglia-env']["gmetad_user"]
 gmond_user = config['configurations']['ganglia-env']["gmond_user"]
-sqoop_user = config['configurations']['sqoop-env']['sqoop_user']
 
 user_group = config['configurations']['hadoop-env']['user_group']
 proxyuser_group =  config['configurations']['hadoop-env']['proxyuser_group']
 nagios_group = config['configurations']['nagios-env']['nagios_group']
-smoke_user_group =  "users"
-mapred_tt_group = default("/configurations/mapred-site/mapreduce.tasktracker.group", user_group)
 
 #hosts
 hostname = config["hostname"]
@@ -129,7 +121,22 @@ if has_ganglia_server:
 hbase_tmp_dir = config['configurations']['hbase-site']['hbase.tmp.dir']
 ignore_groupsusers_create = default("/configurations/hadoop-env/ignore_groupsusers_create", False)
 
-
+smoke_user_dirs = format("/tmp/hadoop-{smoke_user},/tmp/hsperfdata_{smoke_user},/home/{smoke_user},/tmp/{smoke_user},/tmp/sqoop-{smoke_user}")
+if has_hbase_masters:
+  hbase_user_dirs = format("/home/{hbase_user},/tmp/{hbase_user},/usr/bin/{hbase_user},/var/log/{hbase_user},{hbase_tmp_dir}")
 #repo params
 repo_info = config['hostLevelParams']['repo_info']
 service_repo_info = default("/hostLevelParams/service_repo_info",None)
+
+user_to_groups_dict = collections.defaultdict(lambda:[user_group])
+user_to_groups_dict[smoke_user] = [proxyuser_group]
+if has_ganglia_server:
+  user_to_groups_dict[gmond_user] = [gmond_user]
+  user_to_groups_dict[gmetad_user] = [gmetad_user]
+
+user_to_gid_dict = collections.defaultdict(lambda:user_group)
+if has_nagios:
+  user_to_gid_dict[nagios_user] = nagios_group
+
+user_list = json.loads(config['hostLevelParams']['user_list'])
+group_list = json.loads(config['hostLevelParams']['group_list'])
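
For a sense of what the hooks now receive, a minimal runnable sketch (the user/group values below are hypothetical examples; in a real cluster the server derives them from the USER/GROUP property-type declarations):

    import collections
    import json

    # hostLevelParams-style input: the server sends the aggregated
    # user and group sets as JSON-encoded strings (example values)
    host_level_params = {
        'user_list': '["ambari-qa", "hbase", "nagios"]',
        'group_list': '["hadoop", "users", "nagios"]',
    }

    user_list = json.loads(host_level_params['user_list'])
    group_list = json.loads(host_level_params['group_list'])

    user_group = 'hadoop'
    nagios_user, nagios_group = 'nagios', 'nagios'

    # same pattern as params.py: default gid with per-user overrides
    user_to_gid_dict = collections.defaultdict(lambda: user_group)
    user_to_gid_dict[nagios_user] = nagios_group

    for user in user_list:
        print(user, '->', user_to_gid_dict[user])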

+ 12 - 99
ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/shared_initialization.py

@@ -26,110 +26,23 @@ def setup_users():
   Creates users before cluster installation
   """
   import params
-
-  Group(params.user_group,
-         ignore_failures = params.ignore_groupsusers_create
-  )
-  Group(params.smoke_user_group,
-         ignore_failures = params.ignore_groupsusers_create
-  )
-  Group(params.proxyuser_group,
-         ignore_failures = params.ignore_groupsusers_create
-  )
-  User(params.smoke_user,
-       gid=params.user_group,
-       groups=[params.proxyuser_group],
-       ignore_failures = params.ignore_groupsusers_create
-  )
-  
-  smoke_user_dirs = format(
-    "/tmp/hadoop-{smoke_user},/tmp/hsperfdata_{smoke_user},/home/{smoke_user},/tmp/{smoke_user},/tmp/sqoop-{smoke_user}")
-  set_uid(params.smoke_user, smoke_user_dirs)
-
-  if params.has_hbase_masters:
-    User(params.hbase_user,
-         gid = params.user_group,
-         groups=[params.user_group],
-         ignore_failures = params.ignore_groupsusers_create
-    )
-    hbase_user_dirs = format(
-      "/home/{hbase_user},/tmp/{hbase_user},/usr/bin/{hbase_user},/var/log/{hbase_user},{hbase_tmp_dir}")
-    set_uid(params.hbase_user, hbase_user_dirs)
-
-  if params.has_nagios:
-    Group(params.nagios_group,
-         ignore_failures = params.ignore_groupsusers_create
-    )
-    User(params.nagios_user,
-         gid=params.nagios_group,
-         ignore_failures = params.ignore_groupsusers_create
-    )
-
-  if params.has_oozie_server:
-    User(params.oozie_user,
-         gid = params.user_group,
-         ignore_failures = params.ignore_groupsusers_create
-    )
-
-  if params.has_hcat_server_host:
-    User(params.webhcat_user,
-         gid = params.user_group,
-         ignore_failures = params.ignore_groupsusers_create
-    )
-    User(params.hcat_user,
-         gid = params.user_group,
-         ignore_failures = params.ignore_groupsusers_create
-    )
-
-  if params.has_hive_server_host:
-    User(params.hive_user,
-         gid = params.user_group,
-         ignore_failures = params.ignore_groupsusers_create
-    )
-
-  if params.has_ganglia_server:
-    Group(params.gmetad_user,
-         ignore_failures = params.ignore_groupsusers_create
-    )
-    Group(params.gmond_user,
-         ignore_failures = params.ignore_groupsusers_create
-    )
-    User(params.gmond_user,
-         gid=params.user_group,
-         groups=[params.gmond_user],
-         ignore_failures = params.ignore_groupsusers_create
-    )
-    User(params.gmetad_user,
-         gid=params.user_group,
-         groups=[params.gmetad_user],
-         ignore_failures = params.ignore_groupsusers_create
-    )
   
-  if params.has_namenode:
-    User(params.hdfs_user,
-          gid=params.user_group,
-          groups=[params.user_group],
-          ignore_failures = params.ignore_groupsusers_create
-    )
-  if params.has_jt:
-    User(params.mapred_user,
-         gid=params.user_group,
-         groups=[params.user_group],
-         ignore_failures = params.ignore_groupsusers_create
+  for group in params.group_list:
+    Group(group,
+        ignore_failures = params.ignore_groupsusers_create
     )
     
-  if params.has_zk_host:
-    User(params.zk_user,
-         gid=params.user_group,
-         ignore_failures = params.ignore_groupsusers_create
+  for user in params.user_list: 
+    User(user,
+        gid = params.user_to_gid_dict[user],
+        groups = params.user_to_groups_dict[user],
+        ignore_failures = params.ignore_groupsusers_create        
     )
+  
+  set_uid(params.smoke_user, params.smoke_user_dirs)
 
-  if params.has_sqoop_client:
-    User(params.sqoop_user,
-         gid=params.user_group,
-         groups=[params.user_group],
-         ignore_failures=params.ignore_groupsusers_create
-    )
+  if params.has_hbase_masters:
+    set_uid(params.hbase_user, params.hbase_user_dirs)
 
 def set_uid(user, user_dirs):
   """

+ 2 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/GANGLIA/configuration/ganglia-env.xml

@@ -34,11 +34,13 @@
   <property>
     <name>gmetad_user</name>
     <value>nobody</value>
+    <property-type>USER GROUP</property-type>
     <description>User </description>
   </property>
     <property>
     <name>gmond_user</name>
     <value>nobody</value>
+    <property-type>USER GROUP</property-type>
     <description>User </description>
   </property>
   <property>

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/configuration/hbase-env.xml

@@ -54,6 +54,7 @@
   <property>
     <name>hbase_user</name>
     <value>hbase</value>
+    <property-type>USER</property-type>
     <description>HBase User Name.</description>
   </property>
   

+ 4 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/configuration/hadoop-env.xml

@@ -59,6 +59,7 @@
   <property>
     <name>proxyuser_group</name>
     <value>users</value>
+    <property-type>GROUP</property-type>
     <description>Proxy user group.</description>
   </property>
 
@@ -76,6 +77,7 @@
   <property>
     <name>hdfs_user</name>
     <value>hdfs</value>
+    <property-type>USER</property-type>
     <description>User to run HDFS as</description>
   </property>
   <property>
@@ -86,11 +88,13 @@
   <property>
     <name>smokeuser</name>
     <value>ambari-qa</value>
+    <property-type>USER</property-type>
     <description>User executing service checks</description>
   </property>
   <property>
     <name>user_group</name>
     <value>hadoop</value>
+    <property-type>GROUP</property-type>
     <description>Proxy user group.</description>
   </property>
   

+ 0 - 1
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py

@@ -94,7 +94,6 @@ hdfs_user = status_params.hdfs_user
 user_group = config['configurations']['hadoop-env']['user_group']
 proxyuser_group =  config['configurations']['hadoop-env']['proxyuser_group']
 nagios_group = config['configurations']['nagios-env']['nagios_group']
-smoke_user_group = "users"
 
 #hadoop params
 hadoop_conf_dir = "/etc/hadoop/conf"

+ 3 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/configuration/hive-env.xml

@@ -71,6 +71,7 @@
   <property>
     <name>hive_user</name>
     <value>hive</value>
+    <property-type>USER</property-type>
     <description>Hive User.</description>
   </property>
 
@@ -89,11 +90,13 @@
   <property>
     <name>hcat_user</name>
     <value>hcat</value>
+    <property-type>USER</property-type>
     <description>HCat User.</description>
   </property>
   <property>
     <name>webhcat_user</name>
     <value>hcat</value>
+    <property-type>USER</property-type>
     <description>WebHCat User.</description>
   </property>
   

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/configuration/hive-site.xml

@@ -52,7 +52,7 @@ limitations under the License.
   <property require-input="true">
     <name>javax.jdo.option.ConnectionPassword</name>
     <value> </value>
-    <type>PASSWORD</type>
+    <property-type>PASSWORD</property-type>
     <description>password to use against metastore database</description>
   </property>
 

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/configuration/mapred-env.xml

@@ -54,6 +54,7 @@
   <property>
     <name>mapred_user</name>
     <value>mapred</value>
+    <property-type>USER</property-type>
     <description>MapReduce User.</description>
   </property>
 

+ 3 - 1
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/configuration/nagios-env.xml

@@ -24,11 +24,13 @@
   <property>
     <name>nagios_user</name>
     <value>nagios</value>
+    <property-type>USER</property-type>
     <description>Nagios Username.</description>
   </property>
   <property>
     <name>nagios_group</name>
     <value>nagios</value>
+    <property-type>GROUP</property-type>
     <description>Nagios Group.</description>
   </property>
   <property>
@@ -39,7 +41,7 @@
   <property require-input = "true">
     <name>nagios_web_password</name>
     <value></value>
-    <type>PASSWORD</type>
+    <property-type>PASSWORD</property-type>
     <description>Nagios Admin Password.</description>
   </property>
   <property require-input = "true">

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/configuration/oozie-env.xml

@@ -24,6 +24,7 @@
   <property>
     <name>oozie_user</name>
     <value>oozie</value>
+    <property-type>USER</property-type>
     <description>Oozie User.</description>
   </property>
   <property>

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/configuration/oozie-site.xml

@@ -217,7 +217,7 @@
   <property require-input = "true">
     <name>oozie.service.JPAService.jdbc.password</name>
     <value> </value>
-    <type>PASSWORD</type>
+    <property-type>PASSWORD</property-type>
     <description>
       DB user password.
 

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/configuration/sqoop-env.xml

@@ -48,6 +48,7 @@ export SQOOP_USER_CLASSPATH="`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:
   <property>
     <name>sqoop_user</name>
     <description>User to run Sqoop as</description>
+    <property-type>USER</property-type>
     <value>sqoop</value>
   </property>
   

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/ZOOKEEPER/configuration/zookeeper-env.xml

@@ -24,6 +24,7 @@
   <property>
     <name>zk_user</name>
     <value>zookeeper</value>
+    <property-type>USER</property-type>
     <description>ZooKeeper User.</description>
   </property>
   <property>

+ 2 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/configuration/cluster-env.xml

@@ -39,11 +39,13 @@
     <property>
         <name>smokeuser</name>
         <value>ambari-qa</value>
+        <property-type>USER</property-type>
         <description>User executing service checks</description>
     </property>
     <property>
         <name>user_group</name>
         <value>hadoop</value>
+        <property-type>GROUP</property-type>
         <description>Hadoop user group.</description>
     </property>
 </configuration>

+ 21 - 14
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py

@@ -20,34 +20,23 @@ limitations under the License.
 from resource_management import *
 from resource_management.core.system import System
 import os
+import json
+import collections
 
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
 #users and groups
-yarn_user = config['configurations']['yarn-env']['yarn_user']
 hbase_user = config['configurations']['hbase-env']['hbase_user']
 nagios_user = config['configurations']['nagios-env']['nagios_user']
-oozie_user = config['configurations']['oozie-env']['oozie_user']
-webhcat_user = config['configurations']['hive-env']['hcat_user']
-hcat_user = config['configurations']['hive-env']['hcat_user']
-hive_user = config['configurations']['hive-env']['hive_user']
 smoke_user =  config['configurations']['hadoop-env']['smokeuser']
-mapred_user = config['configurations']['mapred-env']['mapred_user']
-hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
-zk_user = config['configurations']['zookeeper-env']['zk_user']
 gmetad_user = config['configurations']['ganglia-env']["gmetad_user"]
 gmond_user = config['configurations']['ganglia-env']["gmond_user"]
-storm_user = config['configurations']['storm-env']['storm_user']
-tez_user = config['configurations']['tez-env']['tez_user']
-falcon_user = config['configurations']['falcon-env']['falcon_user']
-sqoop_user = config['configurations']['sqoop-env']['sqoop_user']
+tez_user = config['configurations']['tez-env']["tez_user"]
 
 user_group = config['configurations']['hadoop-env']['user_group']
 proxyuser_group =  config['configurations']['hadoop-env']['proxyuser_group']
 nagios_group = config['configurations']['nagios-env']['nagios_group']
-smoke_user_group =  "users"
-mapred_tt_group = default("/configurations/mapred-site/mapreduce.tasktracker.group", user_group)
 
 #hosts
 hostname = config["hostname"]
@@ -107,6 +96,24 @@ jce_location = config['hostLevelParams']['jdk_location']
 jdk_location = config['hostLevelParams']['jdk_location']
 ignore_groupsusers_create = default("/configurations/hadoop-env/ignore_groupsusers_create", False)
 
+smoke_user_dirs = format("/tmp/hadoop-{smoke_user},/tmp/hsperfdata_{smoke_user},/home/{smoke_user},/tmp/{smoke_user},/tmp/sqoop-{smoke_user}")
+if has_hbase_masters:
+  hbase_user_dirs = format("/home/{hbase_user},/tmp/{hbase_user},/usr/bin/{hbase_user},/var/log/{hbase_user},{hbase_tmp_dir}")
 #repo params
 repo_info = config['hostLevelParams']['repo_info']
 service_repo_info = default("/hostLevelParams/service_repo_info",None)
+
+user_to_groups_dict = collections.defaultdict(lambda:[user_group])
+user_to_groups_dict[smoke_user] = [proxyuser_group]
+if has_ganglia_server:
+  user_to_groups_dict[gmond_user] = [gmond_user]
+  user_to_groups_dict[gmetad_user] = [gmetad_user]
+if has_tez:
+  user_to_groups_dict[tez_user] = [proxyuser_group]
+
+user_to_gid_dict = collections.defaultdict(lambda:user_group)
+if has_nagios:
+  user_to_gid_dict[nagios_user] = nagios_group
+
+user_list = json.loads(config['hostLevelParams']['user_list'])
+group_list = json.loads(config['hostLevelParams']['group_list'])

+ 11 - 125
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/shared_initialization.py

@@ -26,137 +26,23 @@ def setup_users():
   Creates users before cluster installation
   """
   import params
-
-  Group(params.user_group, 
-        ignore_failures = params.ignore_groupsusers_create
-  )
   
-  Group(params.smoke_user_group,
-        ignore_failures = params.ignore_groupsusers_create
-  )
-  Group(params.proxyuser_group,
+  for group in params.group_list:
+    Group(group,
         ignore_failures = params.ignore_groupsusers_create
-  )
-  User(params.smoke_user,
-       gid=params.user_group,
-       groups=[params.proxyuser_group],
-       ignore_failures = params.ignore_groupsusers_create
-  )
-  smoke_user_dirs = format(
-    "/tmp/hadoop-{smoke_user},/tmp/hsperfdata_{smoke_user},/home/{smoke_user},/tmp/{smoke_user},/tmp/sqoop-{smoke_user}")
-  set_uid(params.smoke_user, smoke_user_dirs)
-
-  if params.has_hbase_masters:
-    User(params.hbase_user,
-         gid = params.user_group,
-         groups=[params.user_group],
-         ignore_failures = params.ignore_groupsusers_create)
-    hbase_user_dirs = format(
-      "/home/{hbase_user},/tmp/{hbase_user},/usr/bin/{hbase_user},/var/log/{hbase_user},{hbase_tmp_dir}")
-    set_uid(params.hbase_user, hbase_user_dirs)
-
-  if params.has_nagios:
-    Group(params.nagios_group,
-      ignore_failures = params.ignore_groupsusers_create
-    )
-    User(params.nagios_user,
-         gid=params.nagios_group,
-         ignore_failures = params.ignore_groupsusers_create
-    )
-
-  if params.has_oozie_server:
-    User(params.oozie_user,
-         gid = params.user_group,
-         ignore_failures = params.ignore_groupsusers_create
-    )
-
-  if params.has_hcat_server_host:
-    User(params.webhcat_user,
-         gid = params.user_group,
-         ignore_failures = params.ignore_groupsusers_create
-    )
-    User(params.hcat_user,
-         gid = params.user_group,
-         ignore_failures = params.ignore_groupsusers_create
-    )
-
-  if params.has_hive_server_host:
-    User(params.hive_user,
-         gid = params.user_group,
-         ignore_failures = params.ignore_groupsusers_create
-    )
-
-  if params.has_resourcemanager:
-    User(params.yarn_user,
-         gid = params.user_group,
-         ignore_failures = params.ignore_groupsusers_create
-    )
-
-  if params.has_ganglia_server:
-    Group(params.gmetad_user,
-         ignore_failures = params.ignore_groupsusers_create
-    )
-    Group(params.gmond_user,
-         ignore_failures = params.ignore_groupsusers_create
-    )
-    User(params.gmond_user,
-         gid=params.user_group,
-         groups=[params.gmond_user],
-         ignore_failures = params.ignore_groupsusers_create
-    )
-    User(params.gmetad_user,
-         gid=params.user_group,
-         groups=[params.gmetad_user],
-         ignore_failures = params.ignore_groupsusers_create
-    )
-
-  if params.has_namenode:
-    User(params.hdfs_user,
-          gid=params.user_group,
-          groups=[params.user_group],
-          ignore_failures = params.ignore_groupsusers_create
-    )
-  
-  if params.has_hs:
-    User(params.mapred_user,
-         gid=params.user_group,
-         groups=[params.user_group],
-         ignore_failures = params.ignore_groupsusers_create
-    )
-  
-  if params.has_zk_host:
-    User(params.zk_user,
-         gid=params.user_group,
-         ignore_failures = params.ignore_groupsusers_create
-    )
-
-  if params.has_storm_server:
-    User(params.storm_user,
-         gid=params.user_group,
-         groups=[params.user_group],
-         ignore_failures = params.ignore_groupsusers_create
-    )
-
-  if params.has_falcon_server:
-    User(params.falcon_user,
-         gid=params.user_group,
-         groups=[params.user_group],
-         ignore_failures = params.ignore_groupsusers_create
     )
     
-  if params.has_tez:
-    User(params.tez_user,
-      gid=params.user_group,
-      groups=[params.proxyuser_group],
-      ignore_failures = params.ignore_groupsusers_create
+  for user in params.user_list:
+    User(user,
+        gid = params.user_to_gid_dict[user],
+        groups = params.user_to_groups_dict[user],
+        ignore_failures = params.ignore_groupsusers_create       
     )
+           
+  set_uid(params.smoke_user, params.smoke_user_dirs)
 
-  if params.has_sqoop_client:
-    User(params.sqoop_user,
-         gid=params.user_group,
-         groups=[params.user_group],
-         ignore_failures=params.ignore_groupsusers_create
-    )
+  if params.has_hbase_masters:
+    set_uid(params.hbase_user, params.hbase_user_dirs)
 
 def set_uid(user, user_dirs):
   """

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/configuration/flume-env.xml

@@ -34,6 +34,7 @@
   <property>
     <name>flume_user</name>
     <value>flume</value>
+    <property-type>USER</property-type>
     <description>Flume User</description>
   </property>
 </configuration>

+ 2 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/GANGLIA/configuration/ganglia-env.xml

@@ -34,11 +34,13 @@
   <property>
     <name>gmetad_user</name>
     <value>nobody</value>
+    <property-type>USER GROUP</property-type>
     <description>User </description>
   </property>
     <property>
     <name>gmond_user</name>
     <value>nobody</value>
+    <property-type>USER GROUP</property-type>
     <description>User </description>
   </property>
   <property>

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/configuration/hbase-env.xml

@@ -54,6 +54,7 @@
    <property>
     <name>hbase_user</name>
     <value>hbase</value>
+    <property-type>USER</property-type>
     <description>HBase User Name.</description>
   </property>
 

+ 4 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/configuration/hadoop-env.xml

@@ -59,6 +59,7 @@
   <property>
     <name>proxyuser_group</name>
     <value>users</value>
+    <property-type>GROUP</property-type>
     <description>Proxy user group.</description>
   </property>
   <property>
@@ -74,6 +75,7 @@
   <property>
     <name>hdfs_user</name>
     <value>hdfs</value>
+    <property-type>USER</property-type>
     <description>User to run HDFS as</description>
   </property>
   <property>
@@ -84,11 +86,13 @@
   <property>
     <name>smokeuser</name>
     <value>ambari-qa</value>
+    <property-type>USER</property-type>
     <description>User executing service checks</description>
   </property>
   <property>
     <name>user_group</name>
     <value>hadoop</value>
+    <property-type>GROUP</property-type>
     <description>Proxy user group.</description>
   </property>
   

+ 0 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py

@@ -99,7 +99,6 @@ hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_nam
 user_group = config['configurations']['hadoop-env']['user_group']
 proxyuser_group =  config['configurations']['hadoop-env']['proxyuser_group']
 nagios_group = config['configurations']['nagios-env']['nagios_group']
-smoke_user_group = "users"
 
 #hadoop params
 hadoop_conf_dir = "/etc/hadoop/conf"

+ 3 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/configuration/hive-env.xml

@@ -61,6 +61,7 @@
   <property>
     <name>hive_user</name>
     <value>hive</value>
+    <property-type>USER</property-type>
     <description>Hive User.</description>
   </property>
 
@@ -79,11 +80,13 @@
   <property>
     <name>hcat_user</name>
     <value>hcat</value>
+    <property-type>USER</property-type>
     <description>HCat User.</description>
   </property>
   <property>
     <name>webhcat_user</name>
     <value>hcat</value>
+    <property-type>USER</property-type>
     <description>WebHCat User.</description>
   </property>
   

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/configuration/hive-site.xml

@@ -52,7 +52,7 @@ limitations under the License.
   <property require-input="true">
     <name>javax.jdo.option.ConnectionPassword</name>
     <value> </value>
-    <type>PASSWORD</type>
+    <property-type>PASSWORD</property-type>
     <description>password to use against metastore database</description>
   </property>
 

+ 3 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/configuration/nagios-env.xml

@@ -24,11 +24,13 @@
   <property>
     <name>nagios_user</name>
     <value>nagios</value>
+    <property-type>USER</property-type>
     <description>Nagios Username.</description>
   </property>
   <property>
     <name>nagios_group</name>
     <value>nagios</value>
+    <property-type>GROUP</property-type>
     <description>Nagios Group.</description>
   </property>
   <property>
@@ -39,7 +41,7 @@
   <property require-input = "true">
     <name>nagios_web_password</name>
     <value></value>
-    <type>PASSWORD</type>
+    <property-type>PASSWORD</property-type>
     <description>Nagios Admin Password.</description>
   </property>
   <property require-input = "true">

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/configuration/oozie-env.xml

@@ -24,6 +24,7 @@
   <property>
     <name>oozie_user</name>
     <value>oozie</value>
+    <property-type>USER</property-type>
     <description>Oozie User.</description>
   </property>
   <property>

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/configuration/oozie-site.xml

@@ -224,7 +224,7 @@
   <property require-input = "true">
     <name>oozie.service.JPAService.jdbc.password</name>
     <value> </value>
-    <type>PASSWORD</type>
+    <property-type>PASSWORD</property-type>
     <description>
       DB user password.
 

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/configuration/sqoop-env.xml

@@ -48,6 +48,7 @@ export SQOOP_USER_CLASSPATH="`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:
   <property>
     <name>sqoop_user</name>
     <description>User to run Sqoop as</description>
+    <property-type>USER</property-type>
     <value>sqoop</value>
   </property>
 </configuration>

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/configuration-mapred/mapred-env.xml

@@ -34,6 +34,7 @@
   <property>
     <name>mapred_user</name>
     <value>mapred</value>
+    <property-type>USER</property-type>
     <description>Mapreduce User</description>
   </property>
   <property>

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/configuration/yarn-env.xml

@@ -34,6 +34,7 @@
   <property>
     <name>yarn_user</name>
     <value>yarn</value>
+    <property-type>USER</property-type>
     <description>YARN User</description>
   </property>
   <property>

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/configuration/zookeeper-env.xml

@@ -24,6 +24,7 @@
   <property>
     <name>zk_user</name>
     <value>zookeeper</value>
+    <property-type>USER</property-type>
     <description>ZooKeeper User.</description>
   </property>
   <property>

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/HIVE/configuration/hive-site.xml

@@ -52,7 +52,7 @@ limitations under the License.
   <property require-input="true">
     <name>javax.jdo.option.ConnectionPassword</name>
     <value></value>
-    <type>PASSWORD</type>
+    <property-type>PASSWORD</property-type>
     <description>password to use against metastore database</description>
   </property>
 

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/2.1/services/FALCON/configuration/falcon-env.xml

@@ -22,6 +22,7 @@
   <property>
     <name>falcon_user</name>
     <value>falcon</value>
+    <property-type>USER</property-type>
     <description>Falcon user.</description>
   </property>
   <property>

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.1/services/HIVE/configuration/hive-site.xml

@@ -52,7 +52,7 @@ limitations under the License.
   <property require-input="true">
     <name>javax.jdo.option.ConnectionPassword</name>
     <value></value>
-    <type>PASSWORD</type>
+    <property-type>PASSWORD</property-type>
     <description>password to use against metastore database</description>
   </property>
 

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/2.1/services/STORM/configuration/storm-env.xml

@@ -24,6 +24,7 @@
   <property>
     <name>storm_user</name>
     <value>storm</value>
+    <property-type>USER</property-type>
     <description></description>
   </property>
   <property>

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/2.1/services/TEZ/configuration/tez-env.xml

@@ -24,6 +24,7 @@
   <property>
     <name>tez_user</name>
     <value>tez</value>
+    <property-type>USER</property-type>
     <description></description>
   </property>
 

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/2.1/services/YARN/configuration/yarn-env.xml

@@ -34,6 +34,7 @@
   <property>
     <name>yarn_user</name>
     <value>yarn</value>
+    <property-type>USER</property-type>
     <description>YARN User</description>
   </property>
   <property>

+ 2 - 2
ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java

@@ -1403,10 +1403,10 @@ public class AmbariMetaInfoTest {
     PropertyInfo passwordProperty = null;
     for (PropertyInfo propertyInfo : propertyInfoList) {
       if (propertyInfo.isRequireInput()
-          && propertyInfo.getType().equals(PropertyInfo.PropertyType.PASSWORD)) {
+          && propertyInfo.getPropertyTypes().contains(PropertyInfo.PropertyType.PASSWORD)) {
         passwordProperty = propertyInfo;
       } else {
-        Assert.assertEquals(PropertyInfo.PropertyType.DEFAULT, propertyInfo.getType());
+        Assert.assertTrue(propertyInfo.getPropertyTypes().isEmpty());
       }
     }
     Assert.assertNotNull(passwordProperty);

+ 3 - 3
ambari-server/src/test/java/org/apache/ambari/server/api/util/StackExtensionHelperTest.java

@@ -585,7 +585,7 @@ public class StackExtensionHelperTest {
     assertEquals(null, propertyInfo.getFilename());
     assertEquals(false, propertyInfo.isDeleted());
     assertEquals(false, propertyInfo.isRequireInput());
-    assertEquals(PropertyInfo.PropertyType.DEFAULT, propertyInfo.getType());
+    assertTrue(propertyInfo.getPropertyTypes().isEmpty());
 
     propertyInfo = properties.get(1);
     assertEquals("yarn.scheduler.capacity.maximum-am-resource-percent", propertyInfo.getName());
@@ -596,7 +596,7 @@ public class StackExtensionHelperTest {
     assertEquals(null, propertyInfo.getFilename());
     assertEquals(true, propertyInfo.isDeleted());
     assertEquals(false, propertyInfo.isRequireInput());
-    assertEquals(PropertyInfo.PropertyType.DEFAULT, propertyInfo.getType());
+    assertTrue(propertyInfo.getPropertyTypes().isEmpty());
 
     propertyInfo = properties.get(2);
     assertEquals("yarn.scheduler.capacity.root.queues", propertyInfo.getName());
@@ -606,7 +606,7 @@ public class StackExtensionHelperTest {
     assertEquals(null, propertyInfo.getFilename());
     assertEquals(false, propertyInfo.isDeleted());
     assertEquals(true, propertyInfo.isRequireInput());
-    assertEquals(PropertyInfo.PropertyType.DEFAULT, propertyInfo.getType());
+    assertTrue(propertyInfo.getPropertyTypes().isEmpty());
   }
 
   @Test

+ 5 - 5
ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterResourceProviderTest.java

@@ -328,7 +328,7 @@ public class ClusterResourceProviderTest {
     expect(blueprint.getStackName()).andReturn(stackName);
     expect(blueprint.getStackVersion()).andReturn(stackVersion);
     expect(blueprint.getConfigurations()).andReturn(configurations);
-    expect(blueprint.validateConfigurations(metaInfo, PropertyInfo.PropertyType.PASSWORD)).andReturn(
+    expect(blueprint.validateConfigurations(metaInfo, true)).andReturn(
         Collections.<String, Map<String, Collection<String>>>emptyMap());
 
     expect(metaInfo.getComponentDependencies("test", "1.23", "service1", "component1")).
@@ -765,7 +765,7 @@ public class ClusterResourceProviderTest {
     expect(blueprint.getStackName()).andReturn(stackName);
     expect(blueprint.getStackVersion()).andReturn(stackVersion);
     expect(blueprint.getConfigurations()).andReturn(Collections.<BlueprintConfigEntity>singletonList(blueprintConfig));
-    expect(blueprint.validateConfigurations(metaInfo, PropertyInfo.PropertyType.PASSWORD)).andReturn(allMissingPasswords);
+    expect(blueprint.validateConfigurations(metaInfo, true)).andReturn(allMissingPasswords);
 
     expect(metaInfo.getComponentDependencies("test", "1.23", "service1", "component1")).
         andReturn(Collections.<DependencyInfo>emptyList());
@@ -1550,7 +1550,7 @@ public class ClusterResourceProviderTest {
     expect(blueprint.getStackName()).andReturn(stackName);
     expect(blueprint.getStackVersion()).andReturn(stackVersion);
     expect(blueprint.getConfigurations()).andReturn(Collections.<BlueprintConfigEntity>singletonList(blueprintConfig));
-    expect(blueprint.validateConfigurations(metaInfo, PropertyInfo.PropertyType.PASSWORD)).andReturn(allMissingPasswords);
+    expect(blueprint.validateConfigurations(metaInfo, true)).andReturn(allMissingPasswords);
 
     expect(metaInfo.getComponentDependencies("test", "1.23", "service1", "component1")).
         andReturn(Collections.<DependencyInfo>emptyList());
@@ -1959,7 +1959,7 @@ public class ClusterResourceProviderTest {
     expect(blueprint.getStackName()).andReturn(stackName);
     expect(blueprint.getStackVersion()).andReturn(stackVersion);
     expect(blueprint.getConfigurations()).andReturn(Collections.<BlueprintConfigEntity>singletonList(blueprintConfig));
-    expect(blueprint.validateConfigurations(metaInfo, PropertyInfo.PropertyType.PASSWORD)).andReturn(
+    expect(blueprint.validateConfigurations(metaInfo, true)).andReturn(
         Collections.<String, Map<String, Collection<String>>>emptyMap());
 
     expect(metaInfo.getComponentDependencies("test", "1.23", "service1", "component1")).
@@ -2227,7 +2227,7 @@ public class ClusterResourceProviderTest {
     expect(blueprint.getStackName()).andReturn(stackName);
     expect(blueprint.getStackVersion()).andReturn(stackVersion);
     expect(blueprint.getConfigurations()).andReturn(configurations).times(2);
-    expect(blueprint.validateConfigurations(metaInfo, PropertyInfo.PropertyType.PASSWORD)).andReturn(
+    expect(blueprint.validateConfigurations(metaInfo, true)).andReturn(
         Collections.<String, Map<String, Collection<String>>>emptyMap());
 
     expect(metaInfo.getComponentDependencies("test", "1.23", "service1", "component1")).

+ 17 - 7
ambari-server/src/test/java/org/apache/ambari/server/orm/entities/BlueprintEntityTest.java

@@ -19,6 +19,7 @@
 package org.apache.ambari.server.orm.entities;
 
 import com.google.gson.Gson;
+
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.state.PropertyInfo;
 import org.junit.Test;
@@ -28,6 +29,7 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Map;
+import java.util.Set;
 
 import static org.easymock.EasyMock.createMock;
 import static org.easymock.EasyMock.expect;
@@ -87,7 +89,9 @@ public class BlueprintEntityTest {
     prop.setFilename("core-site.xml");
     prop.setName("super.secret.password");
     prop.setRequireInput(true);
-    prop.setType(PropertyInfo.PropertyType.PASSWORD);
+    Set<PropertyInfo.PropertyType> propertyTypes = new HashSet<PropertyInfo.PropertyType>();
+    propertyTypes.add(PropertyInfo.PropertyType.PASSWORD);
+    prop.setPropertyTypes(propertyTypes);
     prop.setValue(null);
     requiredProps.put("super.secret.password", prop);
 
@@ -131,7 +135,7 @@ public class BlueprintEntityTest {
     replay(metaInfo);
 
     Map<String, Map<String, Collection<String>>> missingProps = entity.validateConfigurations(
-        metaInfo, PropertyInfo.PropertyType.PASSWORD);
+        metaInfo, true);
 
     assertTrue(missingProps.isEmpty());
 
@@ -147,7 +151,9 @@ public class BlueprintEntityTest {
     prop.setFilename("core-site.xml");
     prop.setName("super.secret.password");
     prop.setRequireInput(true);
-    prop.setType(PropertyInfo.PropertyType.PASSWORD);
+    Set<PropertyInfo.PropertyType> propertyTypes = new HashSet<PropertyInfo.PropertyType>();
+    propertyTypes.add(PropertyInfo.PropertyType.PASSWORD);
+    prop.setPropertyTypes(propertyTypes);
     prop.setValue(null);
     requiredProps.put("super.secret.password", prop);
 
@@ -192,7 +198,7 @@ public class BlueprintEntityTest {
     replay(metaInfo);
 
     Map<String, Map<String, Collection<String>>> missingProps = entity.validateConfigurations(
-        metaInfo, PropertyInfo.PropertyType.PASSWORD);
+        metaInfo, true);
 
     assertTrue(missingProps.isEmpty());
 
@@ -208,14 +214,18 @@ public class BlueprintEntityTest {
     prop.setFilename("core-site.xml");
     prop.setName("super.secret.password");
     prop.setRequireInput(true);
-    prop.setType(PropertyInfo.PropertyType.PASSWORD);
+    Set<PropertyInfo.PropertyType> propertyTypes = new HashSet<PropertyInfo.PropertyType>();
+    propertyTypes.add(PropertyInfo.PropertyType.PASSWORD);
+    prop.setPropertyTypes(propertyTypes);
     prop.setValue(null);
 
     PropertyInfo prop2 = new PropertyInfo();
     prop2.setFilename("global.xml");
     prop2.setName("another.super.secret.password");
     prop2.setRequireInput(true);
-    prop2.setType(PropertyInfo.PropertyType.PASSWORD);
+    Set<PropertyInfo.PropertyType> propertyTypes2 = new HashSet<PropertyInfo.PropertyType>();
+    propertyTypes2.add(PropertyInfo.PropertyType.PASSWORD);
+    prop2.setPropertyTypes(propertyTypes2);
     prop2.setValue(" ");
 
     requiredProps.put("super.secret.password", prop);
@@ -261,7 +271,7 @@ public class BlueprintEntityTest {
     replay(metaInfo);
 
     Map<String, Map<String, Collection<String>>> missingProps = entity.validateConfigurations(
-        metaInfo, PropertyInfo.PropertyType.PASSWORD);
+        metaInfo, true);
 
     assertEquals(1, missingProps.size());
     Map<String, Collection<String>> typeProps = missingProps.get("hg1");

+ 1 - 1
ambari-server/src/test/resources/stacks/HDP/2.0.1/services/HIVE/configuration/hive-site.xml

@@ -46,7 +46,7 @@ limitations under the License.
   <property require-input="true">
     <name>javax.jdo.option.ConnectionPassword</name>
     <value></value>
-    <type>PASSWORD</type>
+    <property-type>PASSWORD</property-type>
     <description>password to use against metastore database</description>
   </property>