Browse code

AMBARI-9186 : Blueprint contains password fields in cluster-env [hadoop.user.password, sink.dbpassword] (jluniya)

Jayush Luniya, 10 years ago
parent
current commit
7c5fb7b534

+ 39 - 15
ambari-server/src/main/java/org/apache/ambari/server/api/query/render/ClusterBlueprintRenderer.java

@@ -41,6 +41,7 @@ import org.apache.ambari.server.state.PropertyInfo;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.ambari.server.state.ServiceInfo;
+import org.apache.ambari.server.state.StackInfo;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
 import java.util.ArrayList;
@@ -175,12 +176,25 @@ public class ClusterBlueprintRenderer extends BaseRenderer implements Renderer {
    * @param stackVersion  stack version
    */
   private void determinePropertiesToStrip(TreeNode<Resource> servicesNode, String stackName, String stackVersion) {
-    AmbariMetaInfo stackInfo = getController().getAmbariMetaInfo();
+    AmbariMetaInfo ambariMetaInfo = getController().getAmbariMetaInfo();
+    StackInfo stack;
+    try {
+      stack = ambariMetaInfo.getStack(stackName, stackVersion);
+    } catch (AmbariException e) {
+      // shouldn't ever happen.
+      // Exception indicates that stack is not defined
+      // but we are getting the stack name from a running cluster.
+      throw new RuntimeException("Unexpected exception occurred while generating a blueprint. "  +
+          "The stack '" + stackName + ":" + stackVersion + "' does not exist");
+    }
+    Map<String, PropertyInfo> requiredStackProperties = stack.getRequiredProperties();
+    updatePropertiesToStrip(requiredStackProperties);
+
     for (TreeNode<Resource> serviceNode : servicesNode.getChildren()) {
       String name = (String) serviceNode.getObject().getPropertyValue("ServiceInfo/service_name");
       ServiceInfo service;
       try {
-        service = stackInfo.getService(stackName, stackVersion, name);
+        service = ambariMetaInfo.getService(stackName, stackVersion, name);
       } catch (AmbariException e) {
         // shouldn't ever happen.
         // Exception indicates that service is not in the stack
@@ -190,20 +204,30 @@ public class ClusterBlueprintRenderer extends BaseRenderer implements Renderer {
       }
 
       Map<String, PropertyInfo> requiredProperties = service.getRequiredProperties();
-      for (Map.Entry<String, PropertyInfo> entry : requiredProperties.entrySet()) {
-        String propertyName = entry.getKey();
-        PropertyInfo propertyInfo = entry.getValue();
-        String configCategory = propertyInfo.getFilename();
-        if (configCategory.endsWith(".xml")) {
-          configCategory = configCategory.substring(0, configCategory.indexOf(".xml"));
-        }
-        Collection<String> categoryProperties = propertiesToStrip.get(configCategory);
-        if (categoryProperties == null) {
-          categoryProperties = new ArrayList<String>();
-          propertiesToStrip.put(configCategory, categoryProperties);
-        }
-        categoryProperties.add(propertyName);
+      updatePropertiesToStrip(requiredProperties);
+    }
+  }
+
+  /**
+   * Helper method to update propertiesToStrip with properties that are marked as required
+   *
+   * @param requiredProperties  Properties marked as required
+   */
+  private void updatePropertiesToStrip(Map<String, PropertyInfo> requiredProperties) {
+
+    for (Map.Entry<String, PropertyInfo> entry : requiredProperties.entrySet()) {
+      String propertyName = entry.getKey();
+      PropertyInfo propertyInfo = entry.getValue();
+      String configCategory = propertyInfo.getFilename();
+      if (configCategory.endsWith(".xml")) {
+        configCategory = configCategory.substring(0, configCategory.indexOf(".xml"));
+      }
+      Collection<String> categoryProperties = propertiesToStrip.get(configCategory);
+      if (categoryProperties == null) {
+        categoryProperties = new ArrayList<String>();
+        propertiesToStrip.put(configCategory, categoryProperties);
       }
+      categoryProperties.add(propertyName);
     }
   }
 

+ 1 - 1
ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StackLevelConfigurationResourceProvider.java

@@ -58,7 +58,7 @@ public class StackLevelConfigurationResourceProvider extends
       .getPropertyId("StackLevelConfigurations", "property_description");
   
   public static final String PROPERTY_PROPERTY_TYPE_PROPERTY_ID = PropertyHelper
-      .getPropertyId("StackConfigurations", "property_type");
+      .getPropertyId("StackLevelConfigurations", "property_type");
 
   public static final String PROPERTY_TYPE_PROPERTY_ID = PropertyHelper
       .getPropertyId("StackLevelConfigurations", "type");

+ 22 - 0
ambari-server/src/main/java/org/apache/ambari/server/state/StackInfo.java

@@ -55,6 +55,8 @@ public class StackInfo implements Comparable<StackInfo>{
 
   private String upgradesFolder = null;
 
+  private volatile Map<String, PropertyInfo> requiredProperties;
+
   public String getName() {
     return name;
   }
@@ -326,4 +328,24 @@ public class StackInfo implements Comparable<StackInfo>{
     String oId = o.name + "-" + o.version;
     return myId.compareTo(oId);
   }
+
+  //todo: ensure that required properties are never modified...
+  public Map<String, PropertyInfo> getRequiredProperties() {
+    Map<String, PropertyInfo> result = requiredProperties;
+    if (result == null) {
+      synchronized(this) {
+        result = requiredProperties;
+        if (result == null) {
+          requiredProperties = result = new HashMap<String, PropertyInfo>();
+          List<PropertyInfo> properties = getProperties();
+          for (PropertyInfo propertyInfo : properties) {
+            if (propertyInfo.isRequireInput()) {
+              result.put(propertyInfo.getName(), propertyInfo);
+            }
+          }
+        }
+      }
+    }
+    return result;
+  }
 }

+ 1 - 1
ambari-server/src/main/resources/properties.json

@@ -420,7 +420,7 @@
         "StackLevelConfigurations/property_description",
         "StackLevelConfigurations/type",
         "StackLevelConfigurations/final",
-        "StackConfigurations/property_type",
+        "StackLevelConfigurations/property_type",
         "_"
     ]
 }

+ 44 - 36
ambari-server/src/main/resources/stacks/HDPWIN/2.1/configuration/cluster-env.xml

@@ -21,40 +21,48 @@
 -->
 
 <configuration>
-    <property>
-      <name>hadoop.user.name</name>
-      <value>hadoop</value>
-    </property>
-    <property>
-        <name>security_enabled</name>
-        <value>false</value>
-        <description>Hadoop Security</description>
-    </property>
-    <property>
-        <name>kerberos_domain</name>
-        <value>EXAMPLE.COM</value>
-        <description>Kerberos realm.</description>
-    </property>
-    <property>
-        <name>ignore_groupsusers_create</name>
-        <value>false</value>
-        <description>Whether to ignore failures on users and group creation</description>
-    </property>
-    <property>
-        <name>smokeuser</name>
-        <value>ambari-qa</value>
-        <property-type>USER</property-type>
-        <description>User executing service checks</description>
-    </property>
-    <property>
-        <name>smokeuser_keytab</name>
-        <value>/etc/security/keytabs/smokeuser.headless.keytab</value>
-        <description>Path to smoke test user keytab file</description>
-    </property>
-    <property>
-        <name>user_group</name>
-        <value>hadoop</value>
-        <property-type>GROUP</property-type>
-        <description>Hadoop user group.</description>
-    </property>
+  <property>
+    <name>hadoop.user.name</name>
+    <value>hadoop</value>
+    <property-type>USER</property-type>
+    <description>User to run Hadoop services under</description>
+  </property>
+  <property require-input="true">
+    <name>hadoop.user.password</name>
+    <value> </value>
+    <property-type>PASSWORD</property-type>
+    <description>Password for hadoop user</description>
+  </property>
+  <property>
+    <name>security_enabled</name>
+    <value>false</value>
+    <description>Hadoop Security</description>
+  </property>
+  <property>
+    <name>kerberos_domain</name>
+    <value>EXAMPLE.COM</value>
+    <description>Kerberos realm.</description>
+  </property>
+  <property>
+    <name>ignore_groupsusers_create</name>
+    <value>false</value>
+    <description>Whether to ignore failures on users and group creation</description>
+  </property>
+  <property>
+    <name>smokeuser</name>
+    <value>ambari-qa</value>
+    <property-type>USER</property-type>
+    <description>User executing service checks</description>
+  </property>
+  <property>
+    <name>smokeuser_keytab</name>
+    <value>/etc/security/keytabs/smokeuser.headless.keytab</value>
+    <description>Path to smoke test user keytab file</description>
+  </property>
+  <property>
+    <name>user_group</name>
+    <value>hadoop</value>
+    <property-type>GROUP</property-type>
+    <description>Hadoop user group.</description>
+  </property>
 </configuration>

+ 11 - 8
ambari-server/src/test/java/org/apache/ambari/server/api/query/render/ClusterBlueprintRendererTest.java

@@ -31,8 +31,10 @@ import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.controller.internal.ResourceImpl;
 import org.apache.ambari.server.controller.spi.Resource;
 import org.apache.ambari.server.state.ServiceInfo;
+import org.apache.ambari.server.state.StackInfo;
 import org.junit.Test;
 
+import java.lang.reflect.Field;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
 import java.util.Arrays;
@@ -45,6 +47,7 @@ import java.util.Set;
 import static org.easymock.EasyMock.createMock;
 import static org.easymock.EasyMock.createNiceMock;
 import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.replay;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertNull;
@@ -102,14 +105,14 @@ public class ClusterBlueprintRendererTest {
   public void testFinalizeResult() throws Exception{
 
     AmbariManagementController controller = createMock(AmbariManagementController.class);
-    AmbariMetaInfo stackInfo = createNiceMock(AmbariMetaInfo.class);
-    ServiceInfo hdfsService = new ServiceInfo();
-    hdfsService.setName("HDFS");
-    ServiceInfo mrService = new ServiceInfo();
-    mrService.setName("MAPREDUCE");
-
-    expect(stackInfo.getService("HDP", "1.3.3", "HDFS")).andReturn(hdfsService);
-    expect(stackInfo.getService("HDP", "1.3.3", "MAPREDUCE")).andReturn(mrService);
+    AmbariMetaInfo ambariMetaInfo = createNiceMock(AmbariMetaInfo.class);
+    StackInfo stack = new StackInfo();
+    stack.setName("HDP");
+    stack.setVersion("1.3.3");
+
+    expect(controller.getAmbariMetaInfo()).andReturn(ambariMetaInfo).anyTimes();
+    expect(ambariMetaInfo.getStack("HDP", "1.3.3")).andReturn(stack).anyTimes();
+    replay(controller, ambariMetaInfo);
 
     Result result = new ResultImpl(true);
     createClusterResultTree(result.getResultTree());

+ 41 - 35
ambari-web/app/data/HDP2/site_properties.js

@@ -18,9 +18,7 @@
 
 var App = require('app');
 
-module.exports =
-{
-  "configProperties": [
+var hdp2properties = [
   //***************************************** HDP stack **************************************
   /**********************************************HDFS***************************************/
     {
@@ -46,37 +44,6 @@ module.exports =
       "filename": "hdfs-site.xml",
       "index": 3
     },
-    {
-      "id": "puppet var",
-      "name": "hadoop.user.name",
-      "displayName": "Hadoop User Name",
-      "description": "User to run Hadoop services under",
-      "defaultValue": "hadoop",
-      "isReconfigurable": false,
-      "displayType": "user",
-      "isOverridable": false,
-      "isVisible": App.get('isHadoopWindowsStack'),
-      "serviceName": "MISC",
-      "filename": "cluster-env.xml",
-      "category": "Users and Groups",
-      "belongsToService": ["HDFS"],
-      "index": 0
-    },
-    {
-      "id": "puppet var",
-      "name": "hadoop.user.password",
-      "displayName": "Hadoop User Password",
-      "description": "Password for hadoop user",
-      "isReconfigurable": false,
-      "displayType": "password",
-      "isOverridable": false,
-      "isVisible": App.get('isHadoopWindowsStack'),
-      "serviceName": "MISC",
-      "filename": "cluster-env.xml",
-      "category": "Users and Groups",
-      "belongsToService": ["HDFS"],
-      "index": 1
-    },
     {
       "id": "site property",
       "name": "dfs.namenode.name.dir",
@@ -4540,5 +4507,44 @@ module.exports =
       "category": "Ambari Principals",
       "index" : 6
     }
-  ]
+  ];
+if (App.get('isHadoopWindowsStack')) {
+  hdp2properties.push(
+    {
+      "id": "puppet var",
+      "name": "hadoop.user.name",
+      "displayName": "Hadoop User Name",
+      "description": "User to run Hadoop services under",
+      "defaultValue": "hadoop",
+      "isReconfigurable": false,
+      "displayType": "user",
+      "isOverridable": false,
+      "isVisible": App.get('isHadoopWindowsStack'),
+      "serviceName": "MISC",
+      "filename": "cluster-env.xml",
+      "category": "Users and Groups",
+      "belongsToService": ["HDFS"],
+      "index": 0
+    },
+    {
+      "id": "puppet var",
+      "name": "hadoop.user.password",
+      "displayName": "Hadoop User Password",
+      "description": "Password for hadoop user",
+      "isReconfigurable": false,
+      "displayType": "password",
+      "isOverridable": false,
+      "isVisible": App.get('isHadoopWindowsStack'),
+      "serviceName": "MISC",
+      "filename": "cluster-env.xml",
+      "category": "Users and Groups",
+      "belongsToService": ["HDFS"],
+      "index": 1
+    }
+  );
+}
+
+module.exports =
+{
+  "configProperties": hdp2properties
 };

+ 1 - 1
ambari-web/app/utils/config.js

@@ -830,7 +830,7 @@ App.config = Em.Object.create({
     var properties = [];
     if (data.items.length) {
       data.items.forEach(function (item) {
-        item.StackLevelConfigurations.property_type = item.StackConfigurations.property_type || [];
+        item.StackLevelConfigurations.property_type = item.StackLevelConfigurations.property_type || [];
         item.StackLevelConfigurations.service_name = 'MISC';
         var property = this.createAdvancedPropertyObject(item.StackLevelConfigurations);
         if (property) properties.push(property);

+ 4 - 8
ambari-web/test/mock_data_setup/configs_mock_data.js

@@ -471,13 +471,11 @@ module.exports = {
   advancedClusterConfigs: {
     items: [
       {
-        "StackConfigurations" : {
-          "property_type" : [ ]
-        },
         "StackLevelConfigurations" : {
           "final" : "false",
           "property_description" : "Whether to ignore failures on users and group creation",
           "property_name" : "ignore_groupsusers_create",
+          "property_type" : [ ],
           "property_value" : "false",
           "stack_name" : "HDP",
           "stack_version" : "2.2",
@@ -485,15 +483,13 @@ module.exports = {
         }
       },
       {
-        "StackConfigurations" : {
-          "property_type" : [
-            "GROUP"
-          ]
-        },
         "StackLevelConfigurations" : {
           "final" : "false",
           "property_description" : "Hadoop user group.",
           "property_name" : "user_group",
+          "property_type" : [
+            "GROUP"
+          ],
           "property_value" : "hadoop",
           "stack_name" : "HDP",
           "stack_version" : "2.2",