Browse Source

AMBARI-7415. Properties from Xml should be automatically added during upgrade (aonishuk)

Andrew Onishuk 10 years ago
parent
commit
d99a275f55

+ 39 - 0
ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java

@@ -474,6 +474,8 @@ public class ConfigHelper {
     
     
     for(ServiceInfo serviceInfo:stack.getServices()) {     
     for(ServiceInfo serviceInfo:stack.getServices()) {     
       Set<PropertyInfo> stackProperties = ambariMetaInfo.getProperties(stack.getName(), stack.getVersion(), serviceInfo.getName());
       Set<PropertyInfo> stackProperties = ambariMetaInfo.getProperties(stack.getName(), stack.getVersion(), serviceInfo.getName());
+      Set<PropertyInfo> stackLevelProperties = ambariMetaInfo.getStackProperties(stack.getName(), stack.getVersion());
+      stackProperties.addAll(stackLevelProperties);
       
       
       for (PropertyInfo stackProperty : stackProperties) {
       for (PropertyInfo stackProperty : stackProperties) {
         String stackPropertyConfigType = fileNameToConfigType(stackProperty.getFilename());
         String stackPropertyConfigType = fileNameToConfigType(stackProperty.getFilename());
@@ -488,6 +490,43 @@ public class ConfigHelper {
     return null;
     return null;
   }
   }
   
   
+  public ServiceInfo getPropertyOwnerService(Cluster cluster, String configType, String propertyName) throws AmbariException {
+    StackId stackId = cluster.getCurrentStackVersion();
+    StackInfo stack = ambariMetaInfo.getStackInfo(stackId.getStackName(),
+        stackId.getStackVersion());
+    
+    for(ServiceInfo serviceInfo:stack.getServices()) {     
+      Set<PropertyInfo> stackProperties = ambariMetaInfo.getProperties(stack.getName(), stack.getVersion(), serviceInfo.getName());
+      
+      for (PropertyInfo stackProperty : stackProperties) {
+        String stackPropertyConfigType = fileNameToConfigType(stackProperty.getFilename());
+        
+        if(stackProperty.getName().equals(propertyName) && stackPropertyConfigType.equals(configType)) {
+          return serviceInfo;
+        }
+      }
+      
+    }
+    
+    return null;
+  }
+  
+  public Set<PropertyInfo> getServiceProperties(Cluster cluster, String serviceName) throws AmbariException {
+    StackId stackId = cluster.getCurrentStackVersion();
+    StackInfo stack = ambariMetaInfo.getStackInfo(stackId.getStackName(),
+        stackId.getStackVersion());
+    
+    return ambariMetaInfo.getProperties(stack.getName(), stack.getVersion(), serviceName);
+  }
+  
+  public Set<PropertyInfo> getStackProperties(Cluster cluster) throws AmbariException {
+    StackId stackId = cluster.getCurrentStackVersion();
+    StackInfo stack = ambariMetaInfo.getStackInfo(stackId.getStackName(),
+        stackId.getStackVersion());
+    
+    return ambariMetaInfo.getStackProperties(stack.getName(), stack.getVersion());
+  }
+  
   public void createConfigType(Cluster cluster, AmbariManagementController ambariManagementController, 
   public void createConfigType(Cluster cluster, AmbariManagementController ambariManagementController, 
       String configType, Map<String, String> properties, String authName) throws AmbariException {
       String configType, Map<String, String> properties, String authName) throws AmbariException {
     String tag;
     String tag;

+ 165 - 50
ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java

@@ -22,6 +22,7 @@ import com.google.inject.Inject;
 import com.google.inject.Injector;
 import com.google.inject.Injector;
 import com.google.inject.Provider;
 import com.google.inject.Provider;
 import com.google.inject.persist.Transactional;
 import com.google.inject.persist.Transactional;
+
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.controller.AmbariManagementController;
@@ -32,16 +33,26 @@ import org.apache.ambari.server.orm.entities.MetainfoEntity;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
 import org.apache.ambari.server.state.Config;
+import org.apache.ambari.server.state.ConfigHelper;
+import org.apache.ambari.server.state.PropertyInfo;
+import org.apache.ambari.server.state.Service;
+import org.apache.ambari.server.state.ServiceInfo;
+import org.apache.ambari.server.state.StackId;
+import org.apache.ambari.server.state.StackInfo;
 import org.apache.ambari.server.utils.VersionUtils;
 import org.apache.ambari.server.utils.VersionUtils;
 import org.slf4j.Logger;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.slf4j.LoggerFactory;
 
 
 import javax.persistence.EntityManager;
 import javax.persistence.EntityManager;
+
 import java.sql.SQLException;
 import java.sql.SQLException;
 import java.util.Collections;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.Comparator;
 import java.util.HashMap;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.Map;
 import java.util.Map;
+import java.util.Set;
+import java.util.Map.Entry;
 
 
 public abstract class AbstractUpgradeCatalog implements UpgradeCatalog {
 public abstract class AbstractUpgradeCatalog implements UpgradeCatalog {
   @Inject
   @Inject
@@ -172,6 +183,158 @@ public abstract class AbstractUpgradeCatalog implements UpgradeCatalog {
       dbAccessor.executeQuery(String.format("ALTER ROLE %s SET search_path to '%s';", dbUser, schemaName));
       dbAccessor.executeQuery(String.format("ALTER ROLE %s SET search_path to '%s';", dbUser, schemaName));
     }
     }
   }
   }
+  
+  public void addNewConfigurationsFromXml() throws AmbariException {
+    ConfigHelper configHelper = injector.getInstance(ConfigHelper.class);
+    AmbariManagementController controller = injector.getInstance(AmbariManagementController.class);
+    
+    Clusters clusters = controller.getClusters();
+    if (clusters == null) {
+      return;
+    }
+    Map<String, Cluster> clusterMap = clusters.getClusters();
+
+    if (clusterMap != null && !clusterMap.isEmpty()) {
+      for (Cluster cluster : clusterMap.values()) {
+        Map<String, Set<String>> newProperties = new HashMap<String, Set<String>>();
+        
+        Set<PropertyInfo> stackProperties = configHelper.getStackProperties(cluster);
+        for(String serviceName: cluster.getServices().keySet()) {
+          Set<PropertyInfo> properties = configHelper.getServiceProperties(cluster, serviceName);
+          
+          if(properties == null) {
+            continue;
+          }
+          properties.addAll(stackProperties);
+          
+          for(PropertyInfo property:properties) {
+            String configType = ConfigHelper.fileNameToConfigType(property.getFilename());
+            Config clusterConfigs = cluster.getDesiredConfigByType(configType);
+            if(clusterConfigs == null || !clusterConfigs.getProperties().containsKey(property.getName())) {
+              LOG.info("Config " + property.getName() + " from " + configType + " from xml configurations" +
+                  " is not found on the cluster. Adding it...");
+              
+              if(!newProperties.containsKey(configType)) {
+                newProperties.put(configType, new HashSet<String>());
+              }
+              newProperties.get(configType).add(property.getName());
+            }
+          }
+        }
+        
+        
+        
+        for (Entry<String, Set<String>> newProperty : newProperties.entrySet()) {
+          updateConfigurationPropertiesWithValuesFromXml(newProperty.getKey(), newProperty.getValue(), false, true);
+        }
+      }
+    }
+  }
+  
+  /**
+   * Create a new cluster scoped configuration with the new properties added
+   * with the values from the corresponding xml files.
+   * 
+   * If xml owner service is not in the cluster, the configuration won't be added.
+   * 
+   * @param configType Configuration type. (hdfs-site, etc.)
+   * @param propertyNames Set of property names.
+   */
+  protected void updateConfigurationPropertiesWithValuesFromXml(String configType,
+      Set<String> propertyNames, boolean updateIfExists, boolean createNewConfigType) throws AmbariException {
+    ConfigHelper configHelper = injector.getInstance(ConfigHelper.class);
+    AmbariManagementController controller = injector.getInstance(AmbariManagementController.class);
+    
+    Clusters clusters = controller.getClusters();
+    if (clusters == null) {
+      return;
+    }
+    Map<String, Cluster> clusterMap = clusters.getClusters();
+
+    if (clusterMap != null && !clusterMap.isEmpty()) {
+      for (Cluster cluster : clusterMap.values()) {
+        Map<String, String> properties = new HashMap<String, String>();
+        
+        for(String propertyName:propertyNames) {
+          String propertyValue = configHelper.getPropertyValueFromStackDefenitions(cluster, configType, propertyName);
+          
+          if(propertyValue == null) {
+            LOG.info("Config " + propertyName + " from " + configType + " is not found in xml definitions." +
+                "Skipping configuration property update");
+            continue;
+          }
+          
+          ServiceInfo propertyService = configHelper.getPropertyOwnerService(cluster, configType, propertyName);
+          if(propertyService != null && !cluster.getServices().containsKey(propertyService.getName())) {
+            LOG.info("Config " + propertyName + " from " + configType + " with value = " + propertyValue + " " +
+                "Is not added due to service " + propertyService.getName() + " is not in the cluster.");
+            continue;
+          }
+          
+          properties.put(propertyName, propertyValue);
+        }
+        
+        updateConfigurationPropertiesForCluster(cluster, configType,
+            properties, updateIfExists, createNewConfigType);
+      }
+    }
+  }
+  
+  protected void updateConfigurationPropertiesForCluster(Cluster cluster, String configType,
+      Map<String, String> properties, boolean updateIfExists, boolean createNewConfigType) throws AmbariException {
+    AmbariManagementController controller = injector.getInstance(AmbariManagementController.class);
+    String newTag = "version" + System.currentTimeMillis();
+    
+    if (properties != null) {
+      Map<String, Config> all = cluster.getConfigsByType(configType);
+      if (all == null || !all.containsKey(newTag) || properties.size() > 0) {
+        Map<String, String> oldConfigProperties;
+        Config oldConfig = cluster.getDesiredConfigByType(configType);
+        
+        if (oldConfig == null && !createNewConfigType) {
+          LOG.info("Config " + configType + " not found. Assuming service not installed. " +
+              "Skipping configuration properties update");
+          return;
+        } else if (oldConfig == null) {
+          oldConfigProperties = new HashMap<String, String>();
+          newTag = "version1";
+        } else {
+          oldConfigProperties = oldConfig.getProperties();
+        }
+
+        Map<String, String> mergedProperties =
+          mergeProperties(oldConfigProperties, properties, updateIfExists);
+
+        if (!Maps.difference(oldConfigProperties, mergedProperties).areEqual()) {
+          LOG.info("Applying configuration with tag '{}' to " +
+            "cluster '{}'", newTag, cluster.getClusterName());
+
+          ConfigurationRequest cr = new ConfigurationRequest();
+          cr.setClusterName(cluster.getClusterName());
+          cr.setVersionTag(newTag);
+          cr.setType(configType);
+          cr.setProperties(mergedProperties);
+          controller.createConfiguration(cr);
+
+          Config baseConfig = cluster.getConfig(cr.getType(), cr.getVersionTag());
+          if (baseConfig != null) {
+            String authName = "ambari-upgrade";
+
+            if (cluster.addDesiredConfig(authName, Collections.singleton(baseConfig)) != null) {
+              String oldConfigString = (oldConfig != null) ? " from='" + oldConfig.getTag() + "'" : "";
+              LOG.info("cluster '" + cluster.getClusterName() + "' "
+                + "changed by: '" + authName + "'; "
+                + "type='" + baseConfig.getType() + "' "
+                + "tag='" + baseConfig.getTag() + "'"
+                + oldConfigString);
+            }
+          }
+        } else {
+          LOG.info("No changes detected to config " + configType + ". Skipping configuration properties update");
+        }
+      }
+    }
+  }
 
 
   /**
   /**
    * Create a new cluster scoped configuration with the new properties added
    * Create a new cluster scoped configuration with the new properties added
@@ -183,7 +346,6 @@ public abstract class AbstractUpgradeCatalog implements UpgradeCatalog {
         Map<String, String> properties, boolean updateIfExists, boolean createNewConfigType) throws
         Map<String, String> properties, boolean updateIfExists, boolean createNewConfigType) throws
     AmbariException {
     AmbariException {
     AmbariManagementController controller = injector.getInstance(AmbariManagementController.class);
     AmbariManagementController controller = injector.getInstance(AmbariManagementController.class);
-    String newTag = "version" + System.currentTimeMillis();
 
 
     Clusters clusters = controller.getClusters();
     Clusters clusters = controller.getClusters();
     if (clusters == null) {
     if (clusters == null) {
@@ -193,55 +355,8 @@ public abstract class AbstractUpgradeCatalog implements UpgradeCatalog {
 
 
     if (clusterMap != null && !clusterMap.isEmpty()) {
     if (clusterMap != null && !clusterMap.isEmpty()) {
       for (Cluster cluster : clusterMap.values()) {
       for (Cluster cluster : clusterMap.values()) {
-        if (properties != null) {
-          Map<String, Config> all = cluster.getConfigsByType(configType);
-          if (all == null || !all.containsKey(newTag) || properties.size() > 0) {
-            Map<String, String> oldConfigProperties;
-            Config oldConfig = cluster.getDesiredConfigByType(configType);
-            
-            if (oldConfig == null && !createNewConfigType) {
-              LOG.info("Config " + configType + " not found. Assuming service not installed. " +
-                  "Skipping configuration properties update");
-              return;
-            } else if (oldConfig == null) {
-              oldConfigProperties = new HashMap<String, String>();
-              newTag = "version1";
-            } else {
-              oldConfigProperties = oldConfig.getProperties();
-            }
-
-            Map<String, String> mergedProperties =
-              mergeProperties(oldConfigProperties, properties, updateIfExists);
-
-            if (!Maps.difference(oldConfigProperties, mergedProperties).areEqual()) {
-              LOG.info("Applying configuration with tag '{}' to " +
-                "cluster '{}'", newTag, cluster.getClusterName());
-
-              ConfigurationRequest cr = new ConfigurationRequest();
-              cr.setClusterName(cluster.getClusterName());
-              cr.setVersionTag(newTag);
-              cr.setType(configType);
-              cr.setProperties(mergedProperties);
-              controller.createConfiguration(cr);
-
-              Config baseConfig = cluster.getConfig(cr.getType(), cr.getVersionTag());
-              if (baseConfig != null) {
-                String authName = "ambari-upgrade";
-
-                if (cluster.addDesiredConfig(authName, Collections.singleton(baseConfig)) != null) {
-                  String oldConfigString = (oldConfig != null) ? " from='" + oldConfig.getTag() + "'" : "";
-                  LOG.info("cluster '" + cluster.getClusterName() + "' "
-                    + "changed by: '" + authName + "'; "
-                    + "type='" + baseConfig.getType() + "' "
-                    + "tag='" + baseConfig.getTag() + "'"
-                    + oldConfigString);
-                }
-              }
-            } else {
-              LOG.info("No changes detected to config " + configType + ". Skipping configuration properties update");
-            }
-          }
-        }
+        updateConfigurationPropertiesForCluster(cluster, configType,
+            properties, updateIfExists, createNewConfigType);
       }
       }
     }
     }
   }
   }

+ 1 - 1
ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog161.java

@@ -311,7 +311,7 @@ public class UpgradeCatalog161 extends AbstractUpgradeCatalog {
             "reducer=1000000000\npig.exec.reducers.max=999\n\n# Temporary location to store the intermediate " +
             "reducer=1000000000\npig.exec.reducers.max=999\n\n# Temporary location to store the intermediate " +
             "data.\npig.temp.dir=/tmp/\n\n# Threshold for merging FRJoin fragment files\npig.files.concatenation." +
             "data.\npig.temp.dir=/tmp/\n\n# Threshold for merging FRJoin fragment files\npig.files.concatenation." +
             "threshold=100\npig.optimistic.files.concatenation=false;\n\npig.disable.counter=false\n\n" +
             "threshold=100\npig.optimistic.files.concatenation=false;\n\npig.disable.counter=false\n\n" +
-            "hcat.bin=/usr/bin/hcat"), true, true);
+            "hcat.bin=/usr/bin/hcat"), true, false);
   }
   }
 
 
   @Override
   @Override

+ 4 - 23
ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java

@@ -633,11 +633,11 @@ public class UpgradeCatalog170 extends AbstractUpgradeCatalog {
 
 
     moveGlobalsToEnv();
     moveGlobalsToEnv();
     addEnvContentFields();
     addEnvContentFields();
-    addMissingConfigs();
     renamePigProperties();
     renamePigProperties();
     upgradePermissionModel();
     upgradePermissionModel();
     addJobsViewPermissions();
     addJobsViewPermissions();
     moveConfigGroupsGlobalToEnv();
     moveConfigGroupsGlobalToEnv();
+    addMissingConfigs();
   }
   }
 
 
   public void moveHcatalogIntoHiveService() throws AmbariException {
   public void moveHcatalogIntoHiveService() throws AmbariException {
@@ -993,25 +993,7 @@ public class UpgradeCatalog170 extends AbstractUpgradeCatalog {
   }
   }
 
 
   protected void addMissingConfigs() throws AmbariException {
   protected void addMissingConfigs() throws AmbariException {
-    updateConfigurationProperties("hbase-env",
-        Collections.singletonMap("hbase_regionserver_xmn_max", "512"), false,
-        false);
-
-    updateConfigurationProperties("hbase-env",
-        Collections.singletonMap("hbase_regionserver_xmn_ratio", "0.2"), false,
-        false);
-
-    updateConfigurationProperties("yarn-env",
-        Collections.singletonMap("min_user_id", "1000"), false,
-        false);
-
-    updateConfigurationProperties("sqoop-env", Collections.singletonMap("sqoop_user", "sqoop"), false, false);
-
-    updateConfigurationProperties("hadoop-env",
-            Collections.singletonMap("hadoop_root_logger", "INFO,RFA"), false,
-            false);
-
-    updateConfigurationProperties("oozie-env", Collections.singletonMap("oozie_admin_port", "11001"), false, false);
+    addNewConfigurationsFromXml();
   }
   }
 
 
   /**
   /**
@@ -1065,8 +1047,7 @@ public class UpgradeCatalog170 extends AbstractUpgradeCatalog {
             continue;
             continue;
           }
           }
 
 
-          String value = configHelper.getPropertyValueFromStackDefenitions(cluster, configType, CONTENT_FIELD_NAME);
-          updateConfigurationProperties(configType, Collections.singletonMap(CONTENT_FIELD_NAME, value), true, true);
+          updateConfigurationPropertiesWithValuesFromXml(configType, Collections.singleton(CONTENT_FIELD_NAME), false, true);
         }
         }
       }
       }
     }
     }
@@ -1129,7 +1110,7 @@ public class UpgradeCatalog170 extends AbstractUpgradeCatalog {
         }
         }
 
 
         for (Entry<String, Map<String, String>> newProperty : newProperties.entrySet()) {
         for (Entry<String, Map<String, String>> newProperty : newProperties.entrySet()) {
-          updateConfigurationProperties(newProperty.getKey(), newProperty.getValue(), true, true);
+          updateConfigurationProperties(newProperty.getKey(), newProperty.getValue(), false, true);
         }
         }
 
 
         // if have some custom properties, for own services etc., leave that as it was
         // if have some custom properties, for own services etc., leave that as it was

+ 1 - 1
ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog161Test.java

@@ -172,7 +172,7 @@ public class UpgradeCatalog161Test {
             "reducer=1000000000\npig.exec.reducers.max=999\n\n# Temporary location to store the intermediate " +
             "reducer=1000000000\npig.exec.reducers.max=999\n\n# Temporary location to store the intermediate " +
             "data.\npig.temp.dir=/tmp/\n\n# Threshold for merging FRJoin fragment files\npig.files.concatenation." +
             "data.\npig.temp.dir=/tmp/\n\n# Threshold for merging FRJoin fragment files\npig.files.concatenation." +
             "threshold=100\npig.optimistic.files.concatenation=false;\n\npig.disable.counter=false\n\n" +
             "threshold=100\npig.optimistic.files.concatenation=false;\n\npig.disable.counter=false\n\n" +
-            "hcat.bin=/usr/bin/hcat"), true, true);
+            "hcat.bin=/usr/bin/hcat"), true, false);
     expectLastCall();
     expectLastCall();
 
 
     replay(upgradeCatalog, dbAccessor, configuration, injector, provider, em,
     replay(upgradeCatalog, dbAccessor, configuration, injector, provider, em,

+ 6 - 29
ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java

@@ -423,9 +423,11 @@ public class UpgradeCatalog170Test {
     Method m = AbstractUpgradeCatalog.class.getDeclaredMethod
     Method m = AbstractUpgradeCatalog.class.getDeclaredMethod
         ("updateConfigurationProperties", String.class, Map.class, boolean.class, boolean.class);
         ("updateConfigurationProperties", String.class, Map.class, boolean.class, boolean.class);
     Method n = AbstractUpgradeCatalog.class.getDeclaredMethod("getEntityManagerProvider");
     Method n = AbstractUpgradeCatalog.class.getDeclaredMethod("getEntityManagerProvider");
-
+    Method l = AbstractUpgradeCatalog.class.getDeclaredMethod
+        ("addNewConfigurationsFromXml");
+    
     UpgradeCatalog170 upgradeCatalog = createMockBuilder(UpgradeCatalog170.class)
     UpgradeCatalog170 upgradeCatalog = createMockBuilder(UpgradeCatalog170.class)
-      .addMockedMethod(m).addMockedMethod(n).createMock();
+      .addMockedMethod(m).addMockedMethod(n).addMockedMethod(l).createMock();
 
 
     List<ConfigGroupConfigMappingEntity> configGroupConfigMappingEntities =
     List<ConfigGroupConfigMappingEntity> configGroupConfigMappingEntities =
             new ArrayList<ConfigGroupConfigMappingEntity>();
             new ArrayList<ConfigGroupConfigMappingEntity>();
@@ -456,34 +458,10 @@ public class UpgradeCatalog170Test {
     contentOfHadoopEnv.put("content", "env file contents");
     contentOfHadoopEnv.put("content", "env file contents");
 
 
     upgradeCatalog.updateConfigurationProperties("hadoop-env",
     upgradeCatalog.updateConfigurationProperties("hadoop-env",
-        globalConfigs, true, true);
-    expectLastCall();
-
-    upgradeCatalog.updateConfigurationProperties("hadoop-env",
-        contentOfHadoopEnv, true, true);
-    expectLastCall();
-
-    upgradeCatalog.updateConfigurationProperties("hbase-env",
-        Collections.singletonMap("hbase_regionserver_xmn_max", "512"), false, false);
-    expectLastCall();
-
-    upgradeCatalog.updateConfigurationProperties("hbase-env",
-        Collections.singletonMap("hbase_regionserver_xmn_ratio", "0.2"), false, false);
-    expectLastCall();
-
-    upgradeCatalog.updateConfigurationProperties("yarn-env",
-        Collections.singletonMap("min_user_id", "1000"), false, false);
-    expectLastCall();
-
-    upgradeCatalog.updateConfigurationProperties("sqoop-env", Collections.singletonMap("sqoop_user", "sqoop"), false, false);
-    expectLastCall();
-
-    upgradeCatalog.updateConfigurationProperties("hadoop-env",
-            Collections.singletonMap("hadoop_root_logger", "INFO,RFA"), false, false);
+        globalConfigs, false, true);
     expectLastCall();
     expectLastCall();
 
 
-    upgradeCatalog.updateConfigurationProperties("oozie-env",
-            Collections.singletonMap("oozie_admin_port", "11001"), false, false);
+    upgradeCatalog.addNewConfigurationsFromXml();
     expectLastCall();
     expectLastCall();
 
 
     expect(dbAccessor.executeSelect("SELECT role_name, user_id FROM user_roles")).andReturn(userRolesResultSet).once();
     expect(dbAccessor.executeSelect("SELECT role_name, user_id FROM user_roles")).andReturn(userRolesResultSet).once();
@@ -521,7 +499,6 @@ public class UpgradeCatalog170Test {
     expect(configHelper.findConfigTypesByPropertyName(new StackId("HDP", "2.1"), "smokeuser_keytab", "c1")).andReturn(new HashSet<String>()).once();
     expect(configHelper.findConfigTypesByPropertyName(new StackId("HDP", "2.1"), "smokeuser_keytab", "c1")).andReturn(new HashSet<String>()).once();
     expect(configHelper.findConfigTypesByPropertyName(new StackId("HDP", "2.1"), "content", "c1")).andReturn(envDicts).once();
     expect(configHelper.findConfigTypesByPropertyName(new StackId("HDP", "2.1"), "content", "c1")).andReturn(envDicts).once();
     expect(configHelper.findConfigTypesByPropertyName(new StackId("HDP", "2.1"), "dtnode_heapsize", "c1")).andReturn(configTypes).once();
     expect(configHelper.findConfigTypesByPropertyName(new StackId("HDP", "2.1"), "dtnode_heapsize", "c1")).andReturn(configTypes).once();
-    expect(configHelper.getPropertyValueFromStackDefenitions(cluster, "hadoop-env", "content")).andReturn("env file contents").once();
 
 
     expect(injector.getInstance(ConfigGroupConfigMappingDAO.class)).andReturn(configGroupConfigMappingDAO).anyTimes();
     expect(injector.getInstance(ConfigGroupConfigMappingDAO.class)).andReturn(configGroupConfigMappingDAO).anyTimes();
     expect(injector.getInstance(UserDAO.class)).andReturn(userDAO).anyTimes();
     expect(injector.getInstance(UserDAO.class)).andReturn(userDAO).anyTimes();