Bläddra i källkod

AMBARI-6635. Fix upgrade to respect -env (aonishuk)

Andrew Onishuk 11 år sedan
förälder
incheckning
bdc6a78980

+ 8 - 0
ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ClusterDAO.java

@@ -93,6 +93,14 @@ public class ClusterDAO {
   public void createConfig(ClusterConfigEntity entity) {
     entityManagerProvider.get().persist(entity);
   }
+  
+  /**
+   * Remove a cluster configuration in the DB.
+   */
+  @Transactional
+  public void removeConfig(ClusterConfigEntity entity) {
+    entityManagerProvider.get().remove(entity);
+  }
 
   /**
    * Retrieve entity data from DB

+ 110 - 2
ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java

@@ -33,12 +33,19 @@ import java.util.concurrent.TimeUnit;
 import com.google.common.cache.Cache;
 import com.google.common.cache.CacheBuilder;
 import com.google.inject.Singleton;
+
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 
 import com.google.inject.Inject;
-import org.apache.ambari.server.configuration.Configuration;
+import com.google.inject.persist.Transactional;
 
+import org.apache.ambari.server.configuration.Configuration;
+import org.apache.ambari.server.controller.AmbariManagementController;
+import org.apache.ambari.server.controller.ConfigurationRequest;
+import org.apache.ambari.server.orm.dao.ClusterDAO;
+import org.apache.ambari.server.orm.entities.ClusterConfigEntity;
+import org.apache.ambari.server.orm.entities.ClusterConfigEntityPK;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 /**
@@ -49,6 +56,7 @@ public class ConfigHelper {
 
   private Clusters clusters = null;
   private AmbariMetaInfo ambariMetaInfo = null;
+  private ClusterDAO clusterDAO = null;
   private static final String DELETED = "DELETED_";
   public static final String CLUSTER_DEFAULT_TAG = "tag";
   private final boolean STALE_CONFIGS_CACHE_ENABLED;
@@ -59,9 +67,10 @@ public class ConfigHelper {
     LoggerFactory.getLogger(ConfigHelper.class);
 
   @Inject
-  public ConfigHelper(Clusters c, AmbariMetaInfo metaInfo, Configuration configuration) {
+  public ConfigHelper(Clusters c, AmbariMetaInfo metaInfo, Configuration configuration, ClusterDAO clusterDAO) {
     clusters = c;
     ambariMetaInfo = metaInfo;
+    this.clusterDAO = clusterDAO;
     STALE_CONFIGS_CACHE_ENABLED = configuration.isStaleConfigCacheEnabled();
     staleConfigsCache = CacheBuilder.newBuilder().
       expireAfterWrite(STALE_CONFIGS_CACHE_EXPIRATION_TIME, TimeUnit.SECONDS).build();
@@ -355,6 +364,105 @@ public class ConfigHelper {
   public void invalidateStaleConfigsCache(ServiceComponentHost sch) {
     staleConfigsCache.invalidate(sch);
   }
+  
+  /**
+   * Remove configs by type
+   * @param type config Type
+   */
+  @Transactional
+  public void removeConfigsByType(Cluster cluster, String type) {
+    Set<String> globalVersions = cluster.getConfigsByType(type).keySet();
+    
+    for(String version:globalVersions) {
+      ClusterConfigEntityPK clusterConfigEntityPK = new ClusterConfigEntityPK();
+      clusterConfigEntityPK.setClusterId(cluster.getClusterId());
+      clusterConfigEntityPK.setTag(version);
+      clusterConfigEntityPK.setType(type);
+      ClusterConfigEntity clusterConfigEntity = clusterDAO.findConfig
+        (clusterConfigEntityPK);
+      
+      clusterDAO.removeConfig(clusterConfigEntity);
+    }
+  }
+  
+  /**
+   * Gets all the config dictionary where property with the given name is present in stack definitions
+   * @param cluster
+   * @param propertyName
+   */
+  public Set<String> findConfigTypesByPropertyName(Cluster cluster, String propertyName) throws AmbariException {
+    StackId stackId = cluster.getCurrentStackVersion();
+    StackInfo stack = ambariMetaInfo.getStackInfo(stackId.getStackName(),
+        stackId.getStackVersion());
+    
+    Set<String> result = new HashSet<String>();
+    
+    for(ServiceInfo serviceInfo:stack.getServices()) {
+      // skip not installed services
+      if(!cluster.getServices().containsKey(serviceInfo.getName())) {
+        continue;
+      }
+      
+      Set<PropertyInfo> stackProperties = ambariMetaInfo.getProperties(stack.getName(), stack.getVersion(), serviceInfo.getName());
+      
+      for (PropertyInfo stackProperty : stackProperties) {
+        if(stackProperty.getName().equals(propertyName)) {
+          int extIndex = stackProperty.getFilename().indexOf(AmbariMetaInfo.SERVICE_CONFIG_FILE_NAME_POSTFIX);
+          String configType = stackProperty.getFilename().substring(0, extIndex);
+          
+          result.add(configType);
+        }
+      }
+      
+    }
+    
+    return result;
+  }
+  
+  public String getPropertyValueFromStackDefenitions(Cluster cluster, String configType, String propertyName) throws AmbariException {
+    StackId stackId = cluster.getCurrentStackVersion();
+    StackInfo stack = ambariMetaInfo.getStackInfo(stackId.getStackName(),
+        stackId.getStackVersion());
+    
+    for(ServiceInfo serviceInfo:stack.getServices()) {     
+      Set<PropertyInfo> stackProperties = ambariMetaInfo.getProperties(stack.getName(), stack.getVersion(), serviceInfo.getName());
+      
+      for (PropertyInfo stackProperty : stackProperties) {
+        int extIndex = stackProperty.getFilename().indexOf(AmbariMetaInfo.SERVICE_CONFIG_FILE_NAME_POSTFIX);
+        String stackPropertyConfigType = stackProperty.getFilename().substring(0, extIndex);
+        
+        if(stackProperty.getName().equals(propertyName) && stackPropertyConfigType.equals(configType)) {
+          return stackProperty.getValue();
+        }
+      }
+      
+    }
+    
+    return null;
+  }
+  
+  public void createConfigType(Cluster cluster, AmbariManagementController ambariManagementController, 
+      String configType, Map<String, String> properties, String authName) throws AmbariException {
+    String tag;
+    if(cluster.getConfigsByType(configType) == null) {
+      tag = "version1";
+    } else {
+      tag = "version" + System.currentTimeMillis();
+    }
+    
+    ConfigurationRequest cr = new ConfigurationRequest();
+    cr.setClusterName(cluster.getClusterName());
+    cr.setVersionTag(tag);
+    cr.setType(configType);
+    cr.setProperties(properties);
+    ambariManagementController.createConfiguration(cr);
+    
+    Config baseConfig = cluster.getConfig(cr.getType(), cr.getVersionTag());
+    
+    if (baseConfig != null) {
+      cluster.addDesiredConfig(authName, baseConfig);
+    }
+  }
 
   private boolean calculateIsStaleConfigs(ServiceComponentHost sch) throws AmbariException {
 

+ 20 - 11
ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java

@@ -22,7 +22,9 @@ import com.google.inject.Inject;
 import com.google.inject.Injector;
 import com.google.inject.Provider;
 import com.google.inject.persist.Transactional;
+
 import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.controller.ConfigurationRequest;
@@ -32,11 +34,13 @@ import org.apache.ambari.server.orm.entities.MetainfoEntity;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
+import org.apache.ambari.server.state.DesiredConfig;
 import org.apache.ambari.server.utils.VersionUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import javax.persistence.EntityManager;
+
 import java.sql.SQLException;
 import java.text.MessageFormat;
 import java.util.Comparator;
@@ -179,7 +183,7 @@ public abstract class AbstractUpgradeCatalog implements UpgradeCatalog {
    * @param properties Map of key value pairs to add / update.
    */
   protected void updateConfigurationProperties(String configType,
-        Map<String, String> properties, boolean updateIfExists) throws
+        Map<String, String> properties, boolean updateIfExists, boolean createNewConfigType) throws
     AmbariException {
     AmbariManagementController controller = injector.getInstance(AmbariManagementController.class);
     String newTag = "version" + System.currentTimeMillis();
@@ -192,18 +196,22 @@ public abstract class AbstractUpgradeCatalog implements UpgradeCatalog {
 
     if (clusterMap != null && !clusterMap.isEmpty()) {
       for (Cluster cluster : clusterMap.values()) {
-        Config oldConfig = cluster.getDesiredConfigByType(configType);
-        if (oldConfig == null) {
-          LOG.info("Config " + configType + " not found. Assuming service not installed. " +
-              "Skipping configuration properties update");
-          return;
-        }
-
         if (properties != null) {
           Map<String, Config> all = cluster.getConfigsByType(configType);
           if (all == null || !all.containsKey(newTag) || properties.size() > 0) {
-
-            Map<String, String> oldConfigProperties = oldConfig.getProperties();
+            Map<String, String> oldConfigProperties;
+            Config oldConfig = cluster.getDesiredConfigByType(configType);
+            
+            if (oldConfig == null && !createNewConfigType) {
+              LOG.info("Config " + configType + " not found. Assuming service not installed. " +
+                  "Skipping configuration properties update");
+              return;
+            } else if (oldConfig == null) {
+              oldConfigProperties = new HashMap<String, String>();
+              newTag = "version1";
+            } else {
+              oldConfigProperties = oldConfig.getProperties();
+            }
 
             Map<String, String> mergedProperties =
               mergeProperties(oldConfigProperties, properties, updateIfExists);
@@ -224,11 +232,12 @@ public abstract class AbstractUpgradeCatalog implements UpgradeCatalog {
                 String authName = "ambari-upgrade";
 
                 if (cluster.addDesiredConfig(authName, baseConfig)) {
+                  String oldConfigString = (oldConfig != null) ? " from='" + oldConfig.getVersionTag() + "'" : "";
                   LOG.info("cluster '" + cluster.getClusterName() + "' "
                     + "changed by: '" + authName + "'; "
                     + "type='" + baseConfig.getType() + "' "
                     + "tag='" + baseConfig.getVersionTag() + "'"
-                    + " from='" + oldConfig.getVersionTag() + "'");
+                    + oldConfigString);
                 }
               }
             } else {

+ 1 - 1
ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog150.java

@@ -699,7 +699,7 @@ public class UpgradeCatalog150 extends AbstractUpgradeCatalog {
               configEntityPK.setType(configType);
               configEntityPK.setTag(defaultVersionTag);
               ClusterConfigEntity configEntity = clusterDAO.findConfig(configEntityPK);
-
+              
               if (configEntity == null) {
                 String filename = configType + ".xml";
                 Map<String, String> properties = new HashMap<String, String>();

+ 1 - 1
ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog160.java

@@ -111,7 +111,7 @@ public class UpgradeCatalog160 extends AbstractUpgradeCatalog {
         "VALUES('viewentity_id_seq', 0)", true);
 
     // Add missing property for YARN
-    updateConfigurationProperties("global", Collections.singletonMap("jobhistory_heapsize", "900"), false);
+    updateConfigurationProperties("global", Collections.singletonMap("jobhistory_heapsize", "900"), false, false);
   }
 
   @Override

+ 4 - 4
ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog161.java

@@ -297,10 +297,10 @@ public class UpgradeCatalog161 extends AbstractUpgradeCatalog {
   }
   
   protected void addMissingConfigs() throws AmbariException {
-    updateConfigurationProperties("hbase-site", Collections.singletonMap("hbase.regionserver.info.port", "60030"), false);
-    updateConfigurationProperties("hbase-site", Collections.singletonMap("hbase.master.info.port", "60010"), false);
-    updateConfigurationProperties("global", Collections.singletonMap("oozie_admin_port", "11001"), false);
-    updateConfigurationProperties("hive-site", Collections.singletonMap("hive.heapsize", "1024"), false);
+    updateConfigurationProperties("hbase-site", Collections.singletonMap("hbase.regionserver.info.port", "60030"), false, false);
+    updateConfigurationProperties("hbase-site", Collections.singletonMap("hbase.master.info.port", "60010"), false, false);
+    updateConfigurationProperties("global", Collections.singletonMap("oozie_admin_port", "11001"), false, false);
+    updateConfigurationProperties("hive-site", Collections.singletonMap("hive.heapsize", "1024"), false, false);
   }
 
   @Override

+ 136 - 2
ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java

@@ -20,14 +20,25 @@ package org.apache.ambari.server.upgrade;
 
 import java.sql.SQLException;
 import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
 
 import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.configuration.Configuration;
+import org.apache.ambari.server.controller.AmbariManagementController;
+import org.apache.ambari.server.controller.ConfigurationRequest;
 import org.apache.ambari.server.orm.DBAccessor;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.Config;
+import org.apache.ambari.server.state.ConfigHelper;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.ambari.server.configuration.Configuration;
 
 import com.google.inject.Inject;
 import com.google.inject.Injector;
@@ -36,6 +47,9 @@ import com.google.inject.Injector;
  * Upgrade catalog for version 1.7.0.
  */
 public class UpgradeCatalog170 extends AbstractUpgradeCatalog {
+  private Injector injector;
+  private static final String CONTENT_FIELD_NAME = "content";
+  private static final String ENV_CONFIGS_POSTFIX = "-env";
 
   //SourceVersion is only for book-keeping purpos
   @Override
@@ -59,6 +73,7 @@ public class UpgradeCatalog170 extends AbstractUpgradeCatalog {
   @Inject
   public UpgradeCatalog170(Injector injector) {
     super(injector);
+    this.injector = injector;
   }
 
 
@@ -139,7 +154,7 @@ public class UpgradeCatalog170 extends AbstractUpgradeCatalog {
     dbAccessor.createTable("adminprivilege", columns, "privilege_id");
 
     dbAccessor.executeQuery("insert into adminprivilege (privilege_id, permission_id, resource_id, principal_id)\n" +
-        "  select 1, 1, 1, 1", true);
+        "  select 1, 1, 1, 1", true);
 
 
     DBAccessor.DBColumnInfo clusterConfigAttributesColumn = new DBAccessor.DBColumnInfo(
@@ -207,5 +222,124 @@ public class UpgradeCatalog170 extends AbstractUpgradeCatalog {
 
     dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
         + valueColumnName + ") " + "VALUES('alert_notice_id_seq', 0)", true);
+    
+    moveGlobalsToEnv();
+    addEnvContentFields();
+  }
+  
+  protected void addEnvContentFields() throws AmbariException {
+    ConfigHelper configHelper = injector.getInstance(ConfigHelper.class);
+    AmbariManagementController ambariManagementController = injector.getInstance(
+        AmbariManagementController.class);
+
+    Clusters clusters = ambariManagementController.getClusters();
+    if (clusters == null) {
+      return;
+    }
+ 
+    Map<String, Cluster> clusterMap = clusters.getClusters();
+
+    if (clusterMap != null && !clusterMap.isEmpty()) {
+      for (final Cluster cluster : clusterMap.values()) {
+        Set<String> configTypes = configHelper.findConfigTypesByPropertyName(cluster, CONTENT_FIELD_NAME);  
+        
+        for(String configType:configTypes) {
+          if(!configType.endsWith(ENV_CONFIGS_POSTFIX)) {
+            continue;
+          }
+          
+          String value = configHelper.getPropertyValueFromStackDefenitions(cluster, configType, CONTENT_FIELD_NAME);
+          updateConfigurationProperties(configType, Collections.singletonMap(CONTENT_FIELD_NAME, value), true, true);
+        }
+      }
+    }
+  }
+  
+  protected void moveGlobalsToEnv() throws AmbariException {
+    ConfigHelper configHelper = injector.getInstance(ConfigHelper.class);
+    
+    AmbariManagementController ambariManagementController = injector.getInstance(
+        AmbariManagementController.class);
+    Clusters clusters = ambariManagementController.getClusters();
+    if (clusters == null) {
+      return;
+    }
+    Map<String, Cluster> clusterMap = clusters.getClusters();
+
+    if (clusterMap != null && !clusterMap.isEmpty()) {
+      for (final Cluster cluster : clusterMap.values()) {
+        Config config = cluster.getDesiredConfigByType(Configuration.GLOBAL_CONFIG_TAG);
+        if (config == null) {
+          LOG.info("Config " + Configuration.GLOBAL_CONFIG_TAG + " not found. Assuming upgrade already done.");
+          return;
+        }
+        
+        Map<String, Map<String, String>> newProperties = new HashMap<String, Map<String, String>>();
+        Map<String, String> globalProperites = config.getProperties();
+        Map<String, String> unmappedGlobalProperties = new HashMap<String, String>();
+        
+        for (Map.Entry<String, String> property : globalProperites.entrySet()) {
+          String propertyName = property.getKey();
+          String propertyValue = property.getValue();
+          
+          Set<String> newConfigTypes = configHelper.findConfigTypesByPropertyName(cluster, propertyName);
+          // if it's custom user service global.xml can be still there.
+          newConfigTypes.remove(Configuration.GLOBAL_CONFIG_TAG);
+          
+          String newConfigType = null;
+          if(newConfigTypes.size() > 0) {
+            newConfigType = newConfigTypes.iterator().next();
+          } else {
+            newConfigType = getAdditionalMappingGlobalToEnv().get(propertyName);
+          }
+          
+          if(newConfigType==null) {
+            LOG.warn("Cannot find where to map " + propertyName + " from " + Configuration.GLOBAL_CONFIG_TAG +
+                " (value="+propertyValue+")");
+            unmappedGlobalProperties.put(propertyName, propertyValue);
+            continue;
+          }
+          
+          LOG.info("Mapping config " + propertyName + " from " + Configuration.GLOBAL_CONFIG_TAG + 
+              " to " + newConfigType +
+              " (value="+propertyValue+")");
+          
+          if(!newProperties.containsKey(newConfigType)) {
+            newProperties.put(newConfigType, new HashMap<String, String>());
+          }
+          newProperties.get(newConfigType).put(propertyName, propertyValue);
+        }
+        
+        for (Entry<String, Map<String, String>> newProperty : newProperties.entrySet()) {
+          updateConfigurationProperties(newProperty.getKey(), newProperty.getValue(), true, true);
+        }
+        
+        // if have some custom properties, for own services etc., leave that as it was
+        if(unmappedGlobalProperties.size() != 0) {
+          LOG.info("Not deleting globals because have custom properties");
+          configHelper.createConfigType(cluster, ambariManagementController, Configuration.GLOBAL_CONFIG_TAG, unmappedGlobalProperties, "ambari-upgrade");
+        } else {
+          configHelper.removeConfigsByType(cluster, Configuration.GLOBAL_CONFIG_TAG);
+        }
+      }
+    }
+  }
+  
+  public static Map<String, String> getAdditionalMappingGlobalToEnv() {
+    Map<String, String> result = new HashMap<String, String>();
+    
+    result.put("smokeuser_keytab","hadoop-env");
+    result.put("hdfs_user_keytab","hadoop-env");
+    result.put("kerberos_domain","hadoop-env");
+    result.put("hbase_user_keytab","hbase-env");
+    result.put("nagios_principal_name","nagios-env");
+    result.put("nagios_keytab_path","nagios-env");
+    result.put("oozie_keytab","oozie-env");
+    result.put("zookeeper_principal_name","zookeeper-env");
+    result.put("zookeeper_keytab_path","zookeeper-env");
+    result.put("storm_principal_name","storm-env");
+    result.put("storm_keytab","storm-env");
+    
+    return result;
   }
 }

+ 1 - 1
ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog160Test.java

@@ -97,7 +97,7 @@ public class UpgradeCatalog160Test {
     expect(configuration.getDatabaseUrl()).andReturn(Configuration.JDBC_IN_MEMORY_URL).anyTimes();
 
     upgradeCatalog.updateConfigurationProperties("global",
-      Collections.singletonMap("jobhistory_heapsize", "900"), false);
+      Collections.singletonMap("jobhistory_heapsize", "900"), false, false);
 
     expectLastCall();
 

+ 4 - 4
ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog161Test.java

@@ -140,19 +140,19 @@ public class UpgradeCatalog161Test {
     expect(query.executeUpdate()).andReturn(0);
     
     upgradeCatalog.updateConfigurationProperties("hbase-site",
-        Collections.singletonMap("hbase.regionserver.info.port", "60030"), false);
+        Collections.singletonMap("hbase.regionserver.info.port", "60030"), false, false);
     expectLastCall();
 
     upgradeCatalog.updateConfigurationProperties("hbase-site",
-        Collections.singletonMap("hbase.master.info.port", "60010"), false);
+        Collections.singletonMap("hbase.master.info.port", "60010"), false, false);
     expectLastCall();
     
     upgradeCatalog.updateConfigurationProperties("global",
-        Collections.singletonMap("oozie_admin_port", "11001"), false);
+        Collections.singletonMap("oozie_admin_port", "11001"), false, false);
     expectLastCall();
     
     upgradeCatalog.updateConfigurationProperties("hive-site",
-        Collections.singletonMap("hive.heapsize", "1024"), false);
+        Collections.singletonMap("hive.heapsize", "1024"), false, false);
     expectLastCall();
 
     replay(upgradeCatalog, dbAccessor, configuration, injector, provider, em,

+ 3 - 3
ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalogTest.java

@@ -159,7 +159,7 @@ public class UpgradeCatalogTest {
     // Add new
     UpgradeCatalog149 testCatalog = injector.getInstance(UpgradeCatalog149.class);
     testCatalog.updateConfigurationProperties("global",
-      Collections.singletonMap("x", "y"), false);
+      Collections.singletonMap("x", "y"), false, false);
     config = cluster.getDesiredConfigByType("global");
     String version = config.getVersionTag();
     Assert.assertNotNull(config);
@@ -169,7 +169,7 @@ public class UpgradeCatalogTest {
 
     // Override value
     testCatalog.updateConfigurationProperties("global",
-      Collections.singletonMap("x", "z"), true);
+      Collections.singletonMap("x", "z"), true, false);
     config = cluster.getDesiredConfigByType("global");
     Assert.assertNotNull(config);
     Assert.assertNotSame(version, config.getVersionTag());
@@ -179,7 +179,7 @@ public class UpgradeCatalogTest {
 
     // Retain original
     testCatalog.updateConfigurationProperties("global",
-      Collections.singletonMap("x", "y"), false);
+      Collections.singletonMap("x", "y"), false, false);
     config = cluster.getDesiredConfigByType("global");
     Assert.assertNotNull(config);
     Assert.assertSame(version, config.getVersionTag());