Browse source

AMBARI-12021 Upgrade scripts from 2.0.x to 2.1.0 should handle moving of 'ranger-hive-plugin-enabled' into 'hive_security_authorization' (dsen)

Dmytro Sen 10 years ago
parent
commit
61fc03f174

+ 32 - 1
ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java

@@ -356,8 +356,19 @@ public abstract class AbstractUpgradeCatalog implements UpgradeCatalog {
     }
     }
   }
   }
 
 
+  /**
+   * Update properties for the cluster
+   * @param cluster cluster object
+   * @param configType config to be updated
+   * @param properties properties to be added or updated. Couldn't be <code>null</code>, but could be empty.
+   * @param removePropertiesList properties to be removed. Could be <code>null</code>
+   * @param updateIfExists
+   * @param createNewConfigType
+   * @throws AmbariException
+   */
   protected void updateConfigurationPropertiesForCluster(Cluster cluster, String configType,
   protected void updateConfigurationPropertiesForCluster(Cluster cluster, String configType,
-      Map<String, String> properties, boolean updateIfExists, boolean createNewConfigType) throws AmbariException {
+        Map<String, String> properties, Set<String> removePropertiesList, boolean updateIfExists,
+        boolean createNewConfigType) throws AmbariException {
     AmbariManagementController controller = injector.getInstance(AmbariManagementController.class);
     AmbariManagementController controller = injector.getInstance(AmbariManagementController.class);
     String newTag = "version" + System.currentTimeMillis();
     String newTag = "version" + System.currentTimeMillis();
 
 
@@ -381,6 +392,10 @@ public abstract class AbstractUpgradeCatalog implements UpgradeCatalog {
         Map<String, String> mergedProperties =
         Map<String, String> mergedProperties =
           mergeProperties(oldConfigProperties, properties, updateIfExists);
           mergeProperties(oldConfigProperties, properties, updateIfExists);
 
 
+        if (removePropertiesList != null) {
+          mergedProperties = removeProperties(mergedProperties, removePropertiesList);
+        }
+
         if (!Maps.difference(oldConfigProperties, mergedProperties).areEqual()) {
         if (!Maps.difference(oldConfigProperties, mergedProperties).areEqual()) {
           LOG.info("Applying configuration with tag '{}' to " +
           LOG.info("Applying configuration with tag '{}' to " +
             "cluster '{}'", newTag, cluster.getClusterName());
             "cluster '{}'", newTag, cluster.getClusterName());
@@ -412,6 +427,11 @@ public abstract class AbstractUpgradeCatalog implements UpgradeCatalog {
     }
     }
   }
   }
 
 
+  /**
+   * Backward-compatible overload kept for existing upgrade catalogs: updates
+   * cluster configuration properties without removing any, by delegating to the
+   * new 6-arg variant with a {@code null} remove list.
+   * @param cluster cluster object
+   * @param configType config type to be updated
+   * @param properties properties to be added or updated; may be empty, not null
+   * @param updateIfExists whether values of already-present properties are overwritten
+   * @param createNewConfigType presumably creates the config type when absent — confirm in callee
+   * @throws AmbariException
+   */
+  protected void updateConfigurationPropertiesForCluster(Cluster cluster, String configType,
+        Map<String, String> properties, boolean updateIfExists, boolean createNewConfigType) throws AmbariException {
+    updateConfigurationPropertiesForCluster(cluster, configType, properties, null, updateIfExists, createNewConfigType);
+  }
+
   /**
   /**
    * Create a new cluster scoped configuration with the new properties added
    * Create a new cluster scoped configuration with the new properties added
    * to the existing set of properties.
    * to the existing set of properties.
@@ -450,6 +470,17 @@ public abstract class AbstractUpgradeCatalog implements UpgradeCatalog {
     return properties;
     return properties;
   }
   }
 
 
+  /**
+   * Returns a copy of {@code originalProperties} with every key listed in
+   * {@code removeList} removed. The input map is not modified.
+   * @param originalProperties source properties (left untouched)
+   * @param removeList keys to drop from the returned copy
+   * @return a new map containing all entries except the listed keys
+   */
+  private Map<String, String> removeProperties(Map<String, String> originalProperties, Set<String> removeList){
+    Map<String, String> properties = new HashMap<String, String>();
+    properties.putAll(originalProperties);
+    for (String removeProperty: removeList){
+      // NOTE(review): the containsKey guard is redundant — Map.remove is a
+      // no-op for absent keys — but it is harmless.
+      if (originalProperties.containsKey(removeProperty)){
+        properties.remove(removeProperty);
+      }
+    }
+    return properties;
+  }
+
   @Override
   @Override
   public void upgradeSchema() throws AmbariException, SQLException {
   public void upgradeSchema() throws AmbariException, SQLException {
     DatabaseType databaseType = configuration.getDatabaseType();
     DatabaseType databaseType = configuration.getDatabaseType();

+ 36 - 2
ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java

@@ -42,7 +42,6 @@ import org.apache.ambari.server.orm.entities.StackEntity;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
 import org.apache.ambari.server.state.Config;
-import org.apache.ambari.server.state.PropertyInfo;
 import org.apache.ambari.server.state.Service;
 import org.apache.ambari.server.state.Service;
 import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.alert.AlertDefinitionFactory;
 import org.apache.ambari.server.state.alert.AlertDefinitionFactory;
@@ -66,6 +65,7 @@ import java.text.MessageFormat;
 import java.util.ArrayList;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.List;
 import java.util.Map;
 import java.util.Map;
 import java.util.Set;
 import java.util.Set;
@@ -1118,7 +1118,7 @@ public class UpgradeCatalog210 extends AbstractUpgradeCatalog {
       public void run() {
       public void run() {
         EntityManager em = getEntityManagerProvider().get();
         EntityManager em = getEntityManagerProvider().get();
         Query nativeQuery = em.createNativeQuery("UPDATE alert_definition SET alert_source=?1 WHERE " +
         Query nativeQuery = em.createNativeQuery("UPDATE alert_definition SET alert_source=?1 WHERE " +
-                "definition_name=?2");
+                                                   "definition_name=?2");
         nativeQuery.setParameter(1, source);
         nativeQuery.setParameter(1, source);
         nativeQuery.setParameter(2, alertName);
         nativeQuery.setParameter(2, alertName);
         nativeQuery.executeUpdate();
         nativeQuery.executeUpdate();
@@ -1130,6 +1130,40 @@ public class UpgradeCatalog210 extends AbstractUpgradeCatalog {
     updateHiveConfigs();
     updateHiveConfigs();
     updateHdfsConfigs();
     updateHdfsConfigs();
     updateStormConfigs();
     updateStormConfigs();
+    updateRangerHiveConfigs();
+  }
+
+  /**
+   * Migrates the legacy 'ranger-hive-plugin-enabled' flag out of the
+   * ranger-hive-plugin-properties config and into hive-env's
+   * 'hive_security_authorization' property for every cluster: a
+   * (case-insensitive) "yes" becomes "Ranger", anything else becomes "None".
+   * The legacy flag is then removed from ranger-hive-plugin-properties.
+   * @throws AmbariException
+   */
+  protected void updateRangerHiveConfigs() throws AmbariException{
+    AmbariManagementController ambariManagementController = injector.getInstance(
+            AmbariManagementController.class);
+    Clusters clusters = ambariManagementController.getClusters();
+
+    if (clusters != null) {
+      Map<String, Cluster> clusterMap = clusters.getClusters();
+      if (clusterMap != null && !clusterMap.isEmpty()) {
+        for (final Cluster cluster : clusterMap.values()) {
+          Config RangerHiveConfig = cluster.getDesiredConfigByType("ranger-hive-plugin-properties");
+          // Only migrate when the legacy flag is present AND a hive-env config exists.
+          if (RangerHiveConfig != null
+                  && RangerHiveConfig.getProperties().containsKey("ranger-hive-plugin-enabled")
+                  && cluster.getDesiredConfigByType("hive-env") != null) {
+            Map<String, String> newHiveEnvProperties = new HashMap<String, String>();
+            Set<String> removeRangerHiveProperties = new HashSet<String>();
+            removeRangerHiveProperties.add("ranger-hive-plugin-enabled");
+
+            // Null-guard is needed even after containsKey: the map may hold a null value.
+            if (RangerHiveConfig.getProperties().get("ranger-hive-plugin-enabled") != null
+                    && RangerHiveConfig.getProperties().get("ranger-hive-plugin-enabled").equalsIgnoreCase("yes")) {
+              newHiveEnvProperties.put("hive_security_authorization", "Ranger");
+            } else {
+              newHiveEnvProperties.put("hive_security_authorization", "None");
+            }
+            // Overwrite only when hive-env already defines hive_security_authorization.
+            // NOTE(review): presumably the merge still adds the key when updateIfExists
+            // is false and the key is absent — confirm against mergeProperties.
+            boolean updateProperty = cluster.getDesiredConfigByType("hive-env").getProperties().containsKey("hive_security_authorization");
+            updateConfigurationPropertiesForCluster(cluster, "hive-env", newHiveEnvProperties, updateProperty, true);
+            // Empty add-map: this call exists solely to remove the migrated flag.
+            updateConfigurationPropertiesForCluster(cluster, "ranger-hive-plugin-properties", new HashMap<String, String>(),
+                    removeRangerHiveProperties, false, true);
+          }
+        }
+      }
+    }
   }
   }
 
 
   protected void updateHdfsConfigs() throws AmbariException {
   protected void updateHdfsConfigs() throws AmbariException {

+ 42 - 0
ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog210Test.java

@@ -214,6 +214,48 @@ public class UpgradeCatalog210Test {
     verify(upgradeCatalog210);
     verify(upgradeCatalog210);
   }
   }
 
 
+  /**
+   * Exercises updateRangerHiveConfigs() with a single cluster whose
+   * ranger-hive-plugin-properties contains 'ranger-hive-plugin-enabled'="yes"
+   * and whose hive-env exists but lacks 'hive_security_authorization'.
+   * Nice mocks absorb the resulting updateConfigurationPropertiesForCluster
+   * calls; verifyAll() checks the strictly/explicitly expected interactions.
+   */
+  @Test
+  public void testUpdateRangerHiveConfigs() throws Exception{
+    EasyMockSupport easyMockSupport = new EasyMockSupport();
+    final AmbariManagementController  mockAmbariManagementController = easyMockSupport.createNiceMock(AmbariManagementController.class);
+    final ConfigHelper mockConfigHelper = easyMockSupport.createMock(ConfigHelper.class);
+
+    final Clusters mockClusters = easyMockSupport.createStrictMock(Clusters.class);
+    final Cluster mockClusterExpected = easyMockSupport.createNiceMock(Cluster.class);
+
+    final Config mockRangerPlugin = easyMockSupport.createNiceMock(Config.class);
+    final Config mockHiveEnv = easyMockSupport.createNiceMock(Config.class);
+
+    // "yes" should drive the catalog toward hive_security_authorization=Ranger.
+    final Map<String, String> propertiesExpectedRangerPlugin = new HashMap<String, String>();
+    propertiesExpectedRangerPlugin.put("ranger-hive-plugin-enabled", "yes");
+    final Map<String, String> propertiesExpectedHiveEnv = new HashMap<String, String>();
+    // Guice module wires the mocks into the UpgradeCatalog210 instance under test.
+    final Injector mockInjector = Guice.createInjector(new AbstractModule() {
+      @Override
+      protected void configure() {
+        bind(AmbariManagementController.class).toInstance(mockAmbariManagementController);
+        bind(ConfigHelper.class).toInstance(mockConfigHelper);
+        bind(Clusters.class).toInstance(mockClusters);
+
+        bind(DBAccessor.class).toInstance(createNiceMock(DBAccessor.class));
+        bind(OsFamily.class).toInstance(createNiceMock(OsFamily.class));
+      }
+    });
+
+    // Controller exposes exactly one cluster, keyed "normal".
+    expect(mockAmbariManagementController.getClusters()).andReturn(mockClusters).once();
+    expect(mockClusters.getClusters()).andReturn(new HashMap<String, Cluster>() {{
+      put("normal", mockClusterExpected);
+    }}).once();
+
+    expect(mockClusterExpected.getDesiredConfigByType("ranger-hive-plugin-properties")).andReturn(mockRangerPlugin).atLeastOnce();
+    expect(mockClusterExpected.getDesiredConfigByType("hive-env")).andReturn(mockHiveEnv).atLeastOnce();
+    expect(mockRangerPlugin.getProperties()).andReturn(propertiesExpectedRangerPlugin).anyTimes();
+    expect(mockHiveEnv.getProperties()).andReturn(propertiesExpectedHiveEnv).anyTimes();
+
+    easyMockSupport.replayAll();
+    mockInjector.getInstance(UpgradeCatalog210.class).updateRangerHiveConfigs();
+    easyMockSupport.verifyAll();
+  }
+
   @Test
   @Test
   public void testInitializeClusterAndServiceWidgets() throws Exception {
   public void testInitializeClusterAndServiceWidgets() throws Exception {
     final AmbariManagementController controller = createStrictMock(AmbariManagementController.class);
     final AmbariManagementController controller = createStrictMock(AmbariManagementController.class);