Merge remote-tracking branch 'remotes/alt/trunk' into branch-3.0-perf-unchanged. After-merge fixes.

Myroslav Papirkovskyi, 7 years ago
commit f62e103ced
22 changed files with 227 additions and 421 deletions
  1. + 2 - 1   ambari-common/src/main/python/resource_management/libraries/functions/log_process_information.py
  2. + 0 - 242   ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java
  3. + 8 - 5   ambari-server/src/main/java/org/apache/ambari/server/configuration/spring/ApiSecurityConfig.java
  4. + 18 - 7   ambari-server/src/main/java/org/apache/ambari/server/configuration/spring/GuiceBeansConfig.java
  5. + 16 - 2   ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
  6. + 22 - 5   ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
  7. + 59 - 63   ambari-server/src/main/java/org/apache/ambari/server/events/publishers/AgentCommandsPublisher.java
  8. + 1 - 3   ambari-server/src/main/java/org/apache/ambari/server/security/authentication/AmbariBasicAuthenticationFilter.java
  9. + 2 - 2   ambari-server/src/main/java/org/apache/ambari/server/state/alert/AlertUri.java
  10. + 3 - 4   ambari-server/src/main/java/org/apache/ambari/server/state/alert/Reporting.java
  11. + 18 - 0   ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/RepositoryVersionHelper.java
  12. + 1 - 1   ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog300.java
  13. + 4 - 5   ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java
  14. + 2 - 1   ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java
  15. + 4 - 6   ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelperTest.java
  16. + 1 - 1   ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
  17. + 1 - 1   ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java
  18. + 4 - 6   ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UserAuthenticationSourceResourceProviderTest.java
  19. + 12 - 6   ambari-server/src/test/java/org/apache/ambari/server/orm/OrmTestHelper.java
  20. + 1 - 1   ambari-server/src/test/java/org/apache/ambari/server/security/authentication/AmbariBasicAuthenticationFilterTest.java
  21. + 1 - 0   ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterTest.java
  22. + 47 - 59   ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog300Test.java

+ 2 - 1
ambari-common/src/main/python/resource_management/libraries/functions/log_process_information.py

@@ -19,7 +19,6 @@ limitations under the License.
 Ambari Agent
 
 """
-from ambari_commons.shell import shellRunner
 from ambari_commons.os_check import OSCheck
 
 __all__ = ["log_process_information"]
@@ -28,6 +27,8 @@ def log_process_information(logger):
   """
   Check if certain configuration sent from the server has been received.
   """
+
+  from ambari_commons.shell import shellRunner
   if OSCheck.is_windows_family():
     cmd_list = ["WMIC path win32_process get Caption,Processid,Commandline", "netstat -an"]
   else:

+ 0 - 242
ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java

@@ -37,16 +37,12 @@ import java.util.Map;
 import java.util.Objects;
 import java.util.Scanner;
 import java.util.Set;
-import java.util.function.Function;
 
 import javax.xml.bind.JAXBException;
 
-import org.apache.ambari.annotations.Experimental;
-import org.apache.ambari.annotations.ExperimentalFeature;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.ParentObjectNotFoundException;
 import org.apache.ambari.server.StackAccessException;
-import org.apache.ambari.server.agent.CommandRepository;
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.RootService;
 import org.apache.ambari.server.controller.utilities.PropertyHelper;
@@ -59,9 +55,6 @@ import org.apache.ambari.server.metadata.AmbariServiceAlertDefinitions;
 import org.apache.ambari.server.orm.dao.AlertDefinitionDAO;
 import org.apache.ambari.server.orm.dao.MetainfoDAO;
 import org.apache.ambari.server.orm.entities.AlertDefinitionEntity;
-import org.apache.ambari.server.orm.entities.OperatingSystemEntity;
-import org.apache.ambari.server.orm.entities.RepositoryEntity;
-import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
 import org.apache.ambari.server.stack.StackManager;
 import org.apache.ambari.server.stack.StackManagerFactory;
 import org.apache.ambari.server.state.Cluster;
@@ -69,12 +62,10 @@ import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.ComponentInfo;
 import org.apache.ambari.server.state.DependencyInfo;
 import org.apache.ambari.server.state.ExtensionInfo;
-import org.apache.ambari.server.state.Host;
 import org.apache.ambari.server.state.OperatingSystemInfo;
 import org.apache.ambari.server.state.PropertyInfo;
 import org.apache.ambari.server.state.RepositoryInfo;
 import org.apache.ambari.server.state.Service;
-import org.apache.ambari.server.state.ServiceComponent;
 import org.apache.ambari.server.state.ServiceInfo;
 import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.StackInfo;
@@ -98,9 +89,6 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import com.google.gson.Gson;
-import com.google.gson.JsonArray;
-import com.google.gson.JsonElement;
-import com.google.gson.JsonObject;
 import com.google.gson.reflect.TypeToken;
 import com.google.inject.Inject;
 import com.google.inject.Singleton;
@@ -1447,218 +1435,6 @@ public class AmbariMetaInfo {
 
     return versionDefinitions;
   }
-  /**
-   * Get repository info given a cluster and host.
-   *
-   * @param cluster  the cluster
-   * @param host     the host
-   *
-   * @return the repo info
-   *
-   * @throws AmbariException if the repository information can not be obtained
-  public String getRepoInfoString(Cluster cluster, Host host) throws AmbariException {
-
-    return getRepoInfoString(cluster, host.getOsType(), host.getOsFamily(), host.getHostName());
-  }*/
-
-  public String getRepoInfoString(Cluster cluster, ServiceComponent component, Host host) throws AmbariException {
-    return gson.toJson(getCommandRepository(cluster, component, host));
-  }
-
-  /**
-   * Get repository info given a cluster and host.
-   *
-   * @param cluster  the cluster
-   * @param host     the host
-   *
-   * @return the repo info
-   *
-   * @deprecated use {@link #getCommandRepository(Cluster, ServiceComponent, Host)} instead.
-   * @throws AmbariException if the repository information can not be obtained
-   */
-  @Deprecated
-  public String getRepoInfo(Cluster cluster, ServiceComponent component, Host host) throws AmbariException {
-
-    Function<List<RepositoryInfo>, JsonArray> function = new Function<List<RepositoryInfo>, JsonArray>() {
-      @Override
-      public JsonArray apply(List<RepositoryInfo> input) {
-        return null == input ? null : (JsonArray) gson.toJsonTree(input);
-      }
-    };
-
-    final JsonArray gsonList = getBaseUrls(cluster, component, host, function);
-
-    if (null == gsonList) {
-      return "";
-    }
-
-    BaseUrlUpdater<JsonArray> updater = new BaseUrlUpdater<JsonArray>(gsonList) {
-      @Override
-      public JsonArray apply(final RepositoryVersionEntity rve) {
-
-        JsonArray result = new JsonArray();
-
-        for (JsonElement e : gsonList) {
-          JsonObject obj = e.getAsJsonObject();
-
-          String repoId = obj.has("repoId") ? obj.get("repoId").getAsString() : null;
-          String repoName = obj.has("repoName") ? obj.get("repoName").getAsString() : null;
-          String baseUrl = obj.has("baseUrl") ? obj.get("baseUrl").getAsString() : null;
-          String osType = obj.has("osType") ? obj.get("osType").getAsString() : null;
-
-          if (null == repoId || null == baseUrl || null == osType || null == repoName) {
-            continue;
-          }
-
-          for (OperatingSystemEntity ose : rve.getOperatingSystems()) {
-            if (ose.getOsType().equals(osType) && ose.isAmbariManagedRepos()) {
-              for (RepositoryEntity re : ose.getRepositories()) {
-                if (re.getName().equals(repoName) &&
-                    !re.getBaseUrl().equals(baseUrl)) {
-                  obj.addProperty("baseUrl", re.getBaseUrl());
-                }
-              }
-              result.add(e);
-            }
-          }
-        }
-
-        return result;
-      }
-    };
-
-    return updateBaseUrls(cluster, component, updater).toString();
-  }
-
-  /**
-   * Builds repository information for inclusion in a command.  This replaces escaping json on
-   * a command.
-   *
-   * @param cluster the cluster
-   * @param host    the host
-   * @return  the command repository
-   * @throws AmbariException
-   */
-  @Experimental(feature=ExperimentalFeature.PATCH_UPGRADES)
-  public CommandRepository getCommandRepository(final Cluster cluster, ServiceComponent component, final Host host) throws AmbariException {
-
-    final CommandRepository command = new CommandRepository();
-    StackId stackId = component.getDesiredStackId();
-    command.setRepositories(Collections.<RepositoryInfo>emptyList());
-    command.setStackName(stackId.getStackName());
-
-    final BaseUrlUpdater<Void> updater = new BaseUrlUpdater<Void>(null) {
-      @Override
-      public Void apply(RepositoryVersionEntity rve) {
-        command.setRepositoryVersionId(rve.getId());
-        command.setRepositoryVersion(rve.getVersion());
-        command.setStackName(rve.getStackName());
-        command.setResolved(rve.isResolved());
-
-        // !!! a repository version entity has all the repos worked out.  We shouldn't use
-        // the stack at all.
-        for (OperatingSystemEntity osEntity : rve.getOperatingSystems()) {
-          String osEntityFamily = osFamily.find(osEntity.getOsType());
-          if (osEntityFamily.equals(host.getOsFamily())) {
-            command.setRepositories(osEntity.getOsType(), osEntity.getRepositories());
-
-            if (!osEntity.isAmbariManagedRepos()) {
-              command.setNonManaged();
-            } else {
-              command.setUniqueSuffix(String.format("-repo-%s", rve.getId()));
-            }
-          }
-        }
-
-        return null;
-      }
-    };
-
-    updateBaseUrls(cluster, component, updater);
-
-    return command;
-  }
-
-  /**
-   * Executed by two different representations of repos.  When we are comfortable with the new
-   * implementation, this may be removed and called inline in {@link #getCommandRepository(Cluster, ServiceComponent, Host)}
-   *
-   * @param cluster   the cluster to isolate the stack
-   * @param component the component
-   * @param host      used to resolve the family for the repositories
-   * @param function  function that will transform the supplied repositories for specific use.
-   * @return <T> the type as defined by the supplied {@code function}.
-   * @throws AmbariException
-   */
-  @Experimental(feature = ExperimentalFeature.PATCH_UPGRADES)
-  private <T> T getBaseUrls(Cluster cluster, ServiceComponent component, Host host,
-                            Function<List<RepositoryInfo>, T> function) throws AmbariException {
-
-    String hostOsType = host.getOsType();
-    String hostOsFamily = host.getOsFamily();
-    String hostName = host.getHostName();
-
-    StackId stackId = component.getDesiredStackId();
-
-    Map<String, List<RepositoryInfo>> repos = getRepository(
-        stackId.getStackName(), stackId.getStackVersion());
-
-    String family = osFamily.find(hostOsType);
-    if (null == family) {
-      family = hostOsFamily;
-    }
-
-    final List<RepositoryInfo> repoInfos;
-
-    // !!! check for the most specific first
-    if (repos.containsKey(hostOsType)) {
-      repoInfos = repos.get(hostOsType);
-    } else if (null != family && repos.containsKey(family)) {
-      repoInfos = repos.get(family);
-    } else {
-      repoInfos = null;
-      LOG.warn("Could not retrieve repo information for host"
-          + ", hostname=" + hostName
-          + ", clusterName=" + cluster.getClusterName()
-          + ", stackInfo=" + stackId.getStackId());
-    }
-
-    // leave it to function implementation to handle null.
-    return function.apply(repoInfos);
-  }
-
-  /**
-   * Checks repo URLs against the current version for the cluster and makes
-   * adjustments to the Base URL when the current is different.
-   *
-   * @param <T> the result after appling the repository version, if found.
-   */
-  @Experimental(feature = ExperimentalFeature.PATCH_UPGRADES)
-  private <T> T updateBaseUrls(Cluster cluster, ServiceComponent component, BaseUrlUpdater<T> function) throws AmbariException {
-
-    RepositoryVersionEntity repositoryEntity = null;
-
-    // !!! try to find the component repo first
-    if (null != component) {
-      repositoryEntity = component.getDesiredRepositoryVersion();
-    } else {
-      LOG.info("Service component not passed in, attempt to resolve the repository for cluster {}",
-          cluster.getClusterName());
-    }
-
-    if (null == repositoryEntity && null != component) {
-      Service service = cluster.getService(component.getServiceName());
-
-      repositoryEntity = service.getDesiredRepositoryVersion();
-    }
-
-    if (null == repositoryEntity) {
-      LOG.info("Cluster {} has no specific Repository Versions.  Using stack-defined values", cluster.getClusterName());
-      return function.getDefault();
-    }
-
-    return function.apply(repositoryEntity);
-  }
 
   /**
    * Reads a Kerberos descriptor from the specified file path.
@@ -1689,24 +1465,6 @@ public class AmbariMetaInfo {
     return null;
   }
 
-  /**
-   * Class that is used to update base urls.  There are two implementations of this - when we no
-   * longer are sure the deprecated repo info can be removed, so too can this class.
-   */
-  @Experimental(feature= ExperimentalFeature.PATCH_UPGRADES)
-  abstract static class BaseUrlUpdater<T> implements Function<RepositoryVersionEntity, T> {
-    private T m_default;
-
-    private BaseUrlUpdater(T defaultValue) {
-      m_default = defaultValue;
-    }
-
-    private T getDefault() {
-      return m_default;
-    }
-
-  }
-
   public File getCommonWidgetsDescriptorFile() {
     return commonWidgetsDescriptorFile;
   }

+ 8 - 5
ambari-server/src/main/java/org/apache/ambari/server/configuration/spring/ApiSecurityConfig.java

@@ -19,12 +19,13 @@ package org.apache.ambari.server.configuration.spring;
 
 import org.apache.ambari.server.security.AmbariEntryPoint;
 import org.apache.ambari.server.security.authentication.AmbariDelegatingAuthenticationFilter;
+import org.apache.ambari.server.security.authentication.AmbariLocalAuthenticationProvider;
+import org.apache.ambari.server.security.authentication.jwt.AmbariJwtAuthenticationProvider;
 import org.apache.ambari.server.security.authentication.kerberos.AmbariAuthToLocalUserDetailsService;
 import org.apache.ambari.server.security.authentication.kerberos.AmbariKerberosTicketValidator;
+import org.apache.ambari.server.security.authentication.pam.AmbariPamAuthenticationProvider;
 import org.apache.ambari.server.security.authorization.AmbariAuthorizationFilter;
 import org.apache.ambari.server.security.authorization.AmbariLdapAuthenticationProvider;
-import org.apache.ambari.server.security.authorization.AmbariLocalUserProvider;
-import org.apache.ambari.server.security.authorization.AmbariPamAuthenticationProvider;
 import org.apache.ambari.server.security.authorization.internal.AmbariInternalAuthenticationProvider;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.context.annotation.Bean;
@@ -61,14 +62,16 @@ public class ApiSecurityConfig extends WebSecurityConfigurerAdapter{
 
   @Autowired
   public void configureAuthenticationManager(AuthenticationManagerBuilder auth,
-                                             AmbariLocalUserProvider ambariLocalUserProvider,
+                                             AmbariJwtAuthenticationProvider ambariJwtAuthenticationProvider,
                                              AmbariPamAuthenticationProvider ambariPamAuthenticationProvider,
+                                             AmbariLocalAuthenticationProvider ambariLocalAuthenticationProvider,
                                              AmbariLdapAuthenticationProvider ambariLdapAuthenticationProvider,
                                              AmbariInternalAuthenticationProvider ambariInternalAuthenticationProvider,
                                              KerberosServiceAuthenticationProvider kerberosServiceAuthenticationProvider
-                                        ) {
-    auth.authenticationProvider(ambariLocalUserProvider)
+  ) {
+    auth.authenticationProvider(ambariJwtAuthenticationProvider)
         .authenticationProvider(ambariPamAuthenticationProvider)
+        .authenticationProvider(ambariLocalAuthenticationProvider)
         .authenticationProvider(ambariLdapAuthenticationProvider)
         .authenticationProvider(ambariInternalAuthenticationProvider)
         .authenticationProvider(kerberosServiceAuthenticationProvider);
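
For orientation, the reworked configureAuthenticationManager above simply registers each AuthenticationProvider on the shared AuthenticationManagerBuilder; Spring Security then tries the providers in registration order until one supports and authenticates the incoming token. A minimal, generic sketch of that registration pattern (hypothetical provider names, not the Ambari classes above):

    import org.springframework.beans.factory.annotation.Autowired;
    import org.springframework.context.annotation.Configuration;
    import org.springframework.security.authentication.AuthenticationProvider;
    import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder;
    import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
    import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;

    @Configuration
    @EnableWebSecurity
    public class ProviderChainConfig extends WebSecurityConfigurerAdapter {

      @Autowired
      public void configureAuthenticationManager(AuthenticationManagerBuilder auth,
                                                 AuthenticationProvider tokenProvider,      // e.g. a JWT provider
                                                 AuthenticationProvider passwordProvider) { // e.g. a local-user provider
        // Registration order matters: the resulting ProviderManager walks the
        // providers in this order and uses the first one that supports() the
        // token type and authenticates it successfully.
        auth.authenticationProvider(tokenProvider)
            .authenticationProvider(passwordProvider);
      }
    }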

+ 18 - 7
ambari-server/src/main/java/org/apache/ambari/server/configuration/spring/GuiceBeansConfig.java

@@ -19,9 +19,11 @@ package org.apache.ambari.server.configuration.spring;
 
 import org.apache.ambari.server.agent.stomp.AgentsRegistrationQueue;
 import org.apache.ambari.server.audit.AuditLogger;
+import org.apache.ambari.server.security.authentication.AmbariAuthenticationEventHandlerImpl;
+import org.apache.ambari.server.security.authentication.AmbariLocalAuthenticationProvider;
+import org.apache.ambari.server.security.authentication.jwt.AmbariJwtAuthenticationProvider;
+import org.apache.ambari.server.security.authentication.pam.AmbariPamAuthenticationProvider;
 import org.apache.ambari.server.security.authorization.AmbariLdapAuthenticationProvider;
-import org.apache.ambari.server.security.authorization.AmbariLocalUserProvider;
-import org.apache.ambari.server.security.authorization.AmbariPamAuthenticationProvider;
 import org.apache.ambari.server.security.authorization.AmbariUserAuthorizationFilter;
 import org.apache.ambari.server.security.authorization.PermissionHelper;
 import org.apache.ambari.server.security.authorization.internal.AmbariInternalAuthenticationProvider;
@@ -65,11 +67,6 @@ public class GuiceBeansConfig {
     return injector.getInstance(AmbariLdapAuthenticationProvider.class);
   }
 
-  @Bean
-  public AmbariLocalUserProvider localUserProvider() {
-    return injector.getInstance(AmbariLocalUserProvider.class);
-  }
-
   @Bean
   public AmbariLdapDataPopulator ambariLdapDataPopulator() {
     return injector.getInstance(AmbariLdapDataPopulator.class);
@@ -84,12 +81,26 @@ public class GuiceBeansConfig {
   public AmbariInternalAuthenticationProvider ambariInternalAuthenticationProvider() {
     return injector.getInstance(AmbariInternalAuthenticationProvider.class);
   }
+  @Bean
+  public AmbariJwtAuthenticationProvider ambariJwtAuthenticationProvider() {
+    return injector.getInstance(AmbariJwtAuthenticationProvider.class);
+  }
 
   @Bean
   public AmbariPamAuthenticationProvider ambariPamAuthenticationProvider() {
     return injector.getInstance(AmbariPamAuthenticationProvider.class);
   }
 
+  @Bean
+  public AmbariLocalAuthenticationProvider ambariLocalAuthenticationProvider() {
+    return injector.getInstance(AmbariLocalAuthenticationProvider.class);
+  }
+
+  @Bean
+  public AmbariAuthenticationEventHandlerImpl ambariAuthenticationEventHandler() {
+    return injector.getInstance(AmbariAuthenticationEventHandlerImpl.class);
+  }
+
 
   @Bean
   public AgentRegisteringQueueChecker agentRegisteringQueueChecker() {
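
The new @Bean methods added here follow the same bridging idiom as the existing ones: Spring does not construct the object itself, it re-exposes the Guice-managed singleton so both containers share a single instance. A stripped-down sketch of that idiom with a placeholder service type (not an Ambari class):

    import com.google.inject.Injector;
    import org.springframework.context.annotation.Bean;
    import org.springframework.context.annotation.Configuration;

    @Configuration
    public class GuiceBridgeConfig {
      private final Injector injector;

      public GuiceBridgeConfig(Injector injector) {
        this.injector = injector;
      }

      @Bean
      public SomeGuiceBoundService someGuiceBoundService() {
        // Look the instance up in Guice rather than letting Spring instantiate it,
        // so Spring-managed classes can autowire the same singleton.
        return injector.getInstance(SomeGuiceBoundService.class);
      }
    }

    // Placeholder for any type that is bound in the Guice injector.
    interface SomeGuiceBoundService {}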

+ 16 - 2
ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java

@@ -61,6 +61,7 @@ import org.apache.ambari.server.actionmanager.HostRoleCommand;
 import org.apache.ambari.server.actionmanager.HostRoleStatus;
 import org.apache.ambari.server.actionmanager.Stage;
 import org.apache.ambari.server.agent.AgentCommand.AgentCommandType;
+import org.apache.ambari.server.agent.CommandRepository;
 import org.apache.ambari.server.agent.ExecutionCommand;
 import org.apache.ambari.server.agent.ExecutionCommand.KeyNames;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
@@ -68,6 +69,7 @@ import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.internal.RequestOperationLevel;
 import org.apache.ambari.server.controller.internal.RequestResourceFilter;
 import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.server.controller.spi.SystemException;
 import org.apache.ambari.server.metadata.ActionMetadata;
 import org.apache.ambari.server.orm.dao.HostRoleCommandDAO;
 import org.apache.ambari.server.state.Cluster;
@@ -405,7 +407,13 @@ public class AmbariCustomCommandExecutionHelper {
       Map<String, String> hostLevelParams = new TreeMap<>();
 
       // Set parameters required for re-installing clients on restart
-      hostLevelParams.put(REPO_INFO, ambariMetaInfo.getRepoInfoString(cluster, component, host));
+      String repoInfoString;
+      try {
+        repoInfoString = repoVersionHelper.getRepoInfoString(cluster, component, host);
+      } catch (SystemException e) {
+        throw new RuntimeException(e);
+      }
+      hostLevelParams.put(REPO_INFO, repoInfoString);
       hostLevelParams.put(STACK_NAME, stackId.getStackName());
       hostLevelParams.put(STACK_VERSION, stackId.getStackVersion());
 
@@ -514,7 +522,13 @@ public class AmbariCustomCommandExecutionHelper {
       execCmd.setCommandParams(commandParams);
       execCmd.setRoleParams(roleParams);
 
-      execCmd.setRepositoryFile(ambariMetaInfo.getCommandRepository(cluster, component, host));
+      CommandRepository commandRepository;
+      try {
+        commandRepository = repoVersionHelper.getCommandRepository(cluster, component, host);
+      } catch (SystemException e) {
+        throw new RuntimeException(e);
+      }
+      execCmd.setRepositoryFile(commandRepository);
 
       // perform any server side command related logic - eg - set desired states on restart
       applyCustomCommandBackendLogic(cluster, serviceName, componentName, commandName, hostName);
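
Both replacements in this file (and the matching ones in AmbariManagementControllerImpl below) use the same pattern: the RepositoryVersionHelper methods declare the checked SystemException, and callers that cannot widen their own throws clauses rethrow it as an unchecked exception. A generic sketch of that wrapping, with a hypothetical helper name, keeping the original exception as the cause:

    /** Illustrative helper only; not part of the commit. */
    public final class Unchecked {

      @FunctionalInterface
      public interface CheckedSupplier<T> {
        T get() throws Exception;
      }

      public static <T> T wrap(CheckedSupplier<T> call) {
        try {
          return call.get();
        } catch (RuntimeException e) {
          throw e;                        // already unchecked, rethrow as-is
        } catch (Exception e) {
          throw new RuntimeException(e);  // preserve the checked exception as the cause
        }
      }

      private Unchecked() {
      }
    }

A call site would then read: String repoInfoString = Unchecked.wrap(() -> repoVersionHelper.getRepoInfoString(cluster, component, host)); which is equivalent to the explicit try/catch blocks added above.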

+ 22 - 5
ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java

@@ -133,6 +133,7 @@ import org.apache.ambari.server.controller.metrics.MetricPropertyProviderFactory
 import org.apache.ambari.server.controller.metrics.MetricsCollectorHAManager;
 import org.apache.ambari.server.controller.metrics.timeline.cache.TimelineMetricCacheProvider;
 import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.server.controller.spi.SystemException;
 import org.apache.ambari.server.customactions.ActionDefinition;
 import org.apache.ambari.server.events.MetadataUpdateEvent;
 import org.apache.ambari.server.events.TopologyUpdateEvent;
@@ -2523,8 +2524,12 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
     }
     StageUtils.useAmbariJdkInCommandParams(commandParams, configs);
 
-    // TODO add TODO to ambariMetaInfo.getRepoInfoString
-    String repoInfo = ambariMetaInfo.getRepoInfoString(cluster, component, host);
+    String repoInfo;
+    try {
+      repoInfo = repoVersionHelper.getRepoInfoString(cluster, component, host);
+    } catch (SystemException e) {
+      throw new RuntimeException(e);
+    }
     if (LOG.isDebugEnabled()) {
       LOG.debug("Sending repo information to agent, hostname={}, clusterName={}, stackInfo={}, repoInfo={}",
         scHost.getHostName(), clusterName, stackId.getStackId(), repoInfo);
@@ -2594,7 +2599,13 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
     execCmd.setRoleParams(roleParams);
     execCmd.setCommandParams(commandParams);
 
-    execCmd.setRepositoryFile(ambariMetaInfo.getCommandRepository(cluster, component, host));
+    CommandRepository commandRepository;
+    try {
+      commandRepository = repoVersionHelper.getCommandRepository(cluster, component, host);
+    } catch (SystemException e) {
+      throw new RuntimeException(e);
+    }
+    execCmd.setRepositoryFile(commandRepository);
     execCmdWrapper.setVersions(cluster);
 
     if ((execCmd != null) && (execCmd.getConfigurationTags().containsKey("cluster-env"))) {
@@ -5812,8 +5823,14 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
     Map<Long, CommandRepository> hostRepositories = new HashMap<>();
     Map<String, Long> componentsRepos = new HashMap<>();
     for (ServiceComponentHost serviceComponentHost : hostComponents) {
-      CommandRepository commandRepository = ambariMetaInfo.getCommandRepository(cluster,
-          serviceComponentHost.getServiceComponent(), host);
+
+      CommandRepository commandRepository;
+      try {
+        commandRepository = repoVersionHelper.getCommandRepository(cluster,
+            serviceComponentHost.getServiceComponent(), host);
+      } catch (SystemException e) {
+        throw new RuntimeException(e);
+      }
       hostRepositories.put(commandRepository.getRepoVersionId(), commandRepository);
       componentsRepos.put(serviceComponentHost.getServiceComponentName(), commandRepository.getRepoVersionId());
     }

+ 59 - 63
ambari-server/src/main/java/org/apache/ambari/server/events/publishers/AgentCommandsPublisher.java

@@ -27,9 +27,11 @@ import java.io.File;
 import java.io.FileInputStream;
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 import java.util.TreeMap;
 
 import org.apache.ambari.server.AmbariException;
@@ -41,8 +43,10 @@ import org.apache.ambari.server.agent.stomp.dto.ExecutionCommandsCluster;
 import org.apache.ambari.server.events.ExecutionCommandEvent;
 import org.apache.ambari.server.orm.dao.HostRoleCommandDAO;
 import org.apache.ambari.server.serveraction.kerberos.KerberosIdentityDataFileReader;
-import org.apache.ambari.server.serveraction.kerberos.KerberosIdentityDataFileReaderFactory;
 import org.apache.ambari.server.serveraction.kerberos.KerberosServerAction;
+import org.apache.ambari.server.serveraction.kerberos.stageutils.KerberosKeytabController;
+import org.apache.ambari.server.serveraction.kerberos.stageutils.ResolvedKerberosKeytab;
+import org.apache.ambari.server.serveraction.kerberos.stageutils.ResolvedKerberosPrincipal;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.utils.StageUtils;
 import org.apache.commons.codec.binary.Base64;
@@ -60,11 +64,8 @@ import com.google.inject.Singleton;
 public class AgentCommandsPublisher {
   private static final Logger LOG = LoggerFactory.getLogger(AgentCommandsPublisher.class);
 
-  /**
-   * KerberosIdentityDataFileReaderFactory used to create KerberosIdentityDataFileReader instances
-   */
   @Inject
-  private KerberosIdentityDataFileReaderFactory kerberosIdentityDataFileReaderFactory;
+  private KerberosKeytabController kerberosKeytabController;
 
   @Inject
   private Clusters clusters;
@@ -175,80 +176,75 @@ public class AgentCommandsPublisher {
    */
   void injectKeytab(ExecutionCommand ec, String command, String targetHost) throws AmbariException {
     String dataDir = ec.getCommandParams().get(KerberosServerAction.DATA_DIRECTORY);
-
+    KerberosServerAction.KerberosCommandParameters kerberosCommandParameters = new KerberosServerAction.KerberosCommandParameters(ec);
     if(dataDir != null) {
-      KerberosIdentityDataFileReader reader = null;
       List<Map<String, String>> kcp = ec.getKerberosCommandParams();
 
       try {
-        reader = kerberosIdentityDataFileReaderFactory.createKerberosIdentityDataFileReader(new File(dataDir, KerberosIdentityDataFileReader.DATA_FILE_NAME));
-
-        for (Map<String, String> record : reader) {
-          String hostName = record.get(KerberosIdentityDataFileReader.HOSTNAME);
-
-          if (targetHost.equalsIgnoreCase(hostName)) {
-
-            if (SET_KEYTAB.equalsIgnoreCase(command)) {
-              String keytabFilePath = record.get(KerberosIdentityDataFileReader.KEYTAB_FILE_PATH);
-
-              if (keytabFilePath != null) {
-
-                String sha1Keytab = DigestUtils.sha1Hex(keytabFilePath);
-                File keytabFile = new File(dataDir + File.separator + hostName + File.separator + sha1Keytab);
-
-                if (keytabFile.canRead()) {
-                  Map<String, String> keytabMap = new HashMap<>();
-                  String principal = record.get(KerberosIdentityDataFileReader.PRINCIPAL);
-                  String isService = record.get(KerberosIdentityDataFileReader.SERVICE);
-
+        Set<ResolvedKerberosKeytab> keytabsToInject = kerberosKeytabController.getFilteredKeytabs((Map<String, Collection<String>>)kerberosCommandParameters.getServiceComponentFilter(), kerberosCommandParameters.getHostFilter(), kerberosCommandParameters.getIdentityFilter());
+        for (ResolvedKerberosKeytab resolvedKeytab : keytabsToInject) {
+          for(ResolvedKerberosPrincipal resolvedPrincipal: resolvedKeytab.getPrincipals()) {
+            String hostName = resolvedPrincipal.getHostName();
+
+            if (targetHost.equalsIgnoreCase(hostName)) {
+
+              if (SET_KEYTAB.equalsIgnoreCase(command)) {
+                String keytabFilePath = resolvedKeytab.getFile();
+
+                if (keytabFilePath != null) {
+
+                  String sha1Keytab = DigestUtils.sha256Hex(keytabFilePath);
+                  File keytabFile = new File(dataDir + File.separator + hostName + File.separator + sha1Keytab);
+
+                  if (keytabFile.canRead()) {
+                    Map<String, String> keytabMap = new HashMap<>();
+                    String principal = resolvedPrincipal.getPrincipal();
+
+                    keytabMap.put(KerberosIdentityDataFileReader.HOSTNAME, hostName);
+                    keytabMap.put(KerberosIdentityDataFileReader.PRINCIPAL, principal);
+                    keytabMap.put(KerberosIdentityDataFileReader.KEYTAB_FILE_PATH, keytabFilePath);
+                    keytabMap.put(KerberosIdentityDataFileReader.KEYTAB_FILE_OWNER_NAME, resolvedKeytab.getOwnerName());
+                    keytabMap.put(KerberosIdentityDataFileReader.KEYTAB_FILE_OWNER_ACCESS, resolvedKeytab.getOwnerAccess());
+                    keytabMap.put(KerberosIdentityDataFileReader.KEYTAB_FILE_GROUP_NAME, resolvedKeytab.getGroupName());
+                    keytabMap.put(KerberosIdentityDataFileReader.KEYTAB_FILE_GROUP_ACCESS, resolvedKeytab.getGroupAccess());
+
+                    BufferedInputStream bufferedIn = new BufferedInputStream(new FileInputStream(keytabFile));
+                    byte[] keytabContent = null;
+                    try {
+                      keytabContent = IOUtils.toByteArray(bufferedIn);
+                    } finally {
+                      bufferedIn.close();
+                    }
+                    String keytabContentBase64 = Base64.encodeBase64String(keytabContent);
+                    keytabMap.put(KerberosServerAction.KEYTAB_CONTENT_BASE64, keytabContentBase64);
+
+                    kcp.add(keytabMap);
+                  }
+                }
+              } else if (REMOVE_KEYTAB.equalsIgnoreCase(command) || CHECK_KEYTABS.equalsIgnoreCase(command)) {
+                Map<String, String> keytabMap = new HashMap<>();
+                String keytabFilePath = resolvedKeytab.getFile();
+
+                String principal = resolvedPrincipal.getPrincipal();
+                for (Map.Entry<String, String> mappingEntry: resolvedPrincipal.getServiceMapping().entries()) {
+                  String serviceName = mappingEntry.getKey();
+                  String componentName = mappingEntry.getValue();
                   keytabMap.put(KerberosIdentityDataFileReader.HOSTNAME, hostName);
-                  keytabMap.put(KerberosIdentityDataFileReader.SERVICE, isService);
-                  keytabMap.put(KerberosIdentityDataFileReader.COMPONENT, record.get(KerberosIdentityDataFileReader.COMPONENT));
+                  keytabMap.put(KerberosIdentityDataFileReader.SERVICE, serviceName);
+                  keytabMap.put(KerberosIdentityDataFileReader.COMPONENT, componentName);
                   keytabMap.put(KerberosIdentityDataFileReader.PRINCIPAL, principal);
                   keytabMap.put(KerberosIdentityDataFileReader.KEYTAB_FILE_PATH, keytabFilePath);
-                  keytabMap.put(KerberosIdentityDataFileReader.KEYTAB_FILE_OWNER_NAME, record.get(KerberosIdentityDataFileReader.KEYTAB_FILE_OWNER_NAME));
-                  keytabMap.put(KerberosIdentityDataFileReader.KEYTAB_FILE_OWNER_ACCESS, record.get(KerberosIdentityDataFileReader.KEYTAB_FILE_OWNER_ACCESS));
-                  keytabMap.put(KerberosIdentityDataFileReader.KEYTAB_FILE_GROUP_NAME, record.get(KerberosIdentityDataFileReader.KEYTAB_FILE_GROUP_NAME));
-                  keytabMap.put(KerberosIdentityDataFileReader.KEYTAB_FILE_GROUP_ACCESS, record.get(KerberosIdentityDataFileReader.KEYTAB_FILE_GROUP_ACCESS));
-
-                  BufferedInputStream bufferedIn = new BufferedInputStream(new FileInputStream(keytabFile));
-                  byte[] keytabContent = null;
-                  try {
-                    keytabContent = IOUtils.toByteArray(bufferedIn);
-                  } finally {
-                    bufferedIn.close();
-                  }
-                  String keytabContentBase64 = Base64.encodeBase64String(keytabContent);
-                  keytabMap.put(KerberosServerAction.KEYTAB_CONTENT_BASE64, keytabContentBase64);
 
-                  kcp.add(keytabMap);
                 }
-              }
-            } else if (REMOVE_KEYTAB.equalsIgnoreCase(command) || CHECK_KEYTABS.equalsIgnoreCase(command)) {
-              Map<String, String> keytabMap = new HashMap<>();
-
-              keytabMap.put(KerberosIdentityDataFileReader.HOSTNAME, hostName);
-              keytabMap.put(KerberosIdentityDataFileReader.SERVICE, record.get(KerberosIdentityDataFileReader.SERVICE));
-              keytabMap.put(KerberosIdentityDataFileReader.COMPONENT, record.get(KerberosIdentityDataFileReader.COMPONENT));
-              keytabMap.put(KerberosIdentityDataFileReader.PRINCIPAL, record.get(KerberosIdentityDataFileReader.PRINCIPAL));
-              keytabMap.put(KerberosIdentityDataFileReader.KEYTAB_FILE_PATH, record.get(KerberosIdentityDataFileReader.KEYTAB_FILE_PATH));
 
-              kcp.add(keytabMap);
+                kcp.add(keytabMap);
+              }
             }
           }
         }
       } catch (IOException e) {
         throw new AmbariException("Could not inject keytabs to enable kerberos");
-      } finally {
-        if (reader != null) {
-          try {
-            reader.close();
-          } catch (Throwable t) {
-            // ignored
-          }
-        }
       }
-
       ec.setKerberosCommandParams(kcp);
     }
   }
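
The SET_KEYTAB branch above still reads the keytab through a manual try/finally before base64-encoding it. Assuming the same commons-io and commons-codec dependencies, the read-and-encode step could equivalently use try-with-resources; a sketch, not the committed code:

    import java.io.BufferedInputStream;
    import java.io.File;
    import java.io.FileInputStream;
    import java.io.IOException;

    import org.apache.commons.codec.binary.Base64;
    import org.apache.commons.io.IOUtils;

    final class KeytabContent {
      /** Reads the keytab file and returns its content base64-encoded. */
      static String readAsBase64(File keytabFile) throws IOException {
        try (BufferedInputStream in = new BufferedInputStream(new FileInputStream(keytabFile))) {
          return Base64.encodeBase64String(IOUtils.toByteArray(in));
        }
      }
    }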

+ 1 - 3
ambari-server/src/main/java/org/apache/ambari/server/security/authentication/AmbariBasicAuthenticationFilter.java

@@ -108,10 +108,8 @@ public class AmbariBasicAuthenticationFilter extends BasicAuthenticationFilter i
    */
   @Override
   public void doFilterInternal(HttpServletRequest httpServletRequest, HttpServletResponse httpServletResponse, FilterChain filterChain) throws IOException, ServletException {
-
-
     if (eventHandler != null) {
-      eventHandler.beforeAttemptAuthentication(this, servletRequest, servletResponse);
+      eventHandler.beforeAttemptAuthentication(this, httpServletRequest, httpServletResponse);
     }
 
     super.doFilterInternal(httpServletRequest, httpServletResponse, filterChain);

+ 2 - 2
ambari-server/src/main/java/org/apache/ambari/server/state/alert/AlertUri.java

@@ -21,11 +21,11 @@ import java.net.URI;
 import java.util.Map;
 import java.util.Set;
 
-import com.fasterxml.jackson.annotation.JsonInclude;
-import com.fasterxml.jackson.annotation.JsonProperty;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.state.kerberos.VariableReplacementHelper;
 
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.gson.annotations.SerializedName;
 
 /**

+ 3 - 4
ambari-server/src/main/java/org/apache/ambari/server/state/alert/Reporting.java

@@ -17,17 +17,16 @@
  */
 package org.apache.ambari.server.state.alert;
 
-import java.util.Objects;
-
-import com.fasterxml.jackson.annotation.JsonInclude;
-import com.fasterxml.jackson.annotation.JsonProperty;
 import java.text.MessageFormat;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Objects;
 
 import org.apache.ambari.server.alerts.Threshold;
 import org.apache.ambari.server.state.AlertState;
 
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.gson.annotations.SerializedName;
 
 /**

+ 18 - 0
ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/RepositoryVersionHelper.java

@@ -621,5 +621,23 @@ public class RepositoryVersionHelper {
     });
   }
 
+  /**
+   * Get repository info given a cluster and host.
+   *
+   * @param cluster  the cluster
+   * @param host     the host
+   *
+   * @return the repo info
+   *
+   * @throws AmbariException if the repository information can not be obtained
+  public String getRepoInfoString(Cluster cluster, Host host) throws AmbariException {
+
+  return getRepoInfoString(cluster, host.getOsType(), host.getOsFamily(), host.getHostName());
+  }*/
+
+  public String getRepoInfoString(Cluster cluster, ServiceComponent component, Host host) throws AmbariException, SystemException {
+    return gson.toJson(getCommandRepository(cluster, component, host));
+  }
+
 
 }

+ 1 - 1
ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog300.java

@@ -58,8 +58,8 @@ import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
 import org.apache.ambari.server.state.ConfigHelper;
-import org.apache.ambari.server.state.State;
 import org.apache.ambari.server.state.ServiceComponentHost;
+import org.apache.ambari.server.state.State;
 import org.apache.ambari.server.state.kerberos.KerberosDescriptor;
 import org.apache.commons.collections.MapUtils;
 import org.apache.commons.lang.StringUtils;

+ 4 - 5
ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java

@@ -31,7 +31,6 @@ import static org.apache.ambari.server.agent.DummyHeartbeatConstants.HDFS;
 import static org.apache.ambari.server.agent.DummyHeartbeatConstants.HDFS_CLIENT;
 import static org.apache.ambari.server.agent.DummyHeartbeatConstants.NAMENODE;
 import static org.apache.ambari.server.agent.DummyHeartbeatConstants.SECONDARY_NAMENODE;
-import static org.apache.ambari.server.controller.KerberosHelperImpl.REMOVE_KEYTAB;
 import static org.apache.ambari.server.controller.KerberosHelperImpl.SET_KEYTAB;
 import static org.easymock.EasyMock.createMock;
 import static org.easymock.EasyMock.expect;
@@ -50,8 +49,8 @@ import java.io.BufferedWriter;
 import java.io.File;
 import java.io.FileWriter;
 import java.io.IOException;
-import java.lang.reflect.Method;
 import java.lang.reflect.Field;
+import java.lang.reflect.Method;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
@@ -1532,7 +1531,7 @@ public class TestHeartbeatHandler {
   }
 
 
-  private File createTestKeytabData(HeartBeatHandler heartbeatHandler) throws Exception {
+  private File createTestKeytabData(AgentCommandsPublisher agentCommandsPublisher) throws Exception {
     KerberosKeytabController kerberosKeytabControllerMock = createMock(KerberosKeytabController.class);
     expect(kerberosKeytabControllerMock.getFilteredKeytabs(null,null,null)).andReturn(
       Sets.newHashSet(
@@ -1561,9 +1560,9 @@ public class TestHeartbeatHandler {
 
     replay(kerberosKeytabControllerMock);
 
-    Field controllerField = heartbeatHandler.getClass().getDeclaredField("kerberosKeytabController");
+    Field controllerField = agentCommandsPublisher.getClass().getDeclaredField("kerberosKeytabController");
     controllerField.setAccessible(true);
-    controllerField.set(heartbeatHandler, kerberosKeytabControllerMock);
+    controllerField.set(agentCommandsPublisher, kerberosKeytabControllerMock);
 
     File dataDirectory = temporaryFolder.newFolder();
     File hostDirectory = new File(dataDirectory, "c6403.ambari.apache.org");

+ 2 - 1
ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java

@@ -48,6 +48,7 @@ import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.H2DatabaseCleaner;
 import org.apache.ambari.server.StackAccessException;
 import org.apache.ambari.server.configuration.Configuration;
+import org.apache.ambari.server.controller.internal.DeleteHostComponentStatusMetaData;
 import org.apache.ambari.server.controller.spi.Resource;
 import org.apache.ambari.server.events.publishers.AmbariEventPublisher;
 import org.apache.ambari.server.metadata.ActionMetadata;
@@ -1838,7 +1839,7 @@ public class AmbariMetaInfoTest {
     List<AlertDefinitionEntity> definitions = dao.findAll(clusterId);
     assertEquals(13, definitions.size());
 
-    cluster.deleteService("HDFS");
+    cluster.deleteService("HDFS", new DeleteHostComponentStatusMetaData());
     metaInfo.reconcileAlertDefinitions(clusters, false);
     List<AlertDefinitionEntity> updatedDefinitions = dao.findAll(clusterId);
     assertEquals(7, updatedDefinitions.size());

+ 4 - 6
ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelperTest.java

@@ -39,7 +39,6 @@ import org.apache.ambari.server.actionmanager.Request;
 import org.apache.ambari.server.actionmanager.Stage;
 import org.apache.ambari.server.agent.CommandRepository;
 import org.apache.ambari.server.agent.ExecutionCommand;
-import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.internal.ComponentResourceProviderTest;
 import org.apache.ambari.server.controller.internal.RequestOperationLevel;
@@ -631,13 +630,12 @@ public class AmbariCustomCommandExecutionHelperTest {
     ServiceComponent componentZKC = serviceZK.getServiceComponent("ZOOKEEPER_CLIENT");
     Host host = clusters.getHost("c1-c6401");
 
-    AmbariMetaInfo ambariMetaInfo = injector.getInstance(AmbariMetaInfo.class);
     StackDAO stackDAO = injector.getInstance(StackDAO.class);
     RepositoryVersionDAO repoVersionDAO = injector.getInstance(RepositoryVersionDAO.class);
     ServiceComponentDesiredStateDAO componentDAO = injector.getInstance(ServiceComponentDesiredStateDAO.class);
     RepositoryVersionHelper repoVersionHelper = injector.getInstance(RepositoryVersionHelper.class);
 
-    CommandRepository commandRepo = ambariMetaInfo.getCommandRepository(cluster, componentRM, host);
+    CommandRepository commandRepo = repoVersionHelper.getCommandRepository(cluster, componentRM, host);
     Assert.assertEquals(2, commandRepo.getRepositories().size());
 
 
@@ -646,7 +644,7 @@ public class AmbariCustomCommandExecutionHelperTest {
     repoDefinitionEntity1.setRepoID("new-id");
     repoDefinitionEntity1.setBaseUrl("http://foo");
     repoDefinitionEntity1.setRepoName("HDP");
-    RepoOsEntity repoOsEntity =newRepoOsEntity();
+    RepoOsEntity repoOsEntity = new RepoOsEntity();
     repoOsEntity.setFamily("redhat6");
     repoOsEntity.setAmbariManaged(true);
     repoOsEntity.addRepoDefinition(repoDefinitionEntity1);
     operatingSystems.add(repoOsEntity);
@@ -671,14 +669,14 @@ public class AmbariCustomCommandExecutionHelperTest {
     componentDAO.merge(componentEntity);
 
     // !!! make sure the override is set
-    commandRepo = ambariMetaInfo.getCommandRepository(cluster, componentRM, host);
+    commandRepo = repoVersionHelper.getCommandRepository(cluster, componentRM, host);
 
     Assert.assertEquals(1, commandRepo.getRepositories().size());
     CommandRepository.Repository repo = commandRepo.getRepositories().iterator().next();
     Assert.assertEquals("http://foo", repo.getBaseUrl());
 
     // verify that ZK has no repositories, since we haven't defined a repo version for ZKC
-    commandRepo = ambariMetaInfo.getCommandRepository(cluster, componentZKC, host);
+    commandRepo = repoVersionHelper.getCommandRepository(cluster, componentZKC, host);
     Assert.assertEquals(2, commandRepo.getRepositories().size());
   }
 

+ 1 - 1
ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java

@@ -84,9 +84,9 @@ import org.apache.ambari.server.orm.dao.ArtifactDAO;
 import org.apache.ambari.server.orm.dao.HostRoleCommandDAO;
 import org.apache.ambari.server.orm.dao.KerberosKeytabPrincipalDAO;
 import org.apache.ambari.server.orm.dao.KerberosPrincipalDAO;
+import org.apache.ambari.server.orm.entities.KerberosKeytabPrincipalEntity;
 import org.apache.ambari.server.scheduler.ExecutionScheduler;
 import org.apache.ambari.server.scheduler.ExecutionSchedulerImpl;
-import org.apache.ambari.server.orm.entities.KerberosKeytabPrincipalEntity;
 import org.apache.ambari.server.security.SecurityHelper;
 import org.apache.ambari.server.security.credential.PrincipalKeyCredential;
 import org.apache.ambari.server.security.encryption.CredentialStoreService;

+ 1 - 1
ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java

@@ -1024,7 +1024,7 @@ public class UpgradeResourceProviderTest extends EasyMockSupport {
 
     RepositoryVersionEntity repoVersionEntity = new RepositoryVersionEntity();
     repoVersionEntity.setDisplayName("My New Version 3");
-    repoVersionEntity.addRepoOsEntities(new ArrayList<>());
+    repoVersionEntity.addRepoOsEntities(createTestOperatingSystems());
     repoVersionEntity.setStack(stackEntity);
     repoVersionEntity.setVersion("2.2.2.3");
     repoVersionDao.create(repoVersionEntity);

+ 4 - 6
ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UserAuthenticationSourceResourceProviderTest.java

@@ -42,8 +42,6 @@ import org.apache.ambari.server.controller.spi.Resource;
 import org.apache.ambari.server.controller.spi.ResourceProvider;
 import org.apache.ambari.server.controller.utilities.PredicateBuilder;
 import org.apache.ambari.server.controller.utilities.PropertyHelper;
-import org.apache.ambari.server.hooks.HookContextFactory;
-import org.apache.ambari.server.hooks.HookService;
 import org.apache.ambari.server.orm.DBAccessor;
 import org.apache.ambari.server.orm.entities.UserAuthenticationEntity;
 import org.apache.ambari.server.orm.entities.UserEntity;
@@ -56,6 +54,7 @@ import org.apache.ambari.server.security.authorization.Users;
 import org.apache.ambari.server.stack.StackManagerFactory;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.stack.OsFamily;
+import org.apache.ambari.server.testutils.PartialNiceMockBinder;
 import org.apache.velocity.exception.ResourceNotFoundException;
 import org.easymock.EasyMockSupport;
 import org.junit.After;
@@ -64,7 +63,6 @@ import org.junit.Before;
 import org.junit.Test;
 import org.springframework.security.core.Authentication;
 import org.springframework.security.core.context.SecurityContextHolder;
-import org.springframework.security.crypto.password.PasswordEncoder;
 
 import com.google.inject.AbstractModule;
 import com.google.inject.Guice;
@@ -182,16 +180,16 @@ public class UserAuthenticationSourceResourceProviderTest extends EasyMockSuppor
     return Guice.createInjector(new AbstractModule() {
       @Override
       protected void configure() {
+        PartialNiceMockBinder.newBuilder(UserAuthenticationSourceResourceProviderTest.this)
+            .addAmbariMetaInfoBinding().build().configure(binder());
+
         bind(EntityManager.class).toInstance(createNiceMock(EntityManager.class));
         bind(DBAccessor.class).toInstance(createNiceMock(DBAccessor.class));
         bind(OsFamily.class).toInstance(createNiceMock(OsFamily.class));
         bind(AmbariMetaInfo.class).toInstance(createMock(AmbariMetaInfo.class));
         bind(Clusters.class).toInstance(createNiceMock(Clusters.class));
         bind(StackManagerFactory.class).toInstance(createNiceMock(StackManagerFactory.class));
-        bind(PasswordEncoder.class).toInstance(createNiceMock(PasswordEncoder.class));
         bind(Users.class).toInstance(createMock(Users.class));
-        bind(HookService.class).toInstance(createMock(HookService.class));
-        bind(HookContextFactory.class).toInstance(createMock(HookContextFactory.class));
       }
     });
   }

+ 12 - 6
ambari-server/src/test/java/org/apache/ambari/server/orm/OrmTestHelper.java

@@ -678,12 +678,18 @@ public class OrmTestHelper {
         repoDefinitionEntity2.setRepoID("HDP-UTILS");
         repoDefinitionEntity2.setBaseUrl("");
         repoDefinitionEntity2.setRepoName("HDP-UTILS");
-        RepoOsEntity repoOsEntity = new RepoOsEntity();
-        repoOsEntity.setFamily("redhat6");
-        repoOsEntity.setAmbariManaged(true);
-        repoOsEntity.addRepoDefinition(repoDefinitionEntity1);
-        repoOsEntity.addRepoDefinition(repoDefinitionEntity2);
-        operatingSystems.add(repoOsEntity);
+        RepoOsEntity repoOsEntityRedHat6 = new RepoOsEntity();
+        repoOsEntityRedHat6.setFamily("redhat6");
+        repoOsEntityRedHat6.setAmbariManaged(true);
+        repoOsEntityRedHat6.addRepoDefinition(repoDefinitionEntity1);
+        repoOsEntityRedHat6.addRepoDefinition(repoDefinitionEntity2);
+        RepoOsEntity repoOsEntityRedHat5 = new RepoOsEntity();
+        repoOsEntityRedHat5.setFamily("redhat5");
+        repoOsEntityRedHat5.setAmbariManaged(true);
+        repoOsEntityRedHat5.addRepoDefinition(repoDefinitionEntity1);
+        repoOsEntityRedHat5.addRepoDefinition(repoDefinitionEntity2);
+        operatingSystems.add(repoOsEntityRedHat6);
+        operatingSystems.add(repoOsEntityRedHat5);
 
 
         repositoryVersion = repositoryVersionDAO.create(stackEntity, version,

+ 1 - 1
ambari-server/src/test/java/org/apache/ambari/server/security/authentication/AmbariBasicAuthenticationFilterTest.java

@@ -117,7 +117,7 @@ public class AmbariBasicAuthenticationFilterTest extends EasyMockSupport {
     Capture<? extends AmbariAuthenticationFilter> captureFilter = newCapture(CaptureType.ALL);
 
     // GIVEN
-    HttpServletRequest request = createMock(HttpServletRequest.class);
+    HttpServletRequest request = createNiceMock(HttpServletRequest.class);
     HttpServletResponse response = createMock(HttpServletResponse.class);
     HttpSession session = createMock(HttpSession.class);
     FilterChain filterChain = createMock(FilterChain.class);

+ 1 - 0
ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterTest.java

@@ -156,6 +156,7 @@ public class ClusterTest {
   public void setup() throws Exception {
     injector = Guice.createInjector(Modules.override(new InMemoryDefaultTestModule()).with(new MockModule()));
     injector.getInstance(GuiceJpaInitializer.class);
+    EventBusSynchronizer.synchronizeAmbariEventPublisher(injector);
     clusters = injector.getInstance(Clusters.class);
     serviceFactory = injector.getInstance(ServiceFactory.class);
     configGroupFactory = injector.getInstance(ConfigGroupFactory.class);

+ 47 - 59
ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog300Test.java

@@ -122,6 +122,7 @@ import org.apache.ambari.server.actionmanager.ActionDBAccessorImpl;
 import org.apache.ambari.server.actionmanager.ActionManager;
 import org.apache.ambari.server.actionmanager.HostRoleCommandFactory;
 import org.apache.ambari.server.actionmanager.HostRoleCommandFactoryImpl;
+import org.apache.ambari.server.actionmanager.HostRoleStatus;
 import org.apache.ambari.server.actionmanager.StageFactory;
 import org.apache.ambari.server.actionmanager.StageFactoryImpl;
 import org.apache.ambari.server.agent.stomp.AgentConfigsHolder;
@@ -129,32 +130,30 @@ import org.apache.ambari.server.agent.stomp.MetadataHolder;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.audit.AuditLogger;
 import org.apache.ambari.server.audit.AuditLoggerDefaultImpl;
-import org.apache.ambari.server.actionmanager.HostRoleStatus;
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.AbstractRootServiceResponseFactory;
 import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.controller.AmbariManagementControllerImpl;
-import org.apache.ambari.server.controller.KerberosHelper;
-import org.apache.ambari.server.controller.KerberosHelperImpl;
 import org.apache.ambari.server.controller.AmbariServer;
 import org.apache.ambari.server.controller.KerberosHelper;
+import org.apache.ambari.server.controller.KerberosHelperImpl;
 import org.apache.ambari.server.controller.MaintenanceStateHelper;
 import org.apache.ambari.server.controller.RootServiceResponseFactory;
 import org.apache.ambari.server.controller.ServiceConfigVersionResponse;
+import org.apache.ambari.server.controller.internal.AmbariServerConfigurationCategory;
 import org.apache.ambari.server.events.MetadataUpdateEvent;
 import org.apache.ambari.server.hooks.HookService;
 import org.apache.ambari.server.hooks.users.UserHookService;
+import org.apache.ambari.server.ldap.domain.AmbariLdapConfigurationKeys;
 import org.apache.ambari.server.metadata.CachedRoleCommandOrderProvider;
 import org.apache.ambari.server.metadata.RoleCommandOrderProvider;
-import org.apache.ambari.server.controller.internal.AmbariServerConfigurationCategory;
-import org.apache.ambari.server.ldap.domain.AmbariLdapConfigurationKeys;
 import org.apache.ambari.server.orm.DBAccessor;
+import org.apache.ambari.server.orm.dao.AmbariConfigurationDAO;
 import org.apache.ambari.server.scheduler.ExecutionScheduler;
 import org.apache.ambari.server.security.SecurityHelper;
 import org.apache.ambari.server.security.encryption.CredentialStoreService;
-import org.apache.ambari.server.stack.StackManagerFactory;
-import org.apache.ambari.server.orm.dao.AmbariConfigurationDAO;
 import org.apache.ambari.server.serveraction.kerberos.PrepareKerberosIdentitiesServerAction;
+import org.apache.ambari.server.stack.StackManagerFactory;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
@@ -314,44 +313,6 @@ public class UpgradeCatalog300Test {
 
   @Test
   public void testExecuteDDLUpdates() throws Exception {
-    Module module = new AbstractModule() {
-      @Override
-      public void configure() {
-        PartialNiceMockBinder.newBuilder().addConfigsBindings().addFactoriesInstallBinding().build().configure(binder());
-
-        bind(DBAccessor.class).toInstance(dbAccessor);
-        bind(OsFamily.class).toInstance(osFamily);
-        bind(EntityManager.class).toInstance(entityManager);
-        bind(PersistedState.class).toInstance(mock(PersistedStateImpl.class));
-        bind(Clusters.class).toInstance(mock(ClustersImpl.class));
-        bind(SecurityHelper.class).toInstance(mock(SecurityHelper.class));
-        bind(HostRoleCommandFactory.class).to(HostRoleCommandFactoryImpl.class);
-        bind(ActionDBAccessor.class).toInstance(createNiceMock(ActionDBAccessorImpl.class));
-        bind(UnitOfWork.class).toInstance(createNiceMock(UnitOfWork.class));
-        bind(RoleCommandOrderProvider.class).to(CachedRoleCommandOrderProvider.class);
-        bind(StageFactory.class).to(StageFactoryImpl.class);
-        bind(AuditLogger.class).toInstance(createNiceMock(AuditLoggerDefaultImpl.class));
-        bind(PasswordEncoder.class).toInstance(new StandardPasswordEncoder());
-        bind(HookService.class).to(UserHookService.class);
-        bind(ServiceComponentHostFactory.class).toInstance(createNiceMock(ServiceComponentHostFactory.class));
-        bind(AbstractRootServiceResponseFactory.class).to(RootServiceResponseFactory.class);
-        bind(CredentialStoreService.class).toInstance(createNiceMock(CredentialStoreService.class));
-        bind(AmbariManagementController.class).toInstance(createNiceMock(AmbariManagementControllerImpl.class));
-        bind(ExecutionScheduler.class).toInstance(createNiceMock(ExecutionScheduler.class));
-        bind(AmbariMetaInfo.class).toInstance(createNiceMock(AmbariMetaInfo.class));
-        bind(KerberosHelper.class).toInstance(createNiceMock(KerberosHelperImpl.class));
-        bind(StackManagerFactory.class).toInstance(createNiceMock(StackManagerFactory.class));
-
-        install(new FactoryModuleBuilder().implement(
-            Host.class, HostImpl.class).build(HostFactory.class));
-        install(new FactoryModuleBuilder().implement(
-            Cluster.class, ClusterImpl.class).build(ClusterFactory.class));
-        install(new FactoryModuleBuilder().build(UpgradeContextFactory.class));
-        install(new FactoryModuleBuilder().implement(
-            Service.class, ServiceImpl.class).build(ServiceFactory.class));
-      }
-    };
-
     // updateStageTable
     Capture<DBAccessor.DBColumnInfo> updateStageTableCaptures = newCapture(CaptureType.ALL);
     dbAccessor.addColumn(eq(STAGE_TABLE), capture(updateStageTableCaptures));
@@ -438,7 +399,7 @@ public class UpgradeCatalog300Test {
 
     replay(dbAccessor);
 
-    Injector injector = Guice.createInjector(module);
+    Injector injector = Guice.createInjector(getTestGuiceModule());
     UpgradeCatalog300 upgradeCatalog300 = injector.getInstance(UpgradeCatalog300.class);
     upgradeCatalog300.executeDDLUpdates();
 
@@ -506,14 +467,42 @@ public class UpgradeCatalog300Test {
   }
 
   private Module getTestGuiceModule() {
-    Module module = new Module() {
+    Module module = new AbstractModule() {
       @Override
-      public void configure(Binder binder) {
-        binder.bind(DBAccessor.class).toInstance(dbAccessor);
-        binder.bind(OsFamily.class).toInstance(osFamily);
-        binder.bind(EntityManager.class).toInstance(entityManager);
-        binder.bind(Configuration.class).toInstance(configuration);
-        binder.bind(AmbariConfigurationDAO.class).toInstance(ambariConfigurationDao);
+      public void configure() {
+        PartialNiceMockBinder.newBuilder().addConfigsBindings().addFactoriesInstallBinding().build().configure(binder());
+
+        bind(DBAccessor.class).toInstance(dbAccessor);
+        bind(OsFamily.class).toInstance(osFamily);
+        bind(EntityManager.class).toInstance(entityManager);
+        bind(AmbariConfigurationDAO.class).toInstance(ambariConfigurationDao);
+        bind(PersistedState.class).toInstance(mock(PersistedStateImpl.class));
+        bind(Clusters.class).toInstance(mock(ClustersImpl.class));
+        bind(SecurityHelper.class).toInstance(mock(SecurityHelper.class));
+        bind(HostRoleCommandFactory.class).to(HostRoleCommandFactoryImpl.class);
+        bind(ActionDBAccessor.class).toInstance(createNiceMock(ActionDBAccessorImpl.class));
+        bind(UnitOfWork.class).toInstance(createNiceMock(UnitOfWork.class));
+        bind(RoleCommandOrderProvider.class).to(CachedRoleCommandOrderProvider.class);
+        bind(StageFactory.class).to(StageFactoryImpl.class);
+        bind(AuditLogger.class).toInstance(createNiceMock(AuditLoggerDefaultImpl.class));
+        bind(PasswordEncoder.class).toInstance(new StandardPasswordEncoder());
+        bind(HookService.class).to(UserHookService.class);
+        bind(ServiceComponentHostFactory.class).toInstance(createNiceMock(ServiceComponentHostFactory.class));
+        bind(AbstractRootServiceResponseFactory.class).to(RootServiceResponseFactory.class);
+        bind(CredentialStoreService.class).toInstance(createNiceMock(CredentialStoreService.class));
+        bind(AmbariManagementController.class).toInstance(createNiceMock(AmbariManagementControllerImpl.class));
+        bind(ExecutionScheduler.class).toInstance(createNiceMock(ExecutionScheduler.class));
+        bind(AmbariMetaInfo.class).toInstance(createNiceMock(AmbariMetaInfo.class));
+        bind(KerberosHelper.class).toInstance(createNiceMock(KerberosHelperImpl.class));
+        bind(StackManagerFactory.class).toInstance(createNiceMock(StackManagerFactory.class));
+
+        install(new FactoryModuleBuilder().implement(
+            Host.class, HostImpl.class).build(HostFactory.class));
+        install(new FactoryModuleBuilder().implement(
+            Cluster.class, ClusterImpl.class).build(ClusterFactory.class));
+        install(new FactoryModuleBuilder().build(UpgradeContextFactory.class));
+        install(new FactoryModuleBuilder().implement(
+            Service.class, ServiceImpl.class).build(ServiceFactory.class));
       }
     };
     return module;
@@ -990,18 +979,17 @@ public class UpgradeCatalog300Test {
   public void shouldSaveLdapConfigurationIfPropertyIsSetInAmbariProperties() throws Exception {
     final Module module = getTestGuiceModule();
 
-    expect(configuration.getProperty("ambari.ldap.isConfigured")).andReturn("true").anyTimes();
-
     expect(entityManager.find(anyObject(), anyObject())).andReturn(null).anyTimes();
     final Map<String, String> properties = new HashMap<>();
     properties.put(AmbariLdapConfigurationKeys.LDAP_ENABLED.key(), "true");
     expect(ambariConfigurationDao.reconcileCategory(AmbariServerConfigurationCategory.LDAP_CONFIGURATION.getCategoryName(), properties, false)).andReturn(true).once();
-    replay(configuration, entityManager, ambariConfigurationDao);
+    replay(entityManager, ambariConfigurationDao);
 
     final Injector injector = Guice.createInjector(module);
+    injector.getInstance(Configuration.class).setProperty("ambari.ldap.isConfigured", "true");
     final UpgradeCatalog300 upgradeCatalog300 = new UpgradeCatalog300(injector);
     upgradeCatalog300.upgradeLdapConfiguration();
-    verify(configuration, entityManager, ambariConfigurationDao);
+    verify(entityManager, ambariConfigurationDao);
   }
 
   @Test
@@ -1011,7 +999,7 @@ public class UpgradeCatalog300Test {
     final Map<String, String> properties = new HashMap<>();
     properties.put(AmbariLdapConfigurationKeys.LDAP_ENABLED.key(), "true");
     expect(ambariConfigurationDao.reconcileCategory(AmbariServerConfigurationCategory.LDAP_CONFIGURATION.getCategoryName(), properties, false)).andReturn(true).once();
-    replay(configuration, entityManager, ambariConfigurationDao);
+    replay(entityManager, ambariConfigurationDao);
 
     final Injector injector = Guice.createInjector(module);
     final UpgradeCatalog300 upgradeCatalog300 = new UpgradeCatalog300(injector);
@@ -1019,6 +1007,6 @@ public class UpgradeCatalog300Test {
 
     expectedException.expect(AssertionError.class);
     expectedException.expectMessage("Expectation failure on verify");
-    verify(configuration, entityManager, ambariConfigurationDao);
+    verify(entityManager, ambariConfigurationDao);
   }
 }